Commits

Rich Leland committed 711c555 Merge

Pulled mosso changes from smileychris

Comments (0)

Files changed (1)

backends/mosso.py

 Custom storage for django with Mosso Cloud Files backend.
 Created by Rich Leland <rich@richleland.com>.
 """
-import re
-
 from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
 from django.core.files import File
 from django.core.files.storage import Storage
-from django.core.exceptions import ImproperlyConfigured
 from django.utils.text import get_valid_filename
 
-
 try:
     import cloudfiles
     from cloudfiles.errors import NoSuchObject
 except ImportError:
-    raise ImproperlyConfigured, "Could not load cloudfiles dependency. See http://www.mosso.com/cloudfiles.jsp."
-
-try:
-    CLOUDFILES_USERNAME = settings.CLOUDFILES_USERNAME
-    CLOUDFILES_API_KEY = settings.CLOUDFILES_API_KEY
-    CLOUDFILES_CONTAINER = settings.CLOUDFILES_CONTAINER
-except AttributeError:
-    raise ImproperlyConfigured, "CLOUDFILES_USERNAME, CLOUDFILES_API_KEY, and CLOUDFILES_CONTAINER must be supplied in settings.py."
+    raise ImproperlyConfigured("Could not load cloudfiles dependency. See "
+                               "http://www.mosso.com/cloudfiles.jsp.")
 
 # TODO: implement TTL into cloudfiles methods
 CLOUDFILES_TTL = getattr(settings, 'CLOUDFILES_TTL', 600)
     """
     Simple, custom upload_to because Cloud Files doesn't support
     nested containers (directories).
-    
+
     Actually found this out from @minter:
-    @richleland The Cloud Files APIs do support pseudo-subdirectories, by 
+    @richleland The Cloud Files APIs do support pseudo-subdirectories, by
     creating zero-byte files with type application/directory.
-    
+
     May implement in a future version.
     """
     return get_valid_filename(filename)
     """
     Custom storage for Mosso Cloud Files.
     """
-    
-    def __init__(self):
+    default_quick_listdir = True
+
+    def __init__(self, username=None, api_key=None, container=None,
+                 connection_kwargs=None):
         """
-        Here we set up the connection and select the user-supplied container.
-        If the container isn't public (available on Limelight CDN), we make
-        it a publicly available container.
+        Initialize the settings for the connection and container.
         """
-        self.connection = cloudfiles.get_connection(CLOUDFILES_USERNAME,
-                                                    CLOUDFILES_API_KEY)
-        self.container = self.connection.get_container(CLOUDFILES_CONTAINER)
-        if not self.container.is_public():
-            self.container.make_public()
-    
+        self.username = username or settings.CLOUDFILES_USERNAME
+        self.api_key = api_key or settings.CLOUDFILES_API_KEY
+        self.container_name = container or settings.CLOUDFILES_CONTAINER
+        self.connection_kwargs = connection_kwargs or {}
+
+    def __getstate__(self):
+        """
+        Return a picklable representation of the storage.
+        """
+        return dict(username=self.username,
+                    api_key=self.api_key,
+                    container_name=self.container_name,
+                    connection_kwargs=self.connection_kwargs)
+
+    def _get_connection(self):
+        if not hasattr(self, '_connection'):
+            self._connection = cloudfiles.get_connection(self.username,
+                                    self.api_key, **self.connection_kwargs)
+        return self._connection
+
+    def _set_connection(self, value):
+        self._connection = value
+
+    connection = property(_get_connection, _set_connection)
+
+    def _get_container(self):
+        if not hasattr(self, '_container'):
+            self.container = self.connection.get_container(
+                                                        self.container_name)
+        return self._container
+
+    def _set_container(self, container):
+        """
+        Set the container, making it publicly available (on Limelight CDN) if
+        it is not already.
+        """
+        if not container.is_public():
+            container.make_public()
+        if hasattr(self, '_container_public_uri'):
+            delattr(self, '_container_public_uri')
+        self._container = container
+
+    container = property(_get_container, _set_container)
+
+    def _get_container_url(self):
+        if not hasattr(self, '_container_public_uri'):
+            self._container_public_uri = self.container.public_uri()
+        return self._container_public_uri
+
+    container_url = property(_get_container_url)
+
     def _get_cloud_obj(self, name):
         """
         Helper function to get retrieve the requested Cloud Files Object.
 
     def _open(self, name, mode='rb'):
         """
-        Not sure if this is the proper way to execute this. Would love input.
+        Return the CloudFilesStorageFile.
         """
-        return File(self._get_cloud_obj(name).read())
+        return CloudFilesStorageFile(storage=self, name=name)
 
     def _save(self, name, content):
         """
-        Here we're opening the content object and saving it to the Cloud Files
-        service. We have to set the content_type so it's delivered properly
-        when requested via public URI.
+        Use the Cloud Files service to write ``content`` to a remote file
+        (called ``name``).
         """
         content.open()
-        if hasattr(content, 'chunks'):
-            content_str = ''.join(chunk for chunk in content.chunks())
-        else:
-            content_str = content.read()
         cloud_obj = self.container.create_object(name)
+        # If the content type is available, pass it in directly rather than
+        # getting the cloud object to try to guess.
         if hasattr(content.file, 'content_type'):
             cloud_obj.content_type = content.file.content_type
-        cloud_obj.send(content_str)
+        cloud_obj.send(content)
         content.close()
         return name
-    
+
     def delete(self, name):
         """
         Deletes the specified file from the storage system.
 
     def exists(self, name):
         """
-        Returns True if a file referened by the given name already exists in the
-        storage system, or False if the name is available for a new file.
+        Returns True if a file referenced by the given name already exists in
+        the storage system, or False if the name is available for a new file.
         """
         try:
             self._get_cloud_obj(name)
             return True
         except NoSuchObject:
             return False
-        
+
     def listdir(self, path):
         """
+        Lists the contents of the specified path, returning a 2-tuple; the
+        first being an empty list of directories (not available for quick-
+        listing), the second being a list of filenames.
+
+        If the list of directories is required, use the full_listdir method.
+        """
+        files = []
+        if path and not path.endswith('/'):
+            path = '%s/' % path
+        path_len = len(path)
+        for name in self.container.list_objects(path=path):
+            files.append(name[path_len:])
+        return ([], files)
+
+    def full_listdir(self, path):
+        """
         Lists the contents of the specified path, returning a 2-tuple of lists;
         the first item being directories, the second item being files.
+
+        On large containers, this may be a slow operation for root containers
+        because every single object must be returned (cloudfiles does not
+        provide an explicit way of listing directories).
         """
-        return ([], self.container.list_objects(path=path))
+        dirs = set()
+        files = []
+        if path and not path.endswith('/'):
+            path = '%s/' % path
+        path_len = len(path)
+        for name in self.container.list_objects(prefix=path):
+            name = name[path_len:]
+            slash = name[1:-1].find('/') + 1
+            if slash:
+                dirs.add(name[:slash])
+            elif name:
+                files.append(name)
+        dirs = list(dirs)
+        dirs.sort()
+        return (dirs, files)
 
     def size(self, name):
         """
         Returns the total size, in bytes, of the file specified by name.
         """
-        return self._get_cloud_obj(name).size()
+        return self._get_cloud_obj(name).size
 
     def url(self, name):
         """
         Returns an absolute URL where the file's contents can be accessed
         directly by a web browser.
         """
-        return self._get_cloud_obj(name).public_uri()
+        return '%s/%s' % (self.container_url, name)
+
+
class CloudFilesStorageFile(File):
    """
    Lazy, read-only File wrapper around a remote Cloud Files object.

    The remote object is only fetched on first access to ``file`` (or via
    ``open``); ``read`` tracks its own offset in ``_pos`` because
    cloudfiles reads are positional (size + offset) rather than stateful.
    """
    # NOTE: a redundant ``closed = False`` class attribute used to sit
    # here; it was dead code, shadowed by the ``closed`` property defined
    # later in the class body, and has been removed.

    def __init__(self, storage, name, *args, **kwargs):
        self._storage = storage
        # file=None: the cloud object is resolved lazily by _get_file.
        super(CloudFilesStorageFile, self).__init__(file=None, name=name,
                                                    *args, **kwargs)

    def _get_size(self):
        """Fetch the remote size once and cache it."""
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self.name)
        return self._size

    def _set_size(self, size):
        self._size = size

    size = property(_get_size, _set_size)

    def _get_file(self):
        """Fetch the remote cloud object on first access and cache it."""
        if not hasattr(self, '_file'):
            self._file = self._storage._get_cloud_obj(self.name)
        return self._file

    def _set_file(self, value):
        # Setting file to None drops the cached object (marks us closed).
        if value is None:
            if hasattr(self, '_file'):
                del self._file
        else:
            self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        """
        Read ``num_bytes`` bytes from the current position (everything
        remaining when ``num_bytes`` is None) and advance the position.
        """
        if num_bytes == 0:
            # Match standard file semantics: an explicit zero-length read
            # returns an empty string. Previously ``num_bytes or -1``
            # turned 0 into -1 and read the whole remote object.
            return ''
        data = self.file.read(size=num_bytes or -1, offset=self._pos)
        self._pos += len(data)
        return data

    def open(self, *args, **kwargs):
        """
        Ensure the remote object is fetched and rewind to the start.
        """
        self.file
        self._pos = 0

    def close(self, *args, **kwargs):
        # Just rewind; the cached cloud object holds no local resources.
        self._pos = 0

    @property
    def closed(self):
        # "Closed" means the remote object has not been fetched (or was
        # dropped via ``file = None``).
        return not hasattr(self, '_file')

    def seek(self, pos):
        self._pos = pos
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.