Commits

Jannis Leidel committed f5c2f7d Merge

Automated merge with ssh://bitbucket.org/david/django-storages


Files changed (2)

storages/backends/gs.py

+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO  # noqa
+
 from django.core.exceptions import ImproperlyConfigured
 
 from storages.backends.s3boto import S3BotoStorage, S3BotoStorageFile, setting
 
 
 class GSBotoStorageFile(S3BotoStorageFile):
-    buffer_size = setting('GS_FILE_BUFFER_SIZE', 5242880)
+
+    def write(self, content):
+        if 'w' not in self._mode:
+            raise AttributeError("File was not opened in write mode.")
+        self.file = StringIO(content)
+        self._is_dirty = True
+
+    def close(self):
+        if self._is_dirty:
+            provider = self.key.bucket.connection.provider
+            upload_headers = {provider.acl_header: self._storage.default_acl}
+            upload_headers.update(self._storage.headers)
+            self._storage._save_content(self.key, self.file, upload_headers)
+        self.key.close()
 
 
 class GSBotoStorage(S3BotoStorage):
     file_class = GSBotoStorageFile
     key_class = GSKey
 
+    access_key_names = ['GS_ACCESS_KEY_ID']
+    secret_key_names = ['GS_SECRET_ACCESS_KEY']
+
     access_key = setting('GS_ACCESS_KEY_ID')
     secret_key = setting('GS_SECRET_ACCESS_KEY')
     file_overwrite = setting('GS_FILE_OVERWRITE', True)
     headers = setting('GS_HEADERS', {})
-    storage_bucket_name = setting('GS_BUCKET_NAME', None)
+    bucket_name = setting('GS_BUCKET_NAME', None)
     auto_create_bucket = setting('GS_AUTO_CREATE_BUCKET', False)
     default_acl = setting('GS_DEFAULT_ACL', 'public-read')
     bucket_acl = setting('GS_BUCKET_ACL', default_acl)
     querystring_auth = setting('GS_QUERYSTRING_AUTH', True)
     querystring_expire = setting('GS_QUERYSTRING_EXPIRE', 3600)
-    reduced_redundancy = setting('GS_REDUCED_REDUNDANCY', False)
+    durable_reduced_availability = setting('GS_DURABLE_REDUCED_AVAILABILITY', False)
     location = setting('GS_LOCATION', '')
     custom_domain = setting('GS_CUSTOM_DOMAIN')
     calling_format = setting('GS_CALLING_FORMAT', SubdomainCallingFormat())
     gzip_content_types = setting('GS_GZIP_CONTENT_TYPES', (
         'text/css',
         'application/javascript',
         'application/x-javascript',
     ))
     url_protocol = setting('GS_URL_PROTOCOL', 'http:')
+
+    def _save_content(self, key, content, headers):
+        # only pass backwards incompatible arguments if they vary from the default
+        options = {}
+        if self.encryption:
+            options['encrypt_key'] = self.encryption
+        key.set_contents_from_file(content, headers=headers,
+                                   policy=self.default_acl,
+                                   rewind=True, **options)
+
+    def _get_or_create_bucket(self, name):
+        """
+        Retrieves a bucket if it exists, otherwise creates it.
+        """
+        if self.durable_reduced_availability:
+            storage_class = 'DURABLE_REDUCED_AVAILABILITY'
+        else:
+            storage_class = 'STANDARD'
+        try:
+            return self.connection.get_bucket(name,
+                validate=self.auto_create_bucket)
+        except self.connection_response_error:
+            if self.auto_create_bucket:
+                bucket = self.connection.create_bucket(name, storage_class=storage_class)
+                bucket.set_acl(self.bucket_acl)
+                return bucket
+            raise ImproperlyConfigured("Bucket %s does not exist. Buckets "
+                                       "can be created automatically by "
+                                       "setting GS_AUTO_CREATE_BUCKET to "
+                                       "True." % name)
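For reference, a minimal settings sketch for the options this diff touches (values are placeholders and not part of the commit; DEFAULT_FILE_STORAGE is the standard Django setting, and the backend path follows from the file and class names above):

    # settings.py -- illustrative values only
    DEFAULT_FILE_STORAGE = 'storages.backends.gs.GSBotoStorage'

    GS_ACCESS_KEY_ID = 'gs-access-key'        # if unset, found via access_key_names in the env
    GS_SECRET_ACCESS_KEY = 'gs-secret-key'    # likewise via secret_key_names
    GS_BUCKET_NAME = 'my-bucket'              # backing attribute renamed storage_bucket_name -> bucket_name
    GS_AUTO_CREATE_BUCKET = True              # lets _get_or_create_bucket create a missing bucket
    GS_DURABLE_REDUCED_AVAILABILITY = True    # new flag; auto-created buckets get the DRA storage class

With GS_AUTO_CREATE_BUCKET left at its default of False, a missing bucket now raises ImproperlyConfigured up front instead of failing later.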

storages/backends/s3boto.py

     file_class = S3BotoStorageFile
     key_class = S3Key
 
+    # used for looking up the access and secret key from env vars
+    access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID']
+    secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY']
+
     access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID'))
     secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY'))
     file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True)
         are provided to the class in the constructor or in the
         settings then get them from the environment variables.
         """
-        access_key = self.access_key
-        secret_key = self.secret_key
-
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            # TODO: this seems to be broken
-            access_key = os.environ.get(self.access_key)
-            secret_key = os.environ.get(self.secret_key)
-
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-
-        return None, None
+        def lookup_env(names):
+            for name in names:
+                value = os.environ.get(name)
+                if value:
+                    return value
+        access_key = self.access_key or lookup_env(self.access_key_names)
+        secret_key = self.secret_key or lookup_env(self.secret_key_names)
+        return access_key, secret_key
 
     def _get_or_create_bucket(self, name):
         """
 
     def _open(self, name, mode='rb'):
         name = self._normalize_name(self._clean_name(name))
-        f = S3BotoStorageFile(name, mode, self)
+        f = self.file_class(name, mode, self)
         if not f.key:
             raise IOError('File does not exist: %s' % name)
         return f
             self._entries[encoded_name] = key
 
         key.set_metadata('Content-Type', content_type)
+        self._save_content(key, content, headers=headers)
+        return cleaned_name
+
+    def _save_content(self, key, content, headers):
         # only pass backwards incompatible arguments if they vary from the default
         kwargs = {}
         if self.encryption:
             kwargs['encrypt_key'] = self.encryption
         key.set_contents_from_file(content, headers=headers,
                                    policy=self.default_acl,
                                    reduced_redundancy=self.reduced_redundancy,
                                    rewind=True, **kwargs)
-        return cleaned_name
 
     def delete(self, name):
         name = self._normalize_name(self._clean_name(name))
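The credential refactor replaces the broken os.environ.get(self.access_key) lookup with an ordered name search: an explicitly configured key wins, otherwise the backend-specific environment variable is consulted before the generic one. A standalone sketch of that precedence (illustrative; it mirrors _get_access_keys rather than importing django-storages):

    import os

    def lookup_env(names):
        # Return the first non-empty value among the named env vars.
        for name in names:
            value = os.environ.get(name)
            if value:
                return value

    os.environ['AWS_ACCESS_KEY_ID'] = 'generic-key'
    os.environ['AWS_S3_ACCESS_KEY_ID'] = 's3-specific-key'

    access_key = None  # i.e. no access key configured in settings
    access_key = access_key or lookup_env(['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID'])
    print(access_key)  # -> 's3-specific-key'

Subclasses only need to override the name lists, which is exactly what GSBotoStorage does above with GS_ACCESS_KEY_ID and GS_SECRET_ACCESS_KEY. The new _save_content and file_class hooks follow the same pattern: the GS backend swaps in its own upload call and file type without duplicating the rest of _save and _open.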