Commits

Jannis Leidel committed 813a600

Fixed key handling in the S3Boto and Google Storage backends and made sure the Google Storage backend uses the correct file class and boto API for durable reduced availability.
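
For context, a minimal settings sketch using the options this commit touches (hypothetical values throughout; GSBotoStorage is assumed to be the storage class defined in storages/backends/gs.py):

    # settings.py -- hypothetical Django project configuration
    DEFAULT_FILE_STORAGE = 'storages.backends.gs.GSBotoStorage'
    GS_BUCKET_NAME = 'example-bucket'       # placeholder bucket name
    GS_AUTO_CREATE_BUCKET = True
    GS_DURABLE_REDUCED_AVAILABILITY = True  # replaces GS_REDUCED_REDUNDANCY

With the new access_key_names/secret_key_names attributes, GS_ACCESS_KEY_ID and GS_SECRET_ACCESS_KEY can also come from environment variables instead of settings.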

Files changed (2)

storages/backends/gs.py

     file_class = GSBotoStorageFile
     key_class = GSKey
 
+    access_key_names = ['GS_ACCESS_KEY_ID']
+    secret_key_names = ['GS_SECRET_ACCESS_KEY']
+
     access_key = setting('GS_ACCESS_KEY_ID')
     secret_key = setting('GS_SECRET_ACCESS_KEY')
     file_overwrite = setting('GS_FILE_OVERWRITE', True)
     headers = setting('GS_HEADERS', {})
-    storage_bucket_name = setting('GS_BUCKET_NAME', None)
+    bucket_name = setting('GS_BUCKET_NAME', None)
     auto_create_bucket = setting('GS_AUTO_CREATE_BUCKET', False)
     default_acl = setting('GS_DEFAULT_ACL', 'public-read')
     bucket_acl = setting('GS_BUCKET_ACL', default_acl)
     querystring_auth = setting('GS_QUERYSTRING_AUTH', True)
     querystring_expire = setting('GS_QUERYSTRING_EXPIRE', 3600)
-    reduced_redundancy = setting('GS_REDUCED_REDUNDANCY', False)
+    durable_reduced_availability = setting('GS_DURABLE_REDUCED_AVAILABILITY', False)
     location = setting('GS_LOCATION', '')
     custom_domain = setting('GS_CUSTOM_DOMAIN')
     calling_format = setting('GS_CALLING_FORMAT', SubdomainCallingFormat())
         'application/x-javascript',
     ))
     url_protocol = setting('GS_URL_PROTOCOL', 'http:')
+
+    def _save_content(self, key, content, headers):
+        # only pass backwards-incompatible arguments if they vary from the default
+        options = {}
+        if self.encryption:
+            options['encrypt_key'] = self.encryption
+        key.set_contents_from_file(content, headers=headers,
+                                   policy=self.default_acl,
+                                   rewind=True, **options)
+
+    def _get_or_create_bucket(self, name):
+        """
+        Retrieves a bucket if it exists, otherwise creates it.
+        """
+        if self.durable_reduced_availability:
+            storage_class = 'DURABLE_REDUCED_AVAILABILITY'
+        else:
+            storage_class = 'STANDARD'
+        try:
+            return self.connection.get_bucket(name,
+                validate=self.auto_create_bucket)
+        except self.connection_response_error:
+            if self.auto_create_bucket:
+                bucket = self.connection.create_bucket(name, storage_class=storage_class)
+                bucket.set_acl(self.bucket_acl)
+                return bucket
+            raise ImproperlyConfigured("Bucket %s does not exist. Buckets "
+                                       "can be automatically created by "
+                                       "setting GS_AUTO_CREATE_BUCKET to "
+                                       "True." % name)
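
The new _get_or_create_bucket maps the durable_reduced_availability flag onto boto's storage_class argument. In plain boto terms it does roughly the following (a sketch with placeholder credentials and bucket name, not code from this commit):

    from boto.exception import GSResponseError
    from boto.gs.connection import GSConnection

    conn = GSConnection('access-key-id', 'secret-key')  # placeholders
    try:
        bucket = conn.get_bucket('example-bucket', validate=True)
    except GSResponseError:
        # GSConnection.create_bucket accepts a storage_class argument;
        # this is where 'DURABLE_REDUCED_AVAILABILITY' ends up
        bucket = conn.create_bucket('example-bucket',
                                    storage_class='DURABLE_REDUCED_AVAILABILITY')
        bucket.set_acl('public-read')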

storages/backends/s3boto.py

     file_class = S3BotoStorageFile
     key_class = S3Key
 
+    # used for looking up the access and secret key from env vars
+    access_key_names = ['AWS_S3_ACCESS_KEY_ID', 'AWS_ACCESS_KEY_ID']
+    secret_key_names = ['AWS_S3_SECRET_ACCESS_KEY', 'AWS_SECRET_ACCESS_KEY']
+
     access_key = setting('AWS_S3_ACCESS_KEY_ID', setting('AWS_ACCESS_KEY_ID'))
     secret_key = setting('AWS_S3_SECRET_ACCESS_KEY', setting('AWS_SECRET_ACCESS_KEY'))
     file_overwrite = setting('AWS_S3_FILE_OVERWRITE', True)
         are provided to the class in the constructor or in the
         settings then get them from the environment variables.
         """
-        access_key = self.access_key
-        secret_key = self.secret_key
-
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            # TODO: this seems to be broken
-            access_key = os.environ.get(self.access_key)
-            secret_key = os.environ.get(self.secret_key)
-
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-
-        return None, None
+        def lookup_env(names):
+            for name in names:
+                value = os.environ.get(name)
+                if value:
+                    return value
+        access_key = self.access_key or lookup_env(self.access_key_names)
+        secret_key = self.secret_key or lookup_env(self.secret_key_names)
+        return access_key, secret_key
 
     def _get_or_create_bucket(self, name):
         """
 
     def _open(self, name, mode='rb'):
         name = self._normalize_name(self._clean_name(name))
-        f = S3BotoStorageFile(name, mode, self)
+        f = self.file_class(name, mode, self)
         if not f.key:
             raise IOError('File does not exist: %s' % name)
         return f
             self._entries[encoded_name] = key
 
         key.set_metadata('Content-Type', content_type)
+        self._save_content(key, content, headers=headers)
+        return cleaned_name
+
+    def _save_content(self, key, content, headers):
         # only pass backwards-incompatible arguments if they vary from the default
         kwargs = {}
         if self.encryption:
                                    policy=self.default_acl,
                                    reduced_redundancy=self.reduced_redundancy,
                                    rewind=True, **kwargs)
-        return cleaned_name
 
     def delete(self, name):
         name = self._normalize_name(self._clean_name(name))
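
With the new access_key_names/secret_key_names lists, _get_access_keys now falls back to environment variables, checking each name in order (the AWS_S3_* names win over the plain AWS_* ones). A minimal sketch, assuming the storage class here is S3BotoStorage and a configured Django project (placeholder values throughout):

    import os

    # placeholders -- real credentials would normally be exported by the shell
    os.environ['AWS_ACCESS_KEY_ID'] = 'access-key-id'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'secret-key'

    from storages.backends.s3boto import S3BotoStorage

    storage = S3BotoStorage()
    # with AWS_S3_ACCESS_KEY_ID unset in settings and the environment,
    # lookup_env() falls through to the AWS_ACCESS_KEY_ID exported above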