Russell Moffitt committed b2ac30a

added a multiconfig feature in the s3botomulti module; minor modifications to s3boto bring AUTO_CREATE_BUCKET and FILE_OVERWRITE into the S3BotoStorage __init__.


Files changed (2)

storages/backends/s3boto.py

 class S3BotoStorage(Storage):
     """Amazon Simple Storage Service using Boto"""
 
-    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
-                       secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
+    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None, secret_key=None,
+                       bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
+                       auto_create_bucket=AUTO_CREATE_BUCKET,
                        gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
                        querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
-                       reduced_redundancy=REDUCED_REDUNDANCY,
-                       custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
-                       location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
+                       reduced_redundancy=REDUCED_REDUNDANCY, custom_domain=CUSTOM_DOMAIN,
+                       secure_urls=SECURE_URLS, location=LOCATION,
+                       file_name_charset=FILE_NAME_CHARSET, file_overwrite=FILE_OVERWRITE,
                        preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):
         self.bucket_acl = bucket_acl
         self.bucket_name = bucket
         self.acl = acl
         self.headers = headers
+        self.auto_create_bucket = auto_create_bucket
         self.preload_metadata = preload_metadata
         self.gzip = gzip
         self.gzip_content_types = gzip_content_types
         self.location = location or ''
         self.location = self.location.lstrip('/')
         self.file_name_charset = file_name_charset
+        self.file_overwrite = file_overwrite
         
         if not access_key and not secret_key:
              access_key, secret_key = self._get_access_keys()
         try:
-            return self.connection.get_bucket(name, validate=AUTO_CREATE_BUCKET)
+            return self.connection.get_bucket(name, validate=self.auto_create_bucket)
         except S3ResponseError, e:
-            if AUTO_CREATE_BUCKET:
+            if self.auto_create_bucket:
                 bucket = self.connection.create_bucket(name)
                 bucket.set_acl(self.bucket_acl)
                 return bucket
 
     def get_available_name(self, name):
         """ Overwrite existing file with the same name. """
-        if FILE_OVERWRITE:
+        if self.file_overwrite:
             name = self._clean_name(name)
             return name
         return super(S3BotoStorage, self).get_available_name(name)

storages/backends/s3botomulti.py

+import os
+
+from django.conf import settings
+from storages.backends.s3boto import S3BotoStorage, SubdomainCallingFormat
+
+SETTINGS_DICT = getattr(settings, 'STORAGES_MULTI_S3BOTO', {})
+for s3store in SETTINGS_DICT:
+    # Wrap the class definition in a function closure so each store gets its own
+    # settings, avoiding conflicts between custom instances
+    def init_s3boto_storage_custom():
+        ACCESS_KEY_NAME     = SETTINGS_DICT[s3store].get('AWS_ACCESS_KEY_ID', None)
+        SECRET_KEY_NAME     = SETTINGS_DICT[s3store].get('AWS_SECRET_ACCESS_KEY', None)
+        HEADERS             = SETTINGS_DICT[s3store].get('AWS_HEADERS', {})
+        STORAGE_BUCKET_NAME = SETTINGS_DICT[s3store].get('AWS_STORAGE_BUCKET_NAME', None)
+        AUTO_CREATE_BUCKET  = SETTINGS_DICT[s3store].get('AWS_AUTO_CREATE_BUCKET', False)
+        DEFAULT_ACL         = SETTINGS_DICT[s3store].get('AWS_DEFAULT_ACL', 'public-read')
+        BUCKET_ACL          = SETTINGS_DICT[s3store].get('AWS_BUCKET_ACL', DEFAULT_ACL)
+        QUERYSTRING_AUTH    = SETTINGS_DICT[s3store].get('AWS_QUERYSTRING_AUTH', True)
+        QUERYSTRING_EXPIRE  = SETTINGS_DICT[s3store].get('AWS_QUERYSTRING_EXPIRE', 3600)
+        REDUCED_REDUNDANCY  = SETTINGS_DICT[s3store].get('AWS_REDUCED_REDUNDANCY', False)
+        LOCATION            = SETTINGS_DICT[s3store].get('AWS_LOCATION', '')
+        CUSTOM_DOMAIN       = SETTINGS_DICT[s3store].get('AWS_S3_CUSTOM_DOMAIN', None)
+        CALLING_FORMAT      = SETTINGS_DICT[s3store].get('AWS_S3_CALLING_FORMAT', SubdomainCallingFormat())
+        SECURE_URLS         = SETTINGS_DICT[s3store].get('AWS_S3_SECURE_URLS', True)
+        FILE_NAME_CHARSET   = SETTINGS_DICT[s3store].get('AWS_S3_FILE_NAME_CHARSET', 'utf-8')
+        FILE_OVERWRITE      = SETTINGS_DICT[s3store].get('AWS_S3_FILE_OVERWRITE', True)
+        IS_GZIPPED          = SETTINGS_DICT[s3store].get('AWS_IS_GZIPPED', False)
+        PRELOAD_METADATA    = SETTINGS_DICT[s3store].get('AWS_PRELOAD_METADATA', False)
+        GZIP_CONTENT_TYPES  = SETTINGS_DICT[s3store].get('GZIP_CONTENT_TYPES', (
+            'text/css',
+            'application/javascript',
+            'application/x-javascript'
+        ))
+
+        if IS_GZIPPED:
+            # Conditional import mirroring s3boto's module-level gzip guard
+            from gzip import GzipFile
+    
+        class S3BotoStorageCustom(S3BotoStorage):
+            """
+            S3 storage backend that takes its settings from the STORAGES_MULTI_S3BOTO dict
+            """
+            def __init__(self, *args, **kwargs):
+                super(S3BotoStorageCustom, self).__init__(*args, bucket=STORAGE_BUCKET_NAME, access_key=None, secret_key=None,
+                                   bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
+                                   auto_create_bucket=AUTO_CREATE_BUCKET,
+                                   gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
+                                   querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
+                                   reduced_redundancy=REDUCED_REDUNDANCY, custom_domain=CUSTOM_DOMAIN,
+                                   secure_urls=SECURE_URLS, location=LOCATION,
+                                   file_name_charset=FILE_NAME_CHARSET, file_overwrite=FILE_OVERWRITE,
+                                   preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT, **kwargs)
+
+            def _get_access_keys(self):
+                access_key = ACCESS_KEY_NAME
+                secret_key = SECRET_KEY_NAME
+                if (access_key or secret_key) and (not access_key or not secret_key):
+                    # Exactly one of the two was provided; fall back to the environment
+                    access_key = os.environ.get(ACCESS_KEY_NAME)
+                    secret_key = os.environ.get(SECRET_KEY_NAME)
+
+                if access_key and secret_key:
+                    # Both were provided, so use them
+                    return access_key, secret_key
+                else:
+                    # Still missing keys, so run the normal S3BotoStorage method
+                    # and get keys from global AWS_ACCESS_KEY_ID setting or
+                    # from environment variables
+                    return super(S3BotoStorageCustom, self)._get_access_keys()
+        
+        return S3BotoStorageCustom
+    
+    # Populate the module namespace with the customized class, naming it
+    # S3BotoStorage_<name>, where <name> is the dict key given in the settings.
+    globals()['S3BotoStorage_'+s3store] = init_s3boto_storage_custom()
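Each key in STORAGES_MULTI_S3BOTO therefore yields its own generated storage class. A sketch of how the setting might be filled in (store names, bucket names, and setting targets below are illustrative):

    # settings.py
    STORAGES_MULTI_S3BOTO = {
        'media': {
            'AWS_STORAGE_BUCKET_NAME': 'example-media',
            'AWS_S3_FILE_OVERWRITE': False,
        },
        'static': {
            'AWS_STORAGE_BUCKET_NAME': 'example-static',
            'AWS_QUERYSTRING_AUTH': False,
        },
    }

    # The module then exposes one generated class per key:
    DEFAULT_FILE_STORAGE = 'storages.backends.s3botomulti.S3BotoStorage_media'
    STATICFILES_STORAGE = 'storages.backends.s3botomulti.S3BotoStorage_static'

Stores that omit credentials fall back to the global AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY settings via the parent class's _get_access_keys.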