Jannis Leidel committed 02c77e7

Added Google Cloud Storage backend based on the boto S3 storage backend.

Files changed (3)

storages/__init__.py

-__version__ = '1.1.5'
+__version__ = '1.1.6a1'

storages/backends/gs.py

+from django.conf import settings
+from storages.backends.s3boto import S3BotoStorage
+
+from boto.gs.connection import GSConnection, SubdomainCallingFormat
+from boto.exception import GSResponseError
+
+ACCESS_KEY_NAME = getattr(settings, 'GS_ACCESS_KEY_ID', None)
+SECRET_KEY_NAME = getattr(settings, 'GS_SECRET_ACCESS_KEY', None)
+HEADERS = getattr(settings, 'GS_HEADERS', {})
+STORAGE_BUCKET_NAME = getattr(settings, 'GS_BUCKET_NAME', None)
+AUTO_CREATE_BUCKET = getattr(settings, 'GS_AUTO_CREATE_BUCKET', False)
+DEFAULT_ACL = getattr(settings, 'GS_DEFAULT_ACL', 'public-read')
+BUCKET_ACL = getattr(settings, 'GS_BUCKET_ACL', DEFAULT_ACL)
+QUERYSTRING_AUTH = getattr(settings, 'GS_QUERYSTRING_AUTH', True)
+QUERYSTRING_EXPIRE = getattr(settings, 'GS_QUERYSTRING_EXPIRE', 3600)
+REDUCED_REDUNDANCY = getattr(settings, 'GS_REDUCED_REDUNDANCY', False)
+LOCATION = getattr(settings, 'GS_LOCATION', '')
+CUSTOM_DOMAIN = getattr(settings, 'GS_CUSTOM_DOMAIN', None)
+CALLING_FORMAT = getattr(settings, 'GS_CALLING_FORMAT', SubdomainCallingFormat())
+SECURE_URLS = getattr(settings, 'GS_SECURE_URLS', True)
+FILE_NAME_CHARSET = getattr(settings, 'GS_FILE_NAME_CHARSET', 'utf-8')
+FILE_OVERWRITE = getattr(settings, 'GS_FILE_OVERWRITE', True)
+FILE_BUFFER_SIZE = getattr(settings, 'GS_FILE_BUFFER_SIZE', 5242880)
+IS_GZIPPED = getattr(settings, 'GS_IS_GZIPPED', False)
+PRELOAD_METADATA = getattr(settings, 'GS_PRELOAD_METADATA', False)
+GZIP_CONTENT_TYPES = getattr(settings, 'GS_GZIP_CONTENT_TYPES', (
+    'text/css',
+    'application/javascript',
+    'application/x-javascript',
+))
+
+
+class GSBotoStorage(S3BotoStorage):
+    connection_class = GSConnection
+    connection_response_error = GSResponseError
+
+    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
+            secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL,
+            headers=HEADERS, gzip=IS_GZIPPED,
+            gzip_content_types=GZIP_CONTENT_TYPES,
+            querystring_auth=QUERYSTRING_AUTH,
+            querystring_expire=QUERYSTRING_EXPIRE,
+            reduced_redundancy=REDUCED_REDUNDANCY,
+            custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
+            location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
+            preload_metadata=PRELOAD_METADATA,
+            calling_format=CALLING_FORMAT):
+        super(GSBotoStorage, self).__init__(bucket=bucket,
+            access_key=access_key, secret_key=secret_key,
+            bucket_acl=bucket_acl, acl=acl, headers=headers, gzip=gzip,
+            gzip_content_types=gzip_content_types,
+            querystring_auth=querystring_auth,
+            querystring_expire=querystring_expire,
+            reduced_redundancy=reduced_redundancy,
+            custom_domain=custom_domain, secure_urls=secure_urls,
+            location=location, file_name_charset=file_name_charset,
+            preload_metadata=preload_metadata, calling_format=calling_format)
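
For context, the new backend is configured entirely through GS_*-prefixed Django settings that mirror the AWS_* settings of the S3 backend it subclasses. Below is a minimal, hypothetical settings sketch; the dotted storage path and all values are assumptions for illustration, not part of this commit.

    # settings.py -- hypothetical example, not part of this commit.
    # The GS_* names map onto the getattr() lookups at the top of
    # storages/backends/gs.py; the bucket name and flags are placeholders.
    DEFAULT_FILE_STORAGE = 'storages.backends.gs.GSBotoStorage'

    GS_BUCKET_NAME = 'my-bucket'
    GS_AUTO_CREATE_BUCKET = True      # create the bucket on first use
    GS_DEFAULT_ACL = 'public-read'    # matches the module-level default above
    GS_QUERYSTRING_AUTH = False       # serve plain, unsigned URLs

Credentials can also be passed directly to the constructor (the access_key and secret_key keyword arguments), which GSBotoStorage forwards unchanged to S3BotoStorage.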

storages/backends/s3boto.py

     from boto.s3.key import Key
 except ImportError:
     raise ImproperlyConfigured("Could not load Boto's S3 bindings.\n"
-                               "See http://code.google.com/p/boto/")
+                               "See https://github.com/boto/boto")
 
 ACCESS_KEY_NAME = getattr(settings, 'AWS_S3_ACCESS_KEY_ID', getattr(settings, 'AWS_ACCESS_KEY_ID', None))
 SECRET_KEY_NAME = getattr(settings, 'AWS_S3_SECRET_ACCESS_KEY', getattr(settings, 'AWS_SECRET_ACCESS_KEY', None))
     'application/x-javascript',
 ))
 
+
 if IS_GZIPPED:
     from gzip import GzipFile
 
     # the final path is '/' (or nothing, in which case final_path must be
     # equal to base_path).
     base_path_len = len(base_path)
-    if not final_path.startswith(base_path) \
-       or final_path[base_path_len:base_path_len + 1] not in ('', '/'):
+    if (not final_path.startswith(base_path) or
+            final_path[base_path_len:base_path_len + 1] not in ('', '/')):
         raise ValueError('the joined path is located outside of the base path'
                          ' component')
 
     mode and supports streaming(buffering) data in chunks to S3
     when writing.
     """
+    connection_class = S3Connection
+    connection_response_error = S3ResponseError
 
     def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
             secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL,
         self.location = location or ''
         self.location = self.location.lstrip('/')
         self.file_name_charset = file_name_charset
-
+        self.calling_format = calling_format
+        self._entries = {}
         if not access_key and not secret_key:
             access_key, secret_key = self._get_access_keys()
-
-        self.connection = S3Connection(access_key, secret_key,
-            calling_format=calling_format)
-        self._entries = {}
+        self.connection = self.connection_class(access_key, secret_key,
+            calling_format=self.calling_format)
 
     @property
     def bucket(self):
         try:
             return self.connection.get_bucket(name,
                 validate=AUTO_CREATE_BUCKET)
-        except S3ResponseError:
+        except self.connection_response_error:
             if AUTO_CREATE_BUCKET:
                 bucket = self.connection.create_bucket(name)
                 bucket.set_acl(self.bucket_acl)
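
The s3boto.py changes hoist the boto connection class and its error type into the connection_class and connection_response_error class attributes, so a subclass only needs to swap those two names (plus its own settings) to target a different boto-backed store, which is exactly what GSBotoStorage above does. A hypothetical smoke test of the resulting storage through Django's file API (names and values are examples, not part of this commit):

    # Hypothetical usage sketch -- assumes the settings sketched earlier.
    from django.core.files.base import ContentFile
    from storages.backends.gs import GSBotoStorage

    # Explicit credentials are forwarded to S3BotoStorage.__init__;
    # the bucket defaults to the GS_BUCKET_NAME setting.
    storage = GSBotoStorage(access_key='...', secret_key='...')

    name = storage.save('hello.txt', ContentFile('Hello from GSBotoStorage'))
    assert storage.exists(name)
    url = storage.url(name)    # signed URL when querystring_auth is enabled
    storage.delete(name)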