Commits

Jannis Leidel committed 67b7075

Made S3boto storage PEP 8 compatible.

Comments (0)

Files changed (1)

storages/backends/s3boto.py

 try:
     from cStringIO import StringIO
 except ImportError:
-    from StringIO import StringIO
+    from StringIO import StringIO  # noqa
 
 from django.conf import settings
 from django.core.files.base import File
     raise ImproperlyConfigured("Could not load Boto's S3 bindings.\n"
                                "See http://code.google.com/p/boto/")
 
-ACCESS_KEY_NAME     = getattr(settings, 'AWS_ACCESS_KEY_ID', None)
-SECRET_KEY_NAME     = getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
-HEADERS             = getattr(settings, 'AWS_HEADERS', {})
+ACCESS_KEY_NAME = getattr(settings, 'AWS_ACCESS_KEY_ID', None)
+SECRET_KEY_NAME = getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
+HEADERS = getattr(settings, 'AWS_HEADERS', {})
 STORAGE_BUCKET_NAME = getattr(settings, 'AWS_STORAGE_BUCKET_NAME', None)
-AUTO_CREATE_BUCKET  = getattr(settings, 'AWS_AUTO_CREATE_BUCKET', False)
-DEFAULT_ACL         = getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
-BUCKET_ACL          = getattr(settings, 'AWS_BUCKET_ACL', DEFAULT_ACL)
-QUERYSTRING_AUTH    = getattr(settings, 'AWS_QUERYSTRING_AUTH', True)
-QUERYSTRING_EXPIRE  = getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 3600)
-REDUCED_REDUNDANCY  = getattr(settings, 'AWS_REDUCED_REDUNDANCY', False)
-LOCATION            = getattr(settings, 'AWS_LOCATION', '')
-CUSTOM_DOMAIN       = getattr(settings, 'AWS_S3_CUSTOM_DOMAIN', None)
-CALLING_FORMAT      = getattr(settings, 'AWS_S3_CALLING_FORMAT', SubdomainCallingFormat())
-SECURE_URLS         = getattr(settings, 'AWS_S3_SECURE_URLS', True)
-FILE_NAME_CHARSET   = getattr(settings, 'AWS_S3_FILE_NAME_CHARSET', 'utf-8')
-FILE_OVERWRITE      = getattr(settings, 'AWS_S3_FILE_OVERWRITE', True)
-IS_GZIPPED          = getattr(settings, 'AWS_IS_GZIPPED', False)
-PRELOAD_METADATA    = getattr(settings, 'AWS_PRELOAD_METADATA', False)
-GZIP_CONTENT_TYPES  = getattr(settings, 'GZIP_CONTENT_TYPES', (
+AUTO_CREATE_BUCKET = getattr(settings, 'AWS_AUTO_CREATE_BUCKET', False)
+DEFAULT_ACL = getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
+BUCKET_ACL = getattr(settings, 'AWS_BUCKET_ACL', DEFAULT_ACL)
+QUERYSTRING_AUTH = getattr(settings, 'AWS_QUERYSTRING_AUTH', True)
+QUERYSTRING_EXPIRE = getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 3600)
+REDUCED_REDUNDANCY = getattr(settings, 'AWS_REDUCED_REDUNDANCY', False)
+LOCATION = getattr(settings, 'AWS_LOCATION', '')
+CUSTOM_DOMAIN = getattr(settings, 'AWS_S3_CUSTOM_DOMAIN', None)
+CALLING_FORMAT = getattr(settings, 'AWS_S3_CALLING_FORMAT',
+                         SubdomainCallingFormat())
+SECURE_URLS = getattr(settings, 'AWS_S3_SECURE_URLS', True)
+FILE_NAME_CHARSET = getattr(settings, 'AWS_S3_FILE_NAME_CHARSET', 'utf-8')
+FILE_OVERWRITE = getattr(settings, 'AWS_S3_FILE_OVERWRITE', True)
+IS_GZIPPED = getattr(settings, 'AWS_IS_GZIPPED', False)
+PRELOAD_METADATA = getattr(settings, 'AWS_PRELOAD_METADATA', False)
+GZIP_CONTENT_TYPES = getattr(settings, 'GZIP_CONTENT_TYPES', (
     'text/css',
     'application/javascript',
-    'application/x-javascript'
+    'application/x-javascript',
 ))
 
 if IS_GZIPPED:
     from gzip import GzipFile
 
+
 def safe_join(base, *paths):
     """
     A version of django.utils._os.safe_join for S3 paths.
 
-    Joins one or more path components to the base path component intelligently.
-    Returns a normalized version of the final path.
+    Joins one or more path components to the base path component
+    intelligently. Returns a normalized version of the final path.
 
-    The final path must be located inside of the base path component (otherwise
-    a ValueError is raised).
+    The final path must be located inside of the base path component
+    (otherwise a ValueError is raised).
 
-    Paths outside the base path indicate a possible security sensitive operation.
+    Paths outside the base path indicate a possible security
+    sensitive operation.
     """
     from urlparse import urljoin
     base_path = force_unicode(base)
     paths = map(lambda p: force_unicode(p), paths)
-    final_path = urljoin(base_path + ("/" if not base_path.endswith("/") else ""), *paths)
+    final_path = urljoin(base_path +
+        ("/" if not base_path.endswith("/") else ""), *paths)
     # Ensure final_path starts with base_path and that the next character after
     # the final path is '/' (or nothing, in which case final_path must be
     # equal to base_path).
     base_path_len = len(base_path)
     if not final_path.startswith(base_path) \
-       or final_path[base_path_len:base_path_len+1] not in ('', '/'):
+       or final_path[base_path_len:base_path_len + 1] not in ('', '/'):
         raise ValueError('the joined path is located outside of the base path'
                          ' component')
     return final_path
 
+
 class S3BotoStorage(Storage):
     """Amazon Simple Storage Service using Boto"""
 
     def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
-                       secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
-                       gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
-                       querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
-                       reduced_redundancy=REDUCED_REDUNDANCY,
-                       custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
-                       location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
-                       preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):
+            secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL,
+            headers=HEADERS, gzip=IS_GZIPPED,
+            gzip_content_types=GZIP_CONTENT_TYPES,
+            querystring_auth=QUERYSTRING_AUTH,
+            querystring_expire=QUERYSTRING_EXPIRE,
+            reduced_redundancy=REDUCED_REDUNDANCY,
+            custom_domain=CUSTOM_DOMAIN,
+            secure_urls=SECURE_URLS,
+            location=LOCATION,
+            file_name_charset=FILE_NAME_CHARSET,
+            preload_metadata=PRELOAD_METADATA,
+            calling_format=CALLING_FORMAT):
         self.bucket_acl = bucket_acl
         self.bucket_name = bucket
         self.acl = acl
         self.file_name_charset = file_name_charset
 
         if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
+            access_key, secret_key = self._get_access_keys()
 
-        self.connection = S3Connection(access_key, secret_key, calling_format=calling_format)
+        self.connection = S3Connection(access_key, secret_key,
+            calling_format=calling_format)
         self._entries = {}
 
     @property
     def _get_or_create_bucket(self, name):
         """Retrieves a bucket if it exists, otherwise creates it."""
         try:
-            return self.connection.get_bucket(name, validate=AUTO_CREATE_BUCKET)
-        except S3ResponseError, e:
+            return self.connection.get_bucket(name,
+                validate=AUTO_CREATE_BUCKET)
+        except S3ResponseError:
             if AUTO_CREATE_BUCKET:
                 bucket = self.connection.create_bucket(name)
                 bucket.set_acl(self.bucket_acl)
                 return bucket
-            raise ImproperlyConfigured, ("Bucket specified by "
-            "AWS_STORAGE_BUCKET_NAME does not exist. Buckets can be "
-            "automatically created by setting AWS_AUTO_CREATE_BUCKET=True")
+            raise ImproperlyConfigured("Bucket specified by "
+                "AWS_STORAGE_BUCKET_NAME does not exist. "
+                "Buckets can be automatically created by setting "
+                "AWS_AUTO_CREATE_BUCKET=True")
 
     def _clean_name(self, name):
         # Useful for windows' paths
         try:
             return safe_join(self.location, name).lstrip('/')
         except ValueError:
-            raise SuspiciousOperation("Attempted access to '%s' denied." % name)
+            raise SuspiciousOperation("Attempted access to '%s' denied." %
+                                      name)
 
     def _encode_name(self, name):
         return smart_str(name, encoding=self.file_name_charset)
         cleaned_name = self._clean_name(name)
         name = self._normalize_name(cleaned_name)
         headers = self.headers.copy()
-        content_type = getattr(content,'content_type', mimetypes.guess_type(name)[0] or Key.DefaultContentType)
+        content_type = getattr(content, 'content_type',
+            mimetypes.guess_type(name)[0] or Key.DefaultContentType)
 
         if self.gzip and content_type in self.gzip_content_types:
             content = self._compress_content(content)
             elif len(parts) > 1:
                 # Directory
                 dirs.add(parts[0])
-        return list(dirs),files
+        return list(dirs), files
 
     def size(self, name):
         name = self._normalize_name(self._clean_name(name))
 
     def modified_time(self, name):
         try:
-           from dateutil import parser, tz
+            from dateutil import parser, tz
         except ImportError:
             raise NotImplementedError()
         name = self._normalize_name(self._clean_name(name))
     def url(self, name):
         name = self._normalize_name(self._clean_name(name))
         if self.custom_domain:
-            return "%s://%s/%s" % ('https' if self.secure_urls else 'http', self.custom_domain, name)
-        else:
-            return self.connection.generate_url(self.querystring_expire, method='GET', \
-                    bucket=self.bucket.name, key=self._encode_name(name), query_auth=self.querystring_auth, \
-                    force_http=not self.secure_urls)
+            return "%s://%s/%s" % ('https' if self.secure_urls else 'http',
+                                   self.custom_domain, name)
+        return self.connection.generate_url(self.querystring_expire,
+            method='GET', bucket=self.bucket.name, key=self._encode_name(name),
+            query_auth=self.querystring_auth, force_http=not self.secure_urls)
 
     def get_available_name(self, name):
         """ Overwrite existing file with the same name. """
 
     def close(self):
         if self._is_dirty:
-            self.key.set_contents_from_file(self._file, headers=self._storage.headers, policy=self._storage.acl)
+            self.key.set_contents_from_file(self._file,
+                headers=self._storage.headers, policy=self._storage.acl)
         self.key.close()
Tip: Filter by directory path, e.g. /media app.js, to search for public/media/app.js.
Tip: Use camel-case abbreviations, e.g. ProjME, to search for ProjectModifiedEvent.java.
Tip: Filter by extension type, e.g. /repo .js, to search for all .js files in the /repo directory.
Tip: Separate your search terms with spaces, e.g. /ssh pom.xml, to search for src/ssh/pom.xml.
Tip: Use the ↑ and ↓ arrow keys to navigate, and press Return (Enter) to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: Alternatively, you can navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.