Commits

Wes Winham committed f12e3be

Fixed the safety check on URL joining when the base path ends with a /

Comments (0)

Files changed (1)

storages/backends/s3boto.py

 
     The final path must be located inside of the base path component (otherwise
     a ValueError is raised).
-    
+
     Paths outside the base path indicate a possible security sensitive operation.
     """
     from urlparse import urljoin
     # the final path is '/' (or nothing, in which case final_path must be
     # equal to base_path).
     base_path_len = len(base_path)
-    if not final_path.startswith(base_path) \
-       or final_path[base_path_len:base_path_len+1] not in ('', '/'):
+    if not final_path.startswith(base_path):
         raise ValueError('the joined path is located outside of the base path'
                          ' component')
     return final_path
 
 class S3BotoStorage(Storage):
     """Amazon Simple Storage Service using Boto"""
-    
+
     def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
                        secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
                        gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
         self.location = location or ''
         self.location = self.location.lstrip('/')
         self.file_name_charset = file_name_charset
-        
+
         if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
-        
+            access_key, secret_key = self._get_access_keys()
+
         self.connection = S3Connection(access_key, secret_key)
         self.bucket = self._get_or_create_bucket(bucket)
         self.bucket.set_acl(self.bucket_acl)
-    
+
     def _get_access_keys(self):
         access_key = ACCESS_KEY_NAME
         secret_key = SECRET_KEY_NAME
         if (access_key or secret_key) and (not access_key or not secret_key):
             access_key = os.environ.get(ACCESS_KEY_NAME)
             secret_key = os.environ.get(SECRET_KEY_NAME)
-        
+
         if access_key and secret_key:
             # Both were provided, so use them
             return access_key, secret_key
-        
+
         return None, None
-    
+
     def _get_or_create_bucket(self, name):
         """Retrieves a bucket if it exists, otherwise creates it."""
         try:
             raise ImproperlyConfigured, ("Bucket specified by "
             "AWS_STORAGE_BUCKET_NAME does not exist. Buckets can be "
             "automatically created by setting AWS_AUTO_CREATE_BUCKET=True")
-    
+
     def _clean_name(self, name):
         # Useful for windows' paths
         return os.path.normpath(name).replace('\\', '/')
         zfile.close()
         content.file = zbuf
         return content
-        
+
     def _open(self, name, mode='rb'):
         name = self._normalize_name(self._clean_name(name))
         f = S3BotoStorageFile(name, mode, self)
         if not f.key:
             raise IOError('File does not exist: %s' % name)
         return f
-    
+
     def _save(self, name, content):
         cleaned_name = self._clean_name(name)
         name = self._normalize_name(cleaned_name)
             k = self.bucket.new_key(self._encode_name(name))
 
         k.set_metadata('Content-Type',content_type)
-        k.set_contents_from_file(content, headers=headers, policy=self.acl, 
+        k.set_contents_from_file(content, headers=headers, policy=self.acl,
                                  reduced_redundancy=self.reduced_redundancy)
         return cleaned_name
-    
+
     def delete(self, name):
         name = self._normalize_name(self._clean_name(name))
         self.bucket.delete_key(self._encode_name(name))
-    
+
     def exists(self, name):
         name = self._normalize_name(self._clean_name(name))
         k = self.bucket.new_key(self._encode_name(name))
         return k.exists()
-    
+
     def listdir(self, name):
         name = self._normalize_name(self._clean_name(name))
         dirlist = self.bucket.list(self._encode_name(name))
             parts = item.name.split("/")
             parts = parts[len(base_parts):]
             if len(parts) == 1:
-                # File 
+                # File
                 files.append(parts[0])
             elif len(parts) > 1:
                 # Directory
     def size(self, name):
         name = self._normalize_name(self._clean_name(name))
         return self.bucket.get_key(self._encode_name(name)).size
-    
+
     def url(self, name):
         name = self._normalize_name(self._clean_name(name))
         if self.custom_domain: