Commits

Tobias McNulty committed c30b326 Merge

merge in upstream changes

Comments (0)

Files changed (10)

 0054d538ccb5e482e07fe43ae47b8d8c82f00818 1.1.1
 0054d538ccb5e482e07fe43ae47b8d8c82f00818 1.1.1
 374f6d14e3073d3f3ad4842fe906507493931713 1.1.1
+3e8e477cf67db87f3ac796e9743b81b71867fd0d 1.1.4
 django-storages change log
 ==========================
 
+1.1.4 (2012-01-06)
+******************
+
+* Added PendingDeprecationWarning for mosso backend
+* Merged pull request `#13`_ from marcoala, adds ``SFTP_KNOWN_HOST_FILE`` setting to SFTP storage backend
+* Merged pull request `#12`_ from ryankask, fixes HashPathStorage tests that delete remote media
+* Merged pull request `#10`_ from key, adds support for django-mongodb-engine 0.4.0 or later, fixes GridFS file deletion bug
+* Fixed S3BotoStorage performance problem calling modified_time()
+* Added deprecation warning for s3 backend, refs `#40`_
+* Fixed CLOUDFILES_CONNECTION_KWARGS import error, fixes `#78`_
+* Switched to sphinx documentation, set official docs up on http://django-storages.rtfd.org/
+* HashPathStorage uses self.exists now, fixes `#83`_
+
+.. _#13: https://bitbucket.org/david/django-storages/pull-request/13/a-version-of-sftp-storage-that-allows-you
+.. _#12: https://bitbucket.org/david/django-storages/pull-request/12/hashpathstorage-tests-deleted-my-projects
+.. _#10: https://bitbucket.org/david/django-storages/pull-request/10/support-django-mongodb-engine-040
+.. _#40: https://bitbucket.org/david/django-storages/issue/40/deprecate-s3py-backend
+.. _#78: https://bitbucket.org/david/django-storages/issue/78/import-error
+.. _#83: https://bitbucket.org/david/django-storages/issue/83/
+
 1.1.3 (2011-08-15)
 ******************
 
 from setuptools import setup, find_packages
 import storages
- 
+
 setup(
     name = 'django-storages',
     version = storages.__version__,
     packages = find_packages(),
-    
+
     author = 'David Larlet',
     author_email = 'david@larlet.fr',
     license = 'BSD',
     description = 'Support for many storages (S3, MogileFS, etc) in Django.',
     url='http://code.welldev.org/django-storages/',
-    download_url = "http://bitbucket.org/david/django-storages/get/tip.tar.gz",
     classifiers = [
-        'Development Status :: 4 - Beta',
+        'Development Status :: 5 - Production/Stable',
         'Environment :: Web Environment',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: BSD License',

storages/__init__.py

-__version__ = '1.1.3'
+__version__ = '1.1.4'

storages/backends/mosso.py

 Created by Rich Leland <rich@richleland.com>.
 """
 import os
+import warnings
+warnings.simplefilter('always', PendingDeprecationWarning)
+warnings.warn("The mosso module will be deprecated in version 1.2 of "
+              "django-storages. The CloudFiles code has been moved into"
+              " django-cumulus at http://github.com/richleland/django-cumulus.",
+              PendingDeprecationWarning)
 
 from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured

storages/backends/s3.py

File contents unchanged.

storages/backends/s3boto.py

 
     The final path must be located inside of the base path component (otherwise
     a ValueError is raised).
-    
+
     Paths outside the base path indicate a possible security sensitive operation.
     """
     from urlparse import urljoin
         self.location = location or ''
         self.location = self.location.lstrip('/')
         self.file_name_charset = file_name_charset
-        
+
         if not access_key and not secret_key:
              access_key, secret_key = self._get_access_keys()
-        
+
         self.connection = S3Connection(access_key, secret_key, calling_format=calling_format)
         self._entries = {}
 
         if (access_key or secret_key) and (not access_key or not secret_key):
             access_key = os.environ.get(ACCESS_KEY_NAME)
             secret_key = os.environ.get(SECRET_KEY_NAME)
-        
+
         if access_key and secret_key:
             # Both were provided, so use them
             return access_key, secret_key
-        
+
         return None, None
-    
+
     def _get_or_create_bucket(self, name):
         """Retrieves a bucket if it exists, otherwise creates it."""
         try:
             raise ImproperlyConfigured, ("Bucket specified by "
             "AWS_STORAGE_BUCKET_NAME does not exist. Buckets can be "
             "automatically created by setting AWS_AUTO_CREATE_BUCKET=True")
-    
+
     def _clean_name(self, name):
         # Useful for windows' paths
         return os.path.normpath(name).replace('\\', '/')
         zfile.close()
         content.file = zbuf
         return content
-        
+
     def _open(self, name, mode='rb'):
         name = self._normalize_name(self._clean_name(name))
         f = S3BotoStorageFile(name, mode, self)
         if not f.key:
             raise IOError('File does not exist: %s' % name)
         return f
-    
+
     def _save(self, name, content):
         cleaned_name = self._clean_name(name)
         name = self._normalize_name(cleaned_name)
             k = self.bucket.new_key(self._encode_name(name))
 
         k.set_metadata('Content-Type',content_type)
-        k.set_contents_from_file(content, headers=headers, policy=self.acl, 
+        k.set_contents_from_file(content, headers=headers, policy=self.acl,
                                  reduced_redundancy=self.reduced_redundancy,
                                  encrypt_key=self.encryption)
         return cleaned_name
-    
+
     def delete(self, name):
         name = self._normalize_name(self._clean_name(name))
         self.bucket.delete_key(self._encode_name(name))
-    
+
     def exists(self, name):
         name = self._normalize_name(self._clean_name(name))
         if self.entries:
             return name in self.entries
         k = self.bucket.new_key(self._encode_name(name))
         return k.exists()
-    
+
     def listdir(self, name):
         name = self._normalize_name(self._clean_name(name))
         dirlist = self.bucket.list(self._encode_name(name))
             parts = item.name.split("/")
             parts = parts[len(base_parts):]
             if len(parts) == 1:
-                # File 
+                # File
                 files.append(parts[0])
             elif len(parts) > 1:
                 # Directory
             name = self._clean_name(name)
             return name
         return super(S3BotoStorage, self).get_available_name(name)
-        
+
 
 class S3BotoStorageFile(File):
     def __init__(self, name, mode, storage):

storages/backends/sftpstorage.py

 #
 # SFTP_STORAGE_GID (Optional) - gid of the group that should be set on the
 # files on the remote host.  You have to be a member of the group to set this.
+# SFTP_KNOWN_HOST_FILE (Optional) - absolute path of the known hosts file; if it
+# isn't set, "~/.ssh/known_hosts" will be used
 
 
 import os
 
         self._uid = getattr(settings, 'SFTP_STORAGE_UID', None)
         self._gid = getattr(settings, 'SFTP_STORAGE_GID', None)
+        self._known_host_file = getattr(settings, 'SFTP_KNOWN_HOST_FILE', None)
 
         self._root_path = settings.SFTP_STORAGE_ROOT
 
     def _connect(self):
         self._ssh = paramiko.SSHClient()
 
-        # automatically add host keys from current user.
-        self._ssh.load_host_keys(os.path.expanduser(os.path.join("~", ".ssh", "known_hosts")))
+        if self._known_host_file is not None:
+            self._ssh.load_host_keys(self._known_host_file)
+        else:
+            # automatically add host keys from current user.
+            self._ssh.load_host_keys(os.path.expanduser(os.path.join("~", ".ssh", "known_hosts")))
 
         # and automatically add new host keys for hosts we haven't seen before.
         self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

storages/models.py

+from django.conf import settings
+
+
+if settings.DEFAULT_FILE_STORAGE.endswith('mosso.CloudFilesStorage'):
+    import warnings
+    warnings.simplefilter('always', PendingDeprecationWarning)
+    warnings.warn("The mosso module will be deprecated in version 1.2 of "
+                  "django-storages. The CloudFiles code has been moved into"
+                  " django-cumulus at http://github.com/richleland/django-cumulus.",
+                  PendingDeprecationWarning)
+

storages/tests/hashpath.py

 
 from storages.backends.hashpath import HashPathStorage
 
+TEST_PATH_PREFIX = 'django-storages-test'
+
 
 class HashPathStorageTest(TestCase):
 
     def setUp(self):
-        self.storage = HashPathStorage()
-        
+        self.test_path = os.path.join(settings.MEDIA_ROOT, TEST_PATH_PREFIX)
+        self.storage = HashPathStorage(location=self.test_path)
+
         # make sure the profile upload folder exists
-        if not os.path.exists(settings.MEDIA_ROOT):
-            os.makedirs(settings.MEDIA_ROOT)
-            
+        if not os.path.exists(self.test_path):
+            os.makedirs(self.test_path)
+
     def tearDown(self):
         # remove uploaded profile picture
-        if os.path.exists(settings.MEDIA_ROOT):
-            shutil.rmtree(settings.MEDIA_ROOT)
+        if os.path.exists(self.test_path):
+            shutil.rmtree(self.test_path)
 
     def test_save_same_file(self):
         """