Commits

David Larlet committed 106927c

S3: chunk support is still in progress, but it looks very ugly now; a design decision is needed at this point. The new test suite is a bit broken, I know.

  • Parent commits 0131d5d


Files changed (3)

File S3Storage.py

 
     def _open(self, name, mode='rb'):
         response = self.connection.get(self.bucket, name)
+        sizer = self.size
+        reader = self._read
         writer = curry(self._put_file, name)
-        #print response.object.data
-        remote_file = S3StorageFile(response.object.data, mode, writer)
-        remote_file.size = self.size(name)
+        remote_file = S3StorageFile(name, response.object.data, mode, sizer, reader, writer)
         return remote_file
 
+    def _read(self, name, num_bytes=None):
+        if num_bytes is None:
+            headers = {}
+        else:
+            headers = {'Range': 'bytes=0-%s' % (num_bytes-1,)}
+        response = self.connection.get(self.bucket, name, headers)
+        return response.object.data
+        
     def _save(self, name, content):
         if hasattr(content, 'chunks'):
             content_str = ''.join(chunk for chunk in content.chunks())
 
 
 class S3StorageFile(File):
-    def __init__(self, data, mode, writer):
+    def __init__(self, name, data, mode, sizer, reader, writer):
+        self._name = name
         self._mode = mode
+        self._size_from_storage = sizer
+        self._read_from_storage = reader
         self._write_to_storage = writer
         self._is_dirty = False
         self.file = StringIO(data)
 
+    @property
+    def size(self):
+        if not hasattr(self, '_size'):
+            self._size = self._size_from_storage(self._name)
+        return self._size
+
     def read(self, num_bytes=None):
+        self.file = StringIO(self._read_from_storage(self._name, num_bytes))
         return self.file.getvalue()
 
     def write(self, content):
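
For context, the partial-read support added above comes down to translating a
byte count into an HTTP ``Range`` header before asking S3 for the object. The
helper below is only an illustrative sketch of that logic, not part of the
commit; it mirrors ``S3Storage._read``, where every read starts at byte 0
because the file object does not track a read offset yet:

    def range_header(num_bytes=None):
        # As in S3Storage._read: no argument means "fetch the whole object",
        # otherwise ask only for the first num_bytes bytes (range 0..num_bytes-1).
        if num_bytes is None:
            return {}
        return {'Range': 'bytes=0-%s' % (num_bytes - 1,)}

    # range_header(3) -> {'Range': 'bytes=0-2'}
    # range_header()  -> {}  (full download)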

File storages_tests/models.py

 
+import tempfile
+
+from django.db import models
+from django.core.files.base import ContentFile
+from django.core.files.storage import default_storage as s3_storage
+from django.core.cache import cache
+
+# Write out a file to be used as default content
+s3_storage.save('tests/default.txt', ContentFile('default content'))
+
+class MyStorage(models.Model):
+    def custom_upload_to(self, filename):
+        return 'foo'
+
+    def random_upload_to(self, filename):
+        # This returns a different result each time,
+        # to make sure it only gets called once.
+        import random
+        return '%s/%s' % (random.randint(100, 999), filename)
+
+    normal = models.FileField(storage=s3_storage, upload_to='tests')
+    custom = models.FileField(storage=s3_storage, upload_to=custom_upload_to)
+    random = models.FileField(storage=s3_storage, upload_to=random_upload_to)
+    default = models.FileField(storage=s3_storage, upload_to='tests', default='tests/default.txt')
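
As a side note, a callable ``upload_to`` is called by ``FileField`` with the
model instance and the original filename and must return the upload path. A
minimal sketch of how the two callables above resolve (illustration only, the
filenames are made up):

    instance = MyStorage()
    instance.custom_upload_to('ignored.txt')   # always 'foo'
    instance.random_upload_to('photo.jpg')     # e.g. '123/photo.jpg', differs per call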

File storages_tests/tests.py

 Settings
 --------
 
-Required
-~~~~~~~~
+``DEFAULT_FILE_STORAGE``
+~~~~~~~~~~~~~~~~~~~~~~~~
 
-First of all you have to specify S3 access stuff::
+This setting stores the path to the S3 storage class: the first part is the
+module path and the second the name of the class. If you've got
+``example.com`` on your ``PYTHONPATH`` and keep your storage file in
+``example.com/libs/storages/S3Storage.py``, the resulting setting will be::
 
-    DEFAULT_FILE_STORAGE = 'S3Storage.S3Storage'
-    AWS_ACCESS_KEY_ID = 'foo'
-    AWS_SECRET_ACCESS_KEY = 'bar'
-    AWS_STORAGE_BUCKET_NAME = 'baz'
+    DEFAULT_FILE_STORAGE = 'libs.storages.S3Storage.S3Storage'
 
+If you keep the same filename as in the repository, it should always end with
+``S3Storage.S3Storage``.
 
-Optionnal
-~~~~~~~~~
+``AWS_ACCESS_KEY_ID``
+~~~~~~~~~~~~~~~~~~~~~
 
-And optionnally, you can set custom settings::
+Your Amazon Web Services access key, as a string.
+
+``AWS_SECRET_ACCESS_KEY``
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Your Amazon Web Services secret access key, as a string.
+
+``AWS_STORAGE_BUCKET_NAME``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Your Amazon Web Services storage bucket name, as a string.
+
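For illustration, the required settings described so far could be combined in
``settings.py`` roughly as follows; the key and bucket values are placeholders,
and the storage path assumes ``S3Storage.py`` sits directly on your
``PYTHONPATH``::

    DEFAULT_FILE_STORAGE = 'S3Storage.S3Storage'
    AWS_ACCESS_KEY_ID = 'your-access-key-id'
    AWS_SECRET_ACCESS_KEY = 'your-secret-access-key'
    AWS_STORAGE_BUCKET_NAME = 'your-bucket-name'
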
+``AWS_CALLING_FORMAT``
+~~~~~~~~~~~~~~~~~~~~~~
+
+The way you'd like to call the Amazon Web Services API; for instance, if you
+prefer subdomain calls::
 
     from S3 import CallingFormat
     AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
+
+``AWS_HEADERS`` (optional)
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you'd like to set headers sent with each file served from the storage::
+
     # see http://developer.yahoo.com/performance/rules.html#expires
     AWS_HEADERS = {
         'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT', 
         'Cache-Control': 'max-age=86400',
         }
 
+
 Fields
 ------
 
 This way, if you define a new ``FileField``, it will use the S3 storage::
 
     >>> from django.db import models
-    >>> class MyModel(models.Model):
-    ...     myfile = models.FileField(upload_to='yourpath')
+    >>> class Resume(models.Model):
+    ...     pdf = models.FileField(upload_to='pdfs')
+    ...     photos = models.ImageField(upload_to='photos')
     ...
-    >>> mymodel = MyModel()
-    >>> print mymodel.myfile.storage
+    >>> resume = Resume()
+    >>> print resume.pdf.storage
     <S3Storage.S3Storage object at ...>
 
 
 
 Initialization::
 
-    >>> from django.core.files.storage import default_storage as s3_storage
+    >>> from django.core.files.storage import default_storage
+    >>> from django.core.files.base import ContentFile
+    >>> from django.core.cache import cache
+    >>> from models import MyStorage
+
+Storage
+-------
 
 Standard file access options are available, and work as expected::
 
-    >>> s3_storage.exists('storage_test')
+    >>> default_storage.exists('storage_test')
     False
-    >>> file = s3_storage.open('storage_test', 'w')
+    >>> file = default_storage.open('storage_test', 'w')
     >>> file.write('storage contents')
     >>> file.close()
     
-    >>> s3_storage.exists('storage_test')
+    >>> default_storage.exists('storage_test')
     True
-    >>> file = s3_storage.open('storage_test', 'r')
+    >>> file = default_storage.open('storage_test', 'r')
     >>> file.read()
     'storage contents'
     >>> file.close()
     
-    >>> s3_storage.delete('storage_test')
-    >>> s3_storage.exists('storage_test')
+    >>> default_storage.delete('storage_test')
+    >>> default_storage.exists('storage_test')
     False
 
+Model
+-----
+
+An object without a file has limited functionality::
+    
+    >>> obj1 = MyStorage()
+    >>> obj1.normal
+    <FieldFile: None>
+    >>> obj1.normal.size
+    Traceback (most recent call last):
+    ...
+    ValueError: The 'normal' attribute has no file associated with it.
+    
+Saving a file enables full functionality::
+    
+    >>> obj1.normal.save('django_test.txt', ContentFile('content'))
+    >>> obj1.normal
+    <FieldFile: tests/django_test.txt>
+    >>> obj1.normal.size
+    7
+    >>> obj1.normal.read()
+    'content'
+    
+Files can be read in a little at a time, if necessary::
+    
+    >>> obj1.normal.open()
+    >>> obj1.normal.read(3)
+    'con'
+    >>> obj1.normal.read()
+    'tent'
+    >>> '-'.join(obj1.normal.chunks(chunk_size=2))
+    'co-nt-en-t'
+    
+Save another file with the same name::
+    
+    >>> obj2 = MyStorage()
+    >>> obj2.normal.save('django_test.txt', ContentFile('more content'))
+    >>> obj2.normal
+    <FieldFile: tests/django_test_.txt>
+    >>> obj2.normal.size
+    12
+    
+Push the objects into the cache to make sure they pickle properly::
+    
+    >>> cache.set('obj1', obj1)
+    >>> cache.set('obj2', obj2)
+    >>> cache.get('obj2').normal
+    <FieldFile: tests/django_test_.txt>
+    
+Deleting an object deletes the file it uses, if there are no other objects
+still using that file::
+    
+    >>> obj2.delete()
+    >>> obj2.normal.save('django_test.txt', ContentFile('more content'))
+    >>> obj2.normal
+    <FieldFile: tests/django_test_.txt>
+    
+Default values allow an object to access a single file::
+    
+    >>> obj3 = MyStorage.objects.create()
+    >>> obj3.default
+    <FieldFile: tests/default.txt>
+    >>> obj3.default.read()
+    'default content'
+    
+But it shouldn't be deleted, even if there are no more objects using it::
+    
+    >>> obj3.delete()
+    >>> obj3 = MyStorage()
+    >>> obj3.default.read()
+    'default content'
+    
+Verify the fix for #5655, making sure the directory is only determined once::
+    
+    >>> obj4 = MyStorage()
+    >>> obj4.random.save('random_file', ContentFile('random content'))
+    >>> obj4.random
+    <FieldFile: .../random_file>
+    
+Clean up the temporary files::
+    
+    >>> obj1.normal.delete()
+    >>> obj2.normal.delete()
+    >>> obj3.default.delete()
+    >>> obj4.random.delete()
+
 """