Renato Pedigoni committed 7b9b627

set_contents_from_file, rewind=True
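
The change passes boto's rewind=True flag to Key.set_contents_from_file, which seeks the file object back to position 0 before reading from it. Without it, a file whose pointer has already advanced (Django may, for example, have read the upload to validate it) is sent from its current position, leaving a truncated or empty object on S3. A minimal sketch of the difference, with a hypothetical bucket and key name:

    import io

    from boto.s3.connection import S3Connection

    conn = S3Connection()                        # keys come from the environment
    bucket = conn.get_bucket('example-bucket')   # hypothetical bucket
    key = bucket.new_key('example.txt')          # hypothetical key

    content = io.BytesIO(b'hello world')
    content.read()                               # pointer is now at EOF

    # Without rewind=True boto reads from the current position and
    # stores an empty object; rewind=True seeks back to 0 first.
    key.set_contents_from_file(content, rewind=True)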

Files changed (1)

storages/backends/s3boto.py

 class S3BotoStorage(Storage):
     """
     Amazon Simple Storage Service using Boto
-    
+
     This storage backend supports opening files in read or write
     mode and supports streaming(buffering) data in chunks to S3
     when writing.
     def _get_access_keys(self):
         """
         Gets the access keys to use when accessing S3. If none
-        are provided to the class in the constructor or in the 
+        are provided to the class in the constructor or in the
         settings then get them from the environment variables.
         """
         access_key = ACCESS_KEY_NAME
 
         key.set_metadata('Content-Type', content_type)
         key.set_contents_from_file(content, headers=headers, policy=self.acl,
-                                 reduced_redundancy=self.reduced_redundancy)
+                                 reduced_redundancy=self.reduced_redundancy, rewind=True)
         return cleaned_name
 
     def delete(self, name):
 class S3BotoStorageFile(File):
     """
     The default file object used by the S3BotoStorage backend.
-    
+
     This file implements file streaming using boto's multipart
     uploading functionality. The file can be opened in read or
     write mode.
     in your application.
     """
     # TODO: Read/Write (rw) mode may be a bit undefined at the moment. Needs testing.
-    # TODO: When Django drops support for Python 2.5, rewrite to use the 
+    # TODO: When Django drops support for Python 2.5, rewrite to use the
     #       BufferedIO streams in the Python 2.6 io module.
 
     def __init__(self, name, mode, storage, buffer_size=FILE_BUFFER_SIZE):
         self._multipart = None
         # 5 MB is the minimum part size (if there is more than one part).
         # Amazon allows up to 10,000 parts.  The default supports uploads
-        # up to roughly 50 GB.  Increase the part size to accommodate 
+        # up to roughly 50 GB.  Increase the part size to accommodate
         # for files larger than this.
         self._write_buffer_size = buffer_size
         self._write_counter = 0
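
A side note on the buffer-size comment above: with S3's 5 MB minimum part size and the 10,000-part cap, a multipart upload tops out at 5 MB × 10,000 parts = 52,428,800,000 bytes, which is where the "roughly 50 GB" figure comes from; raising the part size raises that ceiling proportionally. A rough sketch of the boto multipart calls this file class streams through, with hypothetical bucket and file names:

    import io

    from boto.s3.connection import S3Connection

    PART_SIZE = 5 * 1024 * 1024    # 5 MB, S3's minimum part size
    MAX_PARTS = 10000              # Amazon's per-upload part cap

    conn = S3Connection()
    bucket = conn.get_bucket('example-bucket')   # hypothetical bucket

    mp = bucket.initiate_multipart_upload('big-file.bin')
    with open('big-file.bin', 'rb') as f:
        part_num = 0
        while True:
            chunk = f.read(PART_SIZE)
            if not chunk:
                break
            part_num += 1
            # Each buffered chunk is sent as one numbered part.
            mp.upload_part_from_file(io.BytesIO(chunk), part_num)
    mp.complete_upload()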