Ian Lewis committed 0265860

Made the streaming write buffer size into a setting


Files changed (1)

storages/backends/s3boto.py

 SECURE_URLS = getattr(settings, 'AWS_S3_SECURE_URLS', True)
 FILE_NAME_CHARSET = getattr(settings, 'AWS_S3_FILE_NAME_CHARSET', 'utf-8')
 FILE_OVERWRITE = getattr(settings, 'AWS_S3_FILE_OVERWRITE', True)
+FILE_WRITE_BUFFER_SIZE = getattr(settings, 'AWS_S3_FILE_WRITE_BUFFER_SIZE', 5242880)
 IS_GZIPPED = getattr(settings, 'AWS_IS_GZIPPED', False)
 PRELOAD_METADATA = getattr(settings, 'AWS_PRELOAD_METADATA', False)
 GZIP_CONTENT_TYPES = getattr(settings, 'GZIP_CONTENT_TYPES', (
 
 
 class S3BotoStorageFile(File):
-    def __init__(self, name, mode, storage):
+    def __init__(self, name, mode, storage, buffer_size=FILE_WRITE_BUFFER_SIZE):
         self._storage = storage
         self.name = name[len(self._storage.location):].lstrip('/')
         self._mode = mode
         # Amazon allows up to 10,000 parts.  The default supports uploads
         # up to roughly 50 GB.  Increase the part size to accommodate
         # files larger than this.
-        self._write_buffer_size = 5242880
+        self._write_buffer_size = buffer_size
         self._write_counter = 0
 
     @property
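
Note on sizing: S3 multipart uploads are limited to 10,000 parts, so the largest file the streaming writer can handle is roughly buffer_size × 10,000; the 5 MB (5242880-byte) default gives about 50 GB. With this commit, a project expecting larger uploads can override the buffer from its Django settings module. A minimal sketch, assuming the setting name introduced here; the 25 MB value is illustrative:

    # settings.py
    # 25 MB parts -> multipart uploads up to ~250 GB (10,000-part S3 limit)
    AWS_S3_FILE_WRITE_BUFFER_SIZE = 25 * 1024 * 1024

Since S3 rejects multipart parts smaller than 5 MB (except the final part), values below the default are not useful.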