David Larlet avatar David Larlet committed a7a21b1

Rename storage modules (PEP8). Thanks Jannis Leidel.

Comments (0)

Files changed (14)

backends/DatabaseStorage.py

-# DatabaseStorage for django.
-# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
-
-
-from django.core.files.storage import Storage
-from django.core.files import File
-from django.conf import settings
-
-import StringIO
-import urlparse
-
-import pyodbc
-
-class DatabaseStorage(Storage):
-    """
-    Class DatabaseStorage provides storing files in the database. 
-    """
-
-    def __init__(self, option=settings.DB_FILES):
-        """Constructor. 
-        
-        Constructs object using dictionary either specified in contucotr or
-in settings.DB_FILES. 
-        
-        @param option dictionary with 'db_table', 'fname_column',
-'blob_column', 'size_column', 'base_url'  keys. 
-        
-        option['db_table']
-            Table to work with.
-        option['fname_column']
-            Column in the 'db_table' containing filenames (filenames can
-contain pathes). Values should be the same as where FileField keeps
-filenames. 
-            It is used to map filename to blob_column. In sql it's simply
-used in where clause. 
-        option['blob_column']
-            Blob column (for example 'image' type), created manually in the
-'db_table', used to store image.
-        option['size_column']
-            Column to store file size. Used for optimization of size()
-method (another way is to open file and get size)
-        option['base_url']
-            Url prefix used with filenames. Should be mapped to the view,
-that returns an image as result. 
-        """
-        
-        if not option or not (option.has_key('db_table') and option.has_key('fname_column') and option.has_key('blob_column')
-                              and option.has_key('size_column') and option.has_key('base_url') ):
-            raise ValueError("You didn't specify required options")
-        self.db_table = option['db_table']
-        self.fname_column = option['fname_column']
-        self.blob_column = option['blob_column']
-        self.size_column = option['size_column']
-        self.base_url = option['base_url']
-
-        #get database settings
-        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
-        self.DATABASE_NAME = settings.DATABASE_NAME
-        self.DATABASE_USER = settings.DATABASE_USER
-        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
-        self.DATABASE_HOST = settings.DATABASE_HOST
-        
-        self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME,
-                                                                                          self.DATABASE_USER, self.DATABASE_PASSWORD) )
-        self.cursor = self.connection.cursor()
-
-    def _open(self, name, mode='rb'):
-        """Open a file from database. 
-        
-        @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends pathes with "/".
-        If there is no such file in the db, returs None
-        """
-        
-        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode
-
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone()
-        if row is None:
-            return None
-        inMemFile = StringIO.StringIO(row[0])
-        inMemFile.name = name
-        inMemFile.mode = mode
-        
-        retFile = File(inMemFile)
-        return retFile
-
-    def _save(self, name, content):
-        """Save 'content' as file named 'name'.
-        
-        @note '\' in path will be converted to '/'. 
-        """
-        
-        name = name.replace('\\', '/')
-        binary = pyodbc.Binary(content.read())
-        size = len(binary)
-        
-        #todo: check result and do something (exception?) if failed.
-        if self.exists(name):
-            self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), 
-                                 (binary, size)  )
-        else:
-            self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size)  )
-        self.connection.commit()
-        return name
-
-    def exists(self, name):
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone()
-        return row is not None
-    
-    def get_available_name(self, name):
-        return name
-
-    def delete(self, name):
-        if self.exists(name):
-            self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name))
-            self.connection.commit()
-
-    def url(self, name):
-        if self.base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        return urlparse.urljoin(self.base_url, name).replace('\\', '/')
-    
-    def size(self, name):
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone()
-        if row is None:
-            return 0
-        else:
-            return int(row[0])

backends/FTPStorage.py

-# FTP storage class for Django pluggable storage system.
-# Author: Rafal Jonca <jonca.rafal@gmail.com>
-# License: MIT
-# Comes from http://www.djangosnippets.org/snippets/1269/
-#
-# Usage:
-#
-# Add below to settings.py:
-# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
-#
-# In models.py you can write:
-# from FTPStorage import FTPStorage
-# fs = FTPStorage()
-# class FTPTest(models.Model):
-#     file = models.FileField(upload_to='a/b/c/', storage=fs)
-
-import os
-import ftplib
-import urlparse
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-
-class FTPStorageException(Exception): pass
-
-class FTPStorage(Storage):
-    """FTP Storage class for Django pluggable storage system."""
-
-    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
-        self._config = self._decode_location(location)
-        self._base_url = base_url
-        self._connection = None
-
-    def _decode_location(self, location):
-        """Return splitted configuration data from location."""
-        splitted_url = urlparse.urlparse(location)
-        config = {}
-        
-        if splitted_url.scheme not in ('ftp', 'aftp'):
-            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
-        if splitted_url.hostname == '':
-            raise ImproperlyConfigured('You must at least provide hostname!')
-            
-        if splitted_url.scheme == 'aftp':
-            config['active'] = True
-        else:
-            config['active'] = False
-        config['path'] = splitted_url.path
-        config['host'] = splitted_url.hostname
-        config['user'] = splitted_url.username
-        config['passwd'] = splitted_url.password
-        config['port'] = int(splitted_url.port)
-        
-        return config
-
-    def _start_connection(self):
-        # Check if connection is still alive and if not, drop it.
-        if self._connection is not None:
-            try:
-                self._connection.pwd()
-            except ftplib.all_errors, e:
-                self._connection = None
-        
-        # Real reconnect
-        if self._connection is None:
-            ftp = ftplib.FTP()
-            try:
-                ftp.connect(self._config['host'], self._config['port'])
-                ftp.login(self._config['user'], self._config['passwd'])
-                if self._config['active']:
-                    ftp.set_pasv(False)
-                if self._config['path'] != '':
-                    ftp.cwd(self._config['path'])
-                self._connection = ftp
-                return
-            except ftplib.all_errors, e:
-                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))
-
-    def disconnect(self):
-        self._connection.quit()
-        self._connection = None
-
-    def _mkremdirs(self, path):
-        pwd = self._connection.pwd()
-        path_splitted = path.split('/')
-        for path_part in path_splitted:
-            try:
-                self._connection.cwd(path_part)
-            except:
-                try:
-                    self._connection.mkd(path_part)
-                    self._connection.cwd(path_part)
-                except ftplib.all_errors, e:
-                    raise FTPStorageException('Cannot create directory chain %s' % path)                    
-        self._connection.cwd(pwd)
-        return
-
-    def _put_file(self, name, content):
-        # Connection must be open!
-        try:
-            self._mkremdirs(os.path.dirname(name))
-            pwd = self._connection.pwd()
-            self._connection.cwd(os.path.dirname(name))
-            self._connection.storbinary('STOR ' + os.path.basename(name), content.file, content.DEFAULT_CHUNK_SIZE)
-            self._connection.cwd(pwd)
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error writing file %s' % name)
-
-    def _open(self, name, mode='rb'):
-        remote_file = FTPStorageFile(name, self, mode=mode)
-        return remote_file
-
-    def _read(self, name):
-        memory_file = StringIO()
-        try:
-            pwd = self._connection.pwd()
-            self._connection.cwd(os.path.dirname(name))
-            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
-            self._connection.cwd(pwd)
-            return memory_file
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error reading file %s' % name)
-        
-    def _save(self, name, content):
-        content.open()
-        self._start_connection()
-        self._put_file(name, content)
-        content.close()
-        return name
-
-    def _get_dir_details(self, path):
-        # Connection must be open!
-        try:
-            lines = []
-            self._connection.retrlines('LIST '+path, lines.append)
-            dirs = {}
-            files = {}
-            for line in lines:
-                words = line.split()
-                if len(words) < 6:
-                    continue
-                if words[-2] == '->':
-                    continue
-                if words[0][0] == 'd':
-                    dirs[words[-1]] = 0;
-                elif words[0][0] == '-':
-                    files[words[-1]] = int(words[-5]);
-            return dirs, files
-        except ftplib.all_errors, msg:
-            raise FTPStorageException('Error getting listing for %s' % path)
-
-    def listdir(self, path):
-        self._start_connection()
-        try:
-            dirs, files = self._get_dir_details(path)
-            return dirs.keys(), files.keys()
-        except FTPStorageException, e:
-            raise
-
-    def delete(self, name):
-        if not self.exists(name):
-            return
-        self._start_connection()
-        try:
-            self._connection.delete(name)
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error when removing %s' % name)                 
-
-    def exists(self, name):
-        self._start_connection()
-        try:
-            if os.path.basename(name) in self._connection.nlst(os.path.dirname(name) + '/'):
-                return True
-            else:
-                return False
-        except ftplib.error_temp, e:
-            return False
-        except ftplib.error_perm, e:
-            # error_perm: 550 Can't find file
-            return False
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error when testing existence of %s' % name)            
-
-    def size(self, name):
-        self._start_connection()
-        try:
-            dirs, files = self._get_dir_details(os.path.dirname(name))
-            if os.path.basename(name) in files:
-                return files[os.path.basename(name)]
-            else:
-                return 0
-        except FTPStorageException, e:
-            return 0
-
-    def url(self, name):
-        if self._base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        return urlparse.urljoin(self._base_url, name).replace('\\', '/')
-
-class FTPStorageFile(File):
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-        self.file = StringIO()
-        self._is_read = False
-    
-    @property
-    def size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self._name)
-        return self._size
-
-    def read(self, num_bytes=None):
-        if not self._is_read:
-            self._storage._start_connection()
-            self.file = self._storage._read(self._name)
-            self._storage._end_connection()
-            self._is_read = True
-            
-        return self.file.read(num_bytes)
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-        self._is_read = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._start_connection()
-            self._storage._put_file(self._name, self.file.getvalue())
-            self._storage._end_connection()
-        self.file.close()

backends/ImageStorage.py

-
-import os
-from PIL import ImageFile as PILImageFile
-from django.core.files.storage import FileSystemStorage
-
-
-class ImageStorage(FileSystemStorage):
-    """
-    A FileSystemStorage which normalizes extensions for images.
-    
-    Comes from http://www.djangosnippets.org/snippets/965/
-    """
-    
-    def find_extension(self, format):
-        """Normalizes PIL-returned format into a standard, lowercase extension."""
-        format = format.lower()
-        
-        if format == 'jpeg':
-            format = 'jpg'
-        
-        return format
-    
-    def save(self, name, content):
-        dirname = os.path.dirname(name)
-        basename = os.path.basename(name)
-        
-        # Use PIL to determine filetype
-        
-        p = PILImageFile.Parser()
-        while 1:
-            data = content.read(1024)
-            if not data:
-                break
-            p.feed(data)
-            if p.image:
-                im = p.image
-                break
-        
-        extension = self.find_extension(im.format)
-        
-        # Does the basename already have an extension? If so, replace it.
-        # bare as in without extension
-        bare_basename = basename if '.' not in basename else basename[:basename.rindex('.')]
-        basename = bare_basename + '.' + extension
-        
-        name = os.path.join(dirname, basename)
-        return super(ImageStorage, self).save(name, content)
-    

backends/MogileFSStorage.py

-import urlparse
-from StringIO import StringIO
-import mimetypes
-
-from django.core.files.storage import Storage
-from django.core.exceptions import ImproperlyConfigured
-from django.conf import settings
-from django.utils.text import force_unicode
-from django.http import HttpResponse, HttpResponseNotFound
-from django.core.cache import cache
-
-import mogilefs
-
-
-class MogileFSStorage(Storage):
-    """MogileFS filesystem storage"""
-    def __init__(self, base_url=settings.MEDIA_URL):
-        
-        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
-        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
-            self.base_url = settings.MOGILEFS_MEDIA_URL
-        else:
-            self.base_url = base_url
-                
-        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
-            if not hasattr(settings, var):
-                raise ImproperlyConfigured, "You must define %s to use the MogileFS backend." % var
-            
-        self.trackers = settings.MOGILEFS_TRACKERS
-        self.domain = settings.MOGILEFS_DOMAIN
-        self.client = mogilefs.Client(self.domain, self.trackers)
-    
-    def get_mogile_paths(self, filename):
-        return self.client.get_paths(filename)  
-    
-    # The following methods define the Backend API
-
-    def filesize(self, filename):
-        raise NotImplemented
-        #return os.path.getsize(self._get_absolute_path(filename))
-    
-    def path(self, filename):
-        paths = self.get_mogile_paths(filename)
-        if paths:
-            return self.get_mogile_paths(filename)[0]
-        else:
-            return None
-    
-    def url(self, filename):
-        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')
-
-    def open(self, filename, mode='rb'):
-        raise NotImplemented
-        #return open(self._get_absolute_path(filename), mode)
-
-    def exists(self, filename):
-        return filename in self.client
-
-    def save(self, filename, raw_contents):
-        filename = self.get_available_filename(filename)
-        
-        if not hasattr(self, 'mogile_class'):
-            self.mogile_class = None
-
-        # Write the file to mogile
-        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
-        if success:
-            print "Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])
-        else:
-            print "FAILURE writing file %s" % (filename)
-
-        return force_unicode(filename.replace('\\', '/'))
-
-    def delete(self, filename):
-        
-        self.client.delete(filename)
-            
-        
-def serve_mogilefs_file(request, key=None):
-    """
-    Called when a user requests an image.
-    Either reproxy the path to perlbal, or serve the image outright
-    """
-    # not the best way to do this, since we create a client each time
-    mimetype = mimetypes.guess_type(key)[0] or "application/x-octet-stream"
-    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
-    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
-        # we're reproxying with perlbal
-        
-        # check the path cache
-        
-        path = cache.get(key)
-
-        if not path:
-            path = client.get_paths(key)
-            cache.set(key, path, 60)
-    
-        if path:
-            response = HttpResponse(content_type=mimetype)
-            response['X-REPROXY-URL'] = path[0]
-        else:
-            response = HttpResponseNotFound()
-    
-    else:
-        # we don't have perlbal, let's just serve the image via django
-        file_data = client[key]
-        if file_data:
-            response = HttpResponse(file_data, mimetype=mimetype)
-        else:
-            response = HttpResponseNotFound()
-    
-    return response

backends/OverwriteStorage.py

-import os
-
-from django.conf import settings
-from django.core.files.storage import FileSystemStorage
-
-class OverwriteStorage(FileSystemStorage):
-    
-    def get_available_name(self, name):
-        """
-        Returns a filename that's free on the target storage system, and
-        available for new content to be written to.
-        
-        Comes from http://www.djangosnippets.org/snippets/976/
-        (even if it already exists in S3Storage for ages)
-        """
-        # If the filename already exists, remove it as if it was a true file system
-        if self.exists(name):
-            os.remove(os.path.join(settings.MEDIA_ROOT, name))
-        return name

backends/S3BotoStorage.py

-import os
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.utils.functional import curry
-
-ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
-SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
-AWS_HEADERS     = 'AWS_HEADERS'
-AWS_BUCKET_NAME = 'AWS_STORAGE_BUCKET_NAME'
-
-AWS_BUCKET_PREFIX = getattr(settings, AWS_BUCKET_NAME, {})
-
-try:
-    from boto.s3.connection import S3Connection
-    from boto.s3.key import Key
-except ImportError:
-    raise ImproperlyConfigured, "Could not load Boto's S3 bindings."
-
-class S3BotoStorage(Storage):
-    """Amazon Simple Storage Service using Boto"""
-    
-    def __init__(self, bucket="root", bucketprefix=AWS_BUCKET_PREFIX, access_key=None, secret_key=None, acl='public-read'):
-        self.acl = acl
-        
-        if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
-        
-        self.connection = S3Connection(access_key, secret_key)
-        self.bucket = self.connection.create_bucket(bucketprefix + bucket)
-        self.headers = getattr(settings, AWS_HEADERS, {})
-    
-    def _get_access_keys(self):
-        access_key = getattr(settings, ACCESS_KEY_NAME, None)
-        secret_key = getattr(settings, SECRET_KEY_NAME, None)
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            access_key = os.environ.get(ACCESS_KEY_NAME)
-            secret_key = os.environ.get(SECRET_KEY_NAME)
-        
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-        
-        return None, None
-    
-    def _open(self, name, mode='rb'):
-        return S3BotoStorageFile(name, mode, self)
-    
-    def _save(self, name, content):
-        k = self.bucket.get_key(name)
-        if not k:
-            k = self.bucket.new_key(name)
-        k.set_contents_from_file(content)
-        return name
-    
-    def delete(self, name):
-        self.bucket.delete_key(name)
-    
-    def exists(self, name):
-        k = Key(self.bucket, name)
-        return k.exists()
-    
-    def listdir(self, name):
-        return [l.name for l in self.bucket.list() if not len(name) or l.name[:len(name)] == name]
-    
-    def size(self, name):
-        return self.bucket.get_key(name).size
-    
-    def url(self, name):
-        return self.bucket.get_key(name).generate_url(3600, method='GET')
-    
-    def get_available_name(self, name):
-        """ Overwrite existing file with the same name. """
-        return name
-
-class S3BotoStorageFile(File):
-    def __init__(self, name, mode, storage):
-        self._storage = storage
-        self._name = name
-        self._mode = mode
-        self.key = storage.bucket.get_key(name)
-    
-    def size(self):
-        return self.key.size
-    
-    def read(self, *args, **kwargs):
-        return self.key.read(*args, **kwargs)
-    
-    def write(self, content):
-        self.key.set_contents_from_string(content)
-    
-    def close(self):
-        self.key.close()
-

backends/S3Storage.py

-import os
-import mimetypes
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.utils.functional import curry
-
-ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
-SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
-HEADERS = 'AWS_HEADERS'
-
-DEFAULT_ACL= getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
-QUERYSTRING_ACTIVE= getattr(settings, 'AWS_QUERYSTRING_ACTIVE', False)
-QUERYSTRING_EXPIRE= getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 60)
-
-try:
-    from S3 import AWSAuthConnection, QueryStringAuthGenerator
-except ImportError:
-    raise ImproperlyConfigured, "Could not load amazon's S3 bindings.\
-    \nSee http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134"
-
-
-class S3Storage(Storage):
-    """Amazon Simple Storage Service"""
-
-    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
-            access_key=None, secret_key=None, acl=DEFAULT_ACL, 
-            calling_format=settings.AWS_CALLING_FORMAT):
-        self.bucket = bucket
-        self.acl = acl
-
-        if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
-
-        self.connection = AWSAuthConnection(access_key, secret_key, 
-                            calling_format=calling_format)
-        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
-                            calling_format=calling_format, is_secure=False)
-        self.generator.set_expires_in(QUERYSTRING_EXPIRE)
-        
-        self.headers = getattr(settings, HEADERS, {})
-
-    def _get_access_keys(self):
-        access_key = getattr(settings, ACCESS_KEY_NAME, None)
-        secret_key = getattr(settings, SECRET_KEY_NAME, None)
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            access_key = os.environ.get(ACCESS_KEY_NAME)
-            secret_key = os.environ.get(SECRET_KEY_NAME)
-
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-
-        return None, None
-
-    def _get_connection(self):
-        return AWSAuthConnection(*self._get_access_keys())
-
-    def _put_file(self, name, content):
-        content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
-        self.headers.update({'x-amz-acl': self.acl, 'Content-Type': content_type})
-        response = self.connection.put(self.bucket, name, content, self.headers)
-        if response.http_response.status != 200:
-            raise IOError("S3StorageError: %s" % response.message)
-
-    def _open(self, name, mode='rb'):
-        remote_file = S3StorageFile(name, self, mode=mode)
-        return remote_file
-
-    def _read(self, name, start_range=None, end_range=None):
-        if start_range is None:
-            headers = {}
-        else:
-            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
-        response = self.connection.get(self.bucket, name, headers)
-        if response.http_response.status != 200:
-            raise IOError("S3StorageError: %s" % response.message)
-        headers = response.http_response.msg
-        return response.object.data, headers.get('etag', None), headers.get('content-range', None)
-        
-    def _save(self, name, content):
-        content.open()
-        if hasattr(content, 'chunks'):
-            content_str = ''.join(chunk for chunk in content.chunks())
-        else:
-            content_str = content.read()
-        self._put_file(name, content_str)
-        return name
-    
-    def delete(self, name):
-        response = self.connection.delete(self.bucket, name)
-        if response.http_response.status != 204:
-            raise IOError("S3StorageError: %s" % response.message)
-
-    def exists(self, name):
-        response = self.connection._make_request('HEAD', self.bucket, name)
-        return response.status == 200
-
-    def size(self, name):
-        response = self.connection._make_request('HEAD', self.bucket, name)
-        content_length = response.getheader('Content-Length')
-        return content_length and int(content_length) or 0
-    
-    def url(self, name):
-        if QUERYSTRING_ACTIVE:
-            return self.generator.generate_url('GET', self.bucket, name)
-        else:
-            return self.generator.make_bare_url(self.bucket, name)
-
-    ## UNCOMMENT BELOW IF NECESSARY
-    #def get_available_name(self, name):
-    #    """ Overwrite existing file with the same name. """
-    #    return name
-
-
-class S3StorageFile(File):
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-        self.file = StringIO()
-        self.start_range = 0
-    
-    @property
-    def size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self._name)
-        return self._size
-
-    def read(self, num_bytes=None):
-        if num_bytes is None:
-            args = []
-            self.start_range = 0
-        else:
-            args = [self.start_range, self.start_range+num_bytes-1]
-        data, etags, content_range = self._storage._read(self._name, *args)
-        if content_range is not None:
-            current_range, size = content_range.split(' ', 1)[1].split('/', 1)
-            start_range, end_range = current_range.split('-', 1)
-            self._size, self.start_range = int(size), int(end_range)+1
-        self.file = StringIO(data)
-        return self.file.getvalue()
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._put_file(self._name, self.file.getvalue())
-        self.file.close()

backends/database.py

+# DatabaseStorage for django.
+# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
+
+
+from django.core.files.storage import Storage
+from django.core.files import File
+from django.conf import settings
+
+import StringIO
+import urlparse
+
+import pyodbc
+
+class DatabaseStorage(Storage):
+    """
+    Class DatabaseStorage provides storing files in the database. 
+    """
+
+    def __init__(self, option=settings.DB_FILES):
+        """Constructor. 
+        
+        Constructs object using dictionary either specified in contucotr or
+in settings.DB_FILES. 
+        
+        @param option dictionary with 'db_table', 'fname_column',
+'blob_column', 'size_column', 'base_url'  keys. 
+        
+        option['db_table']
+            Table to work with.
+        option['fname_column']
+            Column in the 'db_table' containing filenames (filenames can
+contain pathes). Values should be the same as where FileField keeps
+filenames. 
+            It is used to map filename to blob_column. In sql it's simply
+used in where clause. 
+        option['blob_column']
+            Blob column (for example 'image' type), created manually in the
+'db_table', used to store image.
+        option['size_column']
+            Column to store file size. Used for optimization of size()
+method (another way is to open file and get size)
+        option['base_url']
+            Url prefix used with filenames. Should be mapped to the view,
+that returns an image as result. 
+        """
+        
+        if not option or not (option.has_key('db_table') and option.has_key('fname_column') and option.has_key('blob_column')
+                              and option.has_key('size_column') and option.has_key('base_url') ):
+            raise ValueError("You didn't specify required options")
+        self.db_table = option['db_table']
+        self.fname_column = option['fname_column']
+        self.blob_column = option['blob_column']
+        self.size_column = option['size_column']
+        self.base_url = option['base_url']
+
+        #get database settings
+        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
+        self.DATABASE_NAME = settings.DATABASE_NAME
+        self.DATABASE_USER = settings.DATABASE_USER
+        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
+        self.DATABASE_HOST = settings.DATABASE_HOST
+        
+        self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME,
+                                                                                          self.DATABASE_USER, self.DATABASE_PASSWORD) )
+        self.cursor = self.connection.cursor()
+
+    def _open(self, name, mode='rb'):
+        """Open a file from database. 
+        
+        @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends pathes with "/".
+        If there is no such file in the db, returs None
+        """
+        
+        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode
+
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone()
+        if row is None:
+            return None
+        inMemFile = StringIO.StringIO(row[0])
+        inMemFile.name = name
+        inMemFile.mode = mode
+        
+        retFile = File(inMemFile)
+        return retFile
+
+    def _save(self, name, content):
+        """Save 'content' as file named 'name'.
+        
+        @note '\' in path will be converted to '/'. 
+        """
+        
+        name = name.replace('\\', '/')
+        binary = pyodbc.Binary(content.read())
+        size = len(binary)
+        
+        #todo: check result and do something (exception?) if failed.
+        if self.exists(name):
+            self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), 
+                                 (binary, size)  )
+        else:
+            self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size)  )
+        self.connection.commit()
+        return name
+
+    def exists(self, name):
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone()
+        return row is not None
+    
+    def get_available_name(self, name):
+        return name
+
+    def delete(self, name):
+        if self.exists(name):
+            self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name))
+            self.connection.commit()
+
+    def url(self, name):
+        if self.base_url is None:
+            raise ValueError("This file is not accessible via a URL.")
+        return urlparse.urljoin(self.base_url, name).replace('\\', '/')
+    
+    def size(self, name):
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone()
+        if row is None:
+            return 0
+        else:
+            return int(row[0])
+# FTP storage class for Django pluggable storage system.
+# Author: Rafal Jonca <jonca.rafal@gmail.com>
+# License: MIT
+# Comes from http://www.djangosnippets.org/snippets/1269/
+#
+# Usage:
+#
+# Add below to settings.py:
+# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
+#
+# In models.py you can write:
+# from FTPStorage import FTPStorage
+# fs = FTPStorage()
+# class FTPTest(models.Model):
+#     file = models.FileField(upload_to='a/b/c/', storage=fs)
+
+import os
+import ftplib
+import urlparse
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+
class FTPStorageException(Exception):
    """Raised when an FTP operation (connect, transfer, listing) fails."""
    pass
+
class FTPStorage(Storage):
    """FTP Storage class for Django pluggable storage system.

    Files are stored on a remote FTP server described by a location URL of
    the form ``[a]ftp://user:password@host:port/path`` ('aftp' selects
    active-mode FTP). The connection is created lazily and reused.
    """

    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
        # NOTE: the defaults are evaluated at import time, so
        # FTP_STORAGE_LOCATION must be present in settings when this module
        # is first imported.
        self._config = self._decode_location(location)
        self._base_url = base_url
        self._connection = None

    def _decode_location(self, location):
        """Return splitted configuration data from location.

        @param location URL of the form [a]ftp://user:pass@host:port/path.
        @return dict with 'active', 'path', 'host', 'user', 'passwd', 'port'.
        @raise ImproperlyConfigured on a non-FTP scheme or missing hostname.
        """
        splitted_url = urlparse.urlparse(location)
        config = {}
        
        if splitted_url.scheme not in ('ftp', 'aftp'):
            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
        if splitted_url.hostname == '':
            # NOTE(review): urlparse yields None, not '', for a missing
            # hostname, so this guard may never trigger — confirm.
            raise ImproperlyConfigured('You must at least provide hostname!')
            
        # 'aftp' means active-mode FTP; plain 'ftp' uses passive mode.
        if splitted_url.scheme == 'aftp':
            config['active'] = True
        else:
            config['active'] = False
        config['path'] = splitted_url.path
        config['host'] = splitted_url.hostname
        config['user'] = splitted_url.username
        config['passwd'] = splitted_url.password
        # NOTE(review): port is None when the URL omits it, and int(None)
        # raises TypeError — the location apparently must always carry an
        # explicit port. Confirm or default to 21.
        config['port'] = int(splitted_url.port)
        
        return config

    def _start_connection(self):
        """Ensure self._connection is a live, logged-in FTP session."""
        # Check if connection is still alive and if not, drop it.
        if self._connection is not None:
            try:
                # Cheap probe: PWD fails fast on a dead control channel.
                self._connection.pwd()
            except ftplib.all_errors, e:
                self._connection = None
        
        # Real reconnect
        if self._connection is None:
            ftp = ftplib.FTP()
            try:
                ftp.connect(self._config['host'], self._config['port'])
                ftp.login(self._config['user'], self._config['passwd'])
                if self._config['active']:
                    ftp.set_pasv(False)
                if self._config['path'] != '':
                    # Start every session in the configured base directory.
                    ftp.cwd(self._config['path'])
                self._connection = ftp
                return
            except ftplib.all_errors, e:
                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))

    def disconnect(self):
        """Close the FTP session; assumes a connection is currently open."""
        self._connection.quit()
        self._connection = None

    def _mkremdirs(self, path):
        """Create each segment of `path` on the server, like mkdir -p.

        Restores the previous working directory before returning.
        """
        pwd = self._connection.pwd()
        path_splitted = path.split('/')
        for path_part in path_splitted:
            try:
                # Directory already exists if we can cwd into it.
                self._connection.cwd(path_part)
            except:
                # NOTE(review): bare except also swallows KeyboardInterrupt
                # etc.; `except ftplib.all_errors:` would be safer — confirm.
                try:
                    self._connection.mkd(path_part)
                    self._connection.cwd(path_part)
                except ftplib.all_errors, e:
                    raise FTPStorageException('Cannot create directory chain %s' % path)                    
        self._connection.cwd(pwd)
        return

    def _put_file(self, name, content):
        """Upload `content` (a File-like with .file) under remote path `name`."""
        # Connection must be open!
        try:
            self._mkremdirs(os.path.dirname(name))
            pwd = self._connection.pwd()
            self._connection.cwd(os.path.dirname(name))
            self._connection.storbinary('STOR ' + os.path.basename(name), content.file, content.DEFAULT_CHUNK_SIZE)
            self._connection.cwd(pwd)
        except ftplib.all_errors, e:
            raise FTPStorageException('Error writing file %s' % name)

    def _open(self, name, mode='rb'):
        """Return a lazy FTPStorageFile; nothing is fetched until read()."""
        remote_file = FTPStorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name):
        """Download `name` into an in-memory buffer and return it.

        Connection must already be open; the buffer is positioned at EOF.
        """
        memory_file = StringIO()
        try:
            pwd = self._connection.pwd()
            self._connection.cwd(os.path.dirname(name))
            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
            self._connection.cwd(pwd)
            return memory_file
        except ftplib.all_errors, e:
            raise FTPStorageException('Error reading file %s' % name)
        
    def _save(self, name, content):
        """Store `content` under `name` and return the name used."""
        content.open()
        self._start_connection()
        self._put_file(name, content)
        content.close()
        return name

    def _get_dir_details(self, path):
        """Parse a LIST of `path` into ({dirname: 0}, {filename: size}).

        Connection must be open. Symlinks ('->') and short lines are skipped;
        entry type is taken from the first character of the mode column.
        """
        # Connection must be open!
        try:
            lines = []
            self._connection.retrlines('LIST '+path, lines.append)
            dirs = {}
            files = {}
            for line in lines:
                words = line.split()
                if len(words) < 6:
                    continue
                if words[-2] == '->':
                    continue
                if words[0][0] == 'd':
                    dirs[words[-1]] = 0;
                elif words[0][0] == '-':
                    # Size is the 5th column from the right in `ls -l` output.
                    files[words[-1]] = int(words[-5]);
            return dirs, files
        except ftplib.all_errors, msg:
            raise FTPStorageException('Error getting listing for %s' % path)

    def listdir(self, path):
        """Return ([directory names], [file names]) for `path`."""
        self._start_connection()
        try:
            dirs, files = self._get_dir_details(path)
            return dirs.keys(), files.keys()
        except FTPStorageException, e:
            raise
+
class FTPStorageFile(File):
    """Lazy file object for FTPStorage.

    Reads download the whole remote file into an in-memory buffer on first
    access; writes replace the buffer and are flushed back on close().
    """
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False       # True once write() buffered new content
        self.file = StringIO()
        self._is_read = False        # True once the remote file was fetched
    
    @property
    def size(self):
        # Cached: a remote LIST is issued only on first access.
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            self._storage._start_connection()
            self.file = self._storage._read(self._name)
            # FTPStorage defines disconnect(), not _end_connection();
            # the original call raised AttributeError on first read.
            self._storage.disconnect()
            self._is_read = True
            
        return self.file.read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            self._storage._start_connection()
            # _put_file() expects a File-like object exposing `.file` and
            # DEFAULT_CHUNK_SIZE (it calls storbinary with them), so wrap
            # the buffered bytes instead of passing a raw string.
            self._storage._put_file(self._name, File(StringIO(self.file.getvalue())))
            self._storage.disconnect()
        self.file.close()

backends/image.py

+
+import os
+from PIL import ImageFile as PILImageFile
+from django.core.files.storage import FileSystemStorage
+
+
class ImageStorage(FileSystemStorage):
    """
    A FileSystemStorage which normalizes extensions for images.
    
    The uploaded content is sniffed with PIL and the filename extension is
    rewritten to match the actual image format (e.g. a PNG uploaded as
    'photo.jpg' is stored as 'photo.png').

    Comes from http://www.djangosnippets.org/snippets/965/
    """
    
    def find_extension(self, format):
        """Normalizes PIL-returned format into a standard, lowercase extension."""
        format = format.lower()
        
        if format == 'jpeg':
            format = 'jpg'
        
        return format
    
    def save(self, name, content):
        """Save `content`, renaming `name`'s extension to the detected format.

        Falls back to the caller-supplied name when PIL cannot identify the
        data as an image.
        """
        dirname = os.path.dirname(name)
        basename = os.path.basename(name)
        
        # Use PIL's incremental parser to determine the real filetype.
        p = PILImageFile.Parser()
        im = None
        while 1:
            data = content.read(1024)
            if not data:
                break
            p.feed(data)
            if p.image:
                im = p.image
                break
        
        if im is not None:
            extension = self.find_extension(im.format)
            
            # Does the basename already have an extension? If so, replace it.
            # bare as in without extension
            bare_basename = basename if '.' not in basename else basename[:basename.rindex('.')]
            basename = bare_basename + '.' + extension
            
            name = os.path.join(dirname, basename)
        # The sniffing loop above consumed part of the stream; rewind so the
        # full file — not a truncated tail — is written to disk.
        content.seek(0)
        return super(ImageStorage, self).save(name, content)
+    

backends/mogile.py

+import urlparse
+from StringIO import StringIO
+import mimetypes
+
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+from django.conf import settings
+from django.utils.text import force_unicode
+from django.http import HttpResponse, HttpResponseNotFound
+from django.core.cache import cache
+
+import mogilefs
+
+
+class MogileFSStorage(Storage):
+    """MogileFS filesystem storage"""
+    def __init__(self, base_url=settings.MEDIA_URL):
+        
+        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
+        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
+            self.base_url = settings.MOGILEFS_MEDIA_URL
+        else:
+            self.base_url = base_url
+                
+        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
+            if not hasattr(settings, var):
+                raise ImproperlyConfigured, "You must define %s to use the MogileFS backend." % var
+            
+        self.trackers = settings.MOGILEFS_TRACKERS
+        self.domain = settings.MOGILEFS_DOMAIN
+        self.client = mogilefs.Client(self.domain, self.trackers)
+    
+    def get_mogile_paths(self, filename):
+        return self.client.get_paths(filename)  
+    
+    # The following methods define the Backend API
+
+    def filesize(self, filename):
+        raise NotImplemented
+        #return os.path.getsize(self._get_absolute_path(filename))
+    
+    def path(self, filename):
+        paths = self.get_mogile_paths(filename)
+        if paths:
+            return self.get_mogile_paths(filename)[0]
+        else:
+            return None
+    
+    def url(self, filename):
+        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')
+
+    def open(self, filename, mode='rb'):
+        raise NotImplemented
+        #return open(self._get_absolute_path(filename), mode)
+
+    def exists(self, filename):
+        return filename in self.client
+
+    def save(self, filename, raw_contents):
+        filename = self.get_available_filename(filename)
+        
+        if not hasattr(self, 'mogile_class'):
+            self.mogile_class = None
+
+        # Write the file to mogile
+        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
+        if success:
+            print "Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])
+        else:
+            print "FAILURE writing file %s" % (filename)
+
+        return force_unicode(filename.replace('\\', '/'))
+
+    def delete(self, filename):
+        
+        self.client.delete(filename)
+            
+        
def serve_mogilefs_file(request, key=None):
    """
    Called when a user requests an image.
    Either reproxy the path to perlbal, or serve the image outright
    """
    # not the best way to do this, since we create a client each time
    mimetype = mimetypes.guess_type(key)[0] or "application/x-octet-stream"
    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
        # we're reproxying with perlbal
        
        # check the path cache (60 s TTL) before hitting the trackers
        path = cache.get(key)

        if not path:
            path = client.get_paths(key)
            cache.set(key, path, 60)
    
        if path:
            response = HttpResponse(content_type=mimetype)
            response['X-REPROXY-URL'] = path[0]
        else:
            response = HttpResponseNotFound()
    
    else:
        # we don't have perlbal, let's just serve the image via django
        file_data = client[key]
        if file_data:
            # content_type= for consistency with the branch above; `mimetype`
            # was the deprecated spelling of the same argument.
            response = HttpResponse(file_data, content_type=mimetype)
        else:
            response = HttpResponseNotFound()
    
    return response

backends/overwrite.py

+import os
+
+from django.conf import settings
+from django.core.files.storage import FileSystemStorage
+
class OverwriteStorage(FileSystemStorage):
    """FileSystemStorage that overwrites instead of renaming on collision."""
    
    def get_available_name(self, name):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        
        Comes from http://www.djangosnippets.org/snippets/976/
        (even if it already exists in S3Storage for ages)
        """
        # If the filename already exists, remove it as if it was a true file
        # system. Use the storage's own delete() so the configured `location`
        # is honored — os.remove(MEDIA_ROOT + name) broke for storages whose
        # location differs from MEDIA_ROOT.
        if self.exists(name):
            self.delete(name)
        return name
+import os
+import mimetypes
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.utils.functional import curry
+
# Names of the Django settings this backend reads.
ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
HEADERS = 'AWS_HEADERS'

# Backend tunables with sane defaults: canned ACL for uploads, whether url()
# returns signed query-string URLs, and their expiry in seconds.
DEFAULT_ACL= getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
QUERYSTRING_ACTIVE= getattr(settings, 'AWS_QUERYSTRING_ACTIVE', False)
QUERYSTRING_EXPIRE= getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 60)

# The legacy Amazon-sample S3 library is a hard requirement for this backend.
try:
    from S3 import AWSAuthConnection, QueryStringAuthGenerator
except ImportError:
    raise ImproperlyConfigured, "Could not load amazon's S3 bindings.\
    \nSee http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134"
+
+
class S3Storage(Storage):
    """Amazon Simple Storage Service backend (legacy Amazon S3 sample library).

    Objects live in a single bucket; url() returns either a bare URL or a
    time-limited signed URL depending on AWS_QUERYSTRING_ACTIVE.
    """

    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
            access_key=None, secret_key=None, acl=DEFAULT_ACL, 
            calling_format=settings.AWS_CALLING_FORMAT):
        self.bucket = bucket
        self.acl = acl

        # Credentials fall back to settings / environment when not given.
        if not access_key and not secret_key:
             access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key, 
                            calling_format=calling_format)
        # Separate generator for building (optionally signed) URLs.
        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
                            calling_format=calling_format, is_secure=False)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)
        
        # Extra headers sent with every PUT (e.g. Cache-Control).
        self.headers = getattr(settings, HEADERS, {})

    def _get_access_keys(self):
        """Return (access_key, secret_key) from settings, else environment,
        else (None, None)."""
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        # An incomplete pair in settings falls back to the environment.
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_connection(self):
        # Fresh connection with freshly-resolved credentials.
        return AWSAuthConnection(*self._get_access_keys())

    def _put_file(self, name, content):
        """PUT raw `content` bytes to the bucket under key `name`.

        Raises IOError when S3 answers anything but 200.
        """
        # NOTE(review): 'application/x-octet-stream' is a nonstandard MIME
        # token; the registered fallback is 'application/octet-stream'.
        content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
        # NOTE(review): this mutates the shared self.headers dict on every
        # call, so a per-call content type leaks into later requests.
        self.headers.update({'x-amz-acl': self.acl, 'Content-Type': content_type})
        response = self.connection.put(self.bucket, name, content, self.headers)
        if response.http_response.status != 200:
            raise IOError("S3StorageError: %s" % response.message)

    def _open(self, name, mode='rb'):
        # Lazy: no request is made until the file is read.
        remote_file = S3StorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        """GET key `name`, optionally a byte range.

        @return (data, etag, content-range header or None).
        @raise IOError on a non-200 response.
        """
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get(self.bucket, name, headers)
        if response.http_response.status != 200:
            raise IOError("S3StorageError: %s" % response.message)
        headers = response.http_response.msg
        return response.object.data, headers.get('etag', None), headers.get('content-range', None)
        
    def _save(self, name, content):
        """Upload `content` (django File) and return the name stored."""
        content.open()
        # Prefer chunked reading when available to build the payload.
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name
    
    def delete(self, name):
        # S3 answers 204 No Content on a successful delete.
        response = self.connection.delete(self.bucket, name)
        if response.http_response.status != 204:
            raise IOError("S3StorageError: %s" % response.message)

    def exists(self, name):
        # HEAD request: 200 means the key exists.
        response = self.connection._make_request('HEAD', self.bucket, name)
        return response.status == 200

    def size(self, name):
        """Return the object's Content-Length, or 0 when unavailable."""
        response = self.connection._make_request('HEAD', self.bucket, name)
        content_length = response.getheader('Content-Length')
        return content_length and int(content_length) or 0
    
    def url(self, name):
        """Return a signed, expiring URL or a bare public URL for `name`."""
        if QUERYSTRING_ACTIVE:
            return self.generator.generate_url('GET', self.bucket, name)
        else:
            return self.generator.make_bare_url(self.bucket, name)

    ## UNCOMMENT BELOW IF NECESSARY
    #def get_available_name(self, name):
    #    """ Overwrite existing file with the same name. """
    #    return name
+
+
class S3StorageFile(File):
    """File object for S3Storage.

    Reads are ranged GETs tracked via self.start_range; writes buffer in
    memory and are flushed with a single PUT on close().
    """
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False      # True once write() buffered new content
        self.file = StringIO()
        self.start_range = 0        # next byte offset for a ranged read
    
    @property
    def size(self):
        # Cached: one HEAD request on first access.
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        """Read the whole object, or the next `num_bytes` via a Range GET."""
        if num_bytes is None:
            args = []
            self.start_range = 0
        else:
            # Range header is inclusive on both ends, hence the -1.
            args = [self.start_range, self.start_range+num_bytes-1]
        data, etags, content_range = self._storage._read(self._name, *args)
        if content_range is not None:
            # Content-Range: "bytes start-end/total" — update total size and
            # advance the cursor past the bytes just returned.
            current_range, size = content_range.split(' ', 1)[1].split('/', 1)
            start_range, end_range = current_range.split('-', 1)
            self._size, self.start_range = int(size), int(end_range)+1
        self.file = StringIO(data)
        return self.file.getvalue()

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True

    def close(self):
        # Flush buffered content back to S3 only when something was written.
        if self._is_dirty:
            self._storage._put_file(self._name, self.file.getvalue())
        self.file.close()

backends/s3boto.py

+import os
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.utils.functional import curry
+
+ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
+SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
+AWS_HEADERS     = 'AWS_HEADERS'
+AWS_BUCKET_NAME = 'AWS_STORAGE_BUCKET_NAME'
+
+AWS_BUCKET_PREFIX = getattr(settings, AWS_BUCKET_NAME, {})
+
+try:
+    from boto.s3.connection import S3Connection
+    from boto.s3.key import Key
+except ImportError:
+    raise ImproperlyConfigured, "Could not load Boto's S3 bindings."
+
class S3BotoStorage(Storage):
    """Amazon Simple Storage Service using Boto.

    Existing files with the same name are overwritten on save (see
    get_available_name).
    """
    
    def __init__(self, bucket="root", bucketprefix=AWS_BUCKET_PREFIX, access_key=None, secret_key=None, acl='public-read'):
        self.acl = acl
        
        # Credentials fall back to settings / environment when not given.
        if not access_key and not secret_key:
             access_key, secret_key = self._get_access_keys()
        
        self.connection = S3Connection(access_key, secret_key)
        # NOTE(review): create_bucket issues a PUT Bucket request on every
        # instantiation — confirm this is intended vs. a plain get_bucket.
        self.bucket = self.connection.create_bucket(bucketprefix + bucket)
        # Extra headers from settings.AWS_HEADERS, if defined.
        self.headers = getattr(settings, AWS_HEADERS, {})
    
    def _get_access_keys(self):
        """Return (access_key, secret_key) from settings, else environment,
        else (None, None)."""
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        # An incomplete pair in settings falls back to the environment.
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)
        
        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key
        
        return None, None
    
    def _open(self, name, mode='rb'):
        return S3BotoStorageFile(name, mode, self)
    
    def _save(self, name, content):
        """Upload `content` to key `name`, creating the key if needed."""
        k = self.bucket.get_key(name)
        if not k:
            k = self.bucket.new_key(name)
        k.set_contents_from_file(content)
        return name
    
    def delete(self, name):
        self.bucket.delete_key(name)
    
    def exists(self, name):
        k = Key(self.bucket, name)
        return k.exists()
    
    def listdir(self, name):
        # NOTE(review): returns a flat list of keys sharing the prefix, not
        # the ([dirs], [files]) pair Django's Storage.listdir() documents.
        return [l.name for l in self.bucket.list() if not len(name) or l.name[:len(name)] == name]
    
    def size(self, name):
        return self.bucket.get_key(name).size
    
    def url(self, name):
        # Signed URL valid for one hour.
        return self.bucket.get_key(name).generate_url(3600, method='GET')
    
    def get_available_name(self, name):
        """ Overwrite existing file with the same name. """
        return name
+
class S3BotoStorageFile(File):
    """Thin File wrapper delegating directly to a boto Key."""
    def __init__(self, name, mode, storage):
        self._storage = storage
        self._name = name
        self._mode = mode
        # NOTE(review): get_key returns None when the key does not exist,
        # so read()/size() would fail on a missing object — confirm callers
        # only open existing keys.
        self.key = storage.bucket.get_key(name)
    
    def size(self):
        # NOTE(review): defined as a method here, while the sibling
        # S3StorageFile/FTPStorageFile expose size as a @property.
        return self.key.size
    
    def read(self, *args, **kwargs):
        return self.key.read(*args, **kwargs)
    
    def write(self, content):
        # Each write is a full upload of `content` to S3.
        self.key.set_contents_from_string(content)
    
    def close(self):
        self.key.close()
+
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.