David Larlet avatar David Larlet committed 58069e6

Reorganisation of the repository, will ease packaging, thanks Ariel Nunez

Comments (0)

Files changed (25)

DatabaseStorage.py

-# DatabaseStorage for django.
-# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
-
-
-from django.core.files.storage import Storage
-from django.core.files import File
-from django.conf import settings
-
-import StringIO
-import urlparse
-
-import pyodbc
-
-class DatabaseStorage(Storage):
-    """
-    Class DatabaseStorage provides storing files in the database. 
-    """
-
-    def __init__(self, option=settings.DB_FILES):
-        """Constructor. 
-        
-        Constructs object using dictionary either specified in contucotr or
-in settings.DB_FILES. 
-        
-        @param option dictionary with 'db_table', 'fname_column',
-'blob_column', 'size_column', 'base_url'  keys. 
-        
-        option['db_table']
-            Table to work with.
-        option['fname_column']
-            Column in the 'db_table' containing filenames (filenames can
-contain pathes). Values should be the same as where FileField keeps
-filenames. 
-            It is used to map filename to blob_column. In sql it's simply
-used in where clause. 
-        option['blob_column']
-            Blob column (for example 'image' type), created manually in the
-'db_table', used to store image.
-        option['size_column']
-            Column to store file size. Used for optimization of size()
-method (another way is to open file and get size)
-        option['base_url']
-            Url prefix used with filenames. Should be mapped to the view,
-that returns an image as result. 
-        """
-        
-        if not option or not (option.has_key('db_table') and option.has_key('fname_column') and option.has_key('blob_column')
-                              and option.has_key('size_column') and option.has_key('base_url') ):
-            raise ValueError("You didn't specify required options")
-        self.db_table = option['db_table']
-        self.fname_column = option['fname_column']
-        self.blob_column = option['blob_column']
-        self.size_column = option['size_column']
-        self.base_url = option['base_url']
-
-        #get database settings
-        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
-        self.DATABASE_NAME = settings.DATABASE_NAME
-        self.DATABASE_USER = settings.DATABASE_USER
-        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
-        self.DATABASE_HOST = settings.DATABASE_HOST
-        
-        self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME,
-                                                                                          self.DATABASE_USER, self.DATABASE_PASSWORD) )
-        self.cursor = self.connection.cursor()
-
-    def _open(self, name, mode='rb'):
-        """Open a file from database. 
-        
-        @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends pathes with "/".
-        If there is no such file in the db, returs None
-        """
-        
-        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode
-
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone()
-        if row is None:
-            return None
-        inMemFile = StringIO.StringIO(row[0])
-        inMemFile.name = name
-        inMemFile.mode = mode
-        
-        retFile = File(inMemFile)
-        return retFile
-
-    def _save(self, name, content):
-        """Save 'content' as file named 'name'.
-        
-        @note '\' in path will be converted to '/'. 
-        """
-        
-        name = name.replace('\\', '/')
-        binary = pyodbc.Binary(content.read())
-        size = len(binary)
-        
-        #todo: check result and do something (exception?) if failed.
-        if self.exists(name):
-            self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), 
-                                 (binary, size)  )
-        else:
-            self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size)  )
-        self.connection.commit()
-        return name
-
-    def exists(self, name):
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone()
-        return row is not None
-    
-    def get_available_name(self, name):
-        return name
-
-    def delete(self, name):
-        if self.exists(name):
-            self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name))
-            self.connection.commit()
-
-    def url(self, name):
-        if self.base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        return urlparse.urljoin(self.base_url, name).replace('\\', '/')
-    
-    def size(self, name):
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone()
-        if row is None:
-            return 0
-        else:
-            return int(row[0])

FTPStorage.py

-# FTP storage class for Django pluggable storage system.
-# Author: Rafal Jonca <jonca.rafal@gmail.com>
-# License: MIT
-# Comes from http://www.djangosnippets.org/snippets/1269/
-#
-# Usage:
-#
-# Add below to settings.py:
-# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
-#
-# In models.py you can write:
-# from FTPStorage import FTPStorage
-# fs = FTPStorage()
-# class FTPTest(models.Model):
-#     file = models.FileField(upload_to='a/b/c/', storage=fs)
-
-import os
-import ftplib
-import urlparse
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-
-class FTPStorageException(Exception): pass
-
-class FTPStorage(Storage):
-    """FTP Storage class for Django pluggable storage system."""
-
-    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
-        self._config = self._decode_location(location)
-        self._base_url = base_url
-        self._connection = None
-
-    def _decode_location(self, location):
-        """Return splitted configuration data from location."""
-        splitted_url = urlparse.urlparse(location)
-        config = {}
-        
-        if splitted_url.scheme not in ('ftp', 'aftp'):
-            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
-        if splitted_url.hostname == '':
-            raise ImproperlyConfigured('You must at least provide hostname!')
-            
-        if splitted_url.scheme == 'aftp':
-            config['active'] = True
-        else:
-            config['active'] = False
-        config['path'] = splitted_url.path
-        config['host'] = splitted_url.hostname
-        config['user'] = splitted_url.username
-        config['passwd'] = splitted_url.password
-        config['port'] = int(splitted_url.port)
-        
-        return config
-
-    def _start_connection(self):
-        # Check if connection is still alive and if not, drop it.
-        if self._connection is not None:
-            try:
-                self._connection.pwd()
-            except ftplib.all_errors, e:
-                self._connection = None
-        
-        # Real reconnect
-        if self._connection is None:
-            ftp = ftplib.FTP()
-            try:
-                ftp.connect(self._config['host'], self._config['port'])
-                ftp.login(self._config['user'], self._config['passwd'])
-                if self._config['active']:
-                    ftp.set_pasv(False)
-                if self._config['path'] != '':
-                    ftp.cwd(self._config['path'])
-                self._connection = ftp
-                return
-            except ftplib.all_errors, e:
-                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))
-
-    def disconnect(self):
-        self._connection.quit()
-        self._connection = None
-
-    def _mkremdirs(self, path):
-        pwd = self._connection.pwd()
-        path_splitted = path.split('/')
-        for path_part in path_splitted:
-            try:
-                self._connection.cwd(path_part)
-            except:
-                try:
-                    self._connection.mkd(path_part)
-                    self._connection.cwd(path_part)
-                except ftplib.all_errors, e:
-                    raise FTPStorageException('Cannot create directory chain %s' % path)                    
-        self._connection.cwd(pwd)
-        return
-
-    def _put_file(self, name, content):
-        # Connection must be open!
-        try:
-            self._mkremdirs(os.path.dirname(name))
-            pwd = self._connection.pwd()
-            self._connection.cwd(os.path.dirname(name))
-            memory_file = StringIO(content)
-            self._connection.storbinary('STOR ' + os.path.basename(name), memory_file, 8*1024)
-            memory_file.close()
-            self._connection.cwd(pwd)
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error writing file %s' % name)
-
-    def _open(self, name, mode='rb'):
-        remote_file = FTPStorageFile(name, self, mode=mode)
-        return remote_file
-
-    def _read(self, name):
-        memory_file = StringIO()
-        try:
-            pwd = self._connection.pwd()
-            self._connection.cwd(os.path.dirname(name))
-            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
-            self._connection.cwd(pwd)
-            return memory_file
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error reading file %s' % name)
-        
-    def _save(self, name, content):
-        content.open()
-        if hasattr(content, 'chunks'):
-            content_str = ''.join(chunk for chunk in content.chunks())
-        else:
-            content_str = content.read()
-        self._start_connection()
-        self._put_file(name, content_str)
-        return name
-
-    def _get_dir_details(self, path):
-        # Connection must be open!
-        try:
-            lines = []
-            self._connection.retrlines('LIST '+path, lines.append)
-            dirs = {}
-            files = {}
-            for line in lines:
-                words = line.split()
-                if len(words) < 6:
-                    continue
-                if words[-2] == '->':
-                    continue
-                if words[0][0] == 'd':
-                    dirs[words[-1]] = 0;
-                elif words[0][0] == '-':
-                    files[words[-1]] = int(words[-5]);
-            return dirs, files
-        except ftplib.all_errors, msg:
-            raise FTPStorageException('Error getting listing for %s' % path)
-
-    def listdir(self, path):
-        self._start_connection()
-        try:
-            dirs, files = self._get_dir_details(path)
-            return dirs.keys(), files.keys()
-        except FTPStorageException, e:
-            raise
-
-    def delete(self, name):
-        if not self.exists(name):
-            return
-        self._start_connection()
-        try:
-            self._connection.delete(name)
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error when removing %s' % name)                 
-
-    def exists(self, name):
-        self._start_connection()
-        try:
-            if name in self._connection.nlst(os.path.dirname(name)):
-                return True
-            else:
-                return False
-        except ftplib.error_temp, e:
-            return False
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error when testing existence of %s' % name)            
-
-    def size(self, name):
-        self._start_connection()
-        try:
-            dirs, files = self._get_dir_details(os.path.dirname(name))
-            if os.path.basename(name) in files:
-                return files[os.path.basename(name)]
-            else:
-                return 0
-        except FTPStorageException, e:
-            return 0
-
-    def url(self, name):
-        if self._base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        return urlparse.urljoin(self._base_url, name).replace('\\', '/')
-
-class FTPStorageFile(File):
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-        self.file = StringIO()
-        self._is_read = False
-    
-    @property
-    def size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self._name)
-        return self._size
-
-    def read(self, num_bytes=None):
-        if not self._is_read:
-            self._storage._start_connection()
-            self.file = self._storage._read(self._name)
-            self._storage._end_connection()
-            self._is_read = True
-            
-        return self.file.read(num_bytes)
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-        self._is_read = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._start_connection()
-            self._storage._put_file(self._name, self.file.getvalue())
-            self._storage._end_connection()
-        self.file.close()

ImageStorage.py

-
-import os
-from PIL import ImageFile as PILImageFile
-from django.core.files.storage import FileSystemStorage
-
-
-class ImageStorage(FileSystemStorage):
-    """
-    A FileSystemStorage which normalizes extensions for images.
-    
-    Comes from http://www.djangosnippets.org/snippets/965/
-    """
-    
-    def find_extension(self, format):
-        """Normalizes PIL-returned format into a standard, lowercase extension."""
-        format = format.lower()
-        
-        if format == 'jpeg':
-            format = 'jpg'
-        
-        return format
-    
-    def save(self, name, content):
-        dirname = os.path.dirname(name)
-        basename = os.path.basename(name)
-        
-        # Use PIL to determine filetype
-        
-        p = PILImageFile.Parser()
-        while 1:
-            data = content.read(1024)
-            if not data:
-                break
-            p.feed(data)
-            if p.image:
-                im = p.image
-                break
-        
-        extension = self.find_extension(im.format)
-        
-        # Does the basename already have an extension? If so, replace it.
-        # bare as in without extension
-        bare_basename = basename if '.' not in basename else basename[:basename.rindex('.')]
-        basename = bare_basename + '.' + extension
-        
-        name = os.path.join(dirname, basename)
-        return super(ImageStorage, self).save(name, content)
-    

MogileFSStorage.py

-import urlparse
-from StringIO import StringIO
-from mimetypes import guess_type
-
-from django.core.files.storage import Storage
-from django.core.exceptions import ImproperlyConfigured
-from django.conf import settings
-from django.utils.text import force_unicode
-from django.http import HttpResponse, HttpResponseNotFound
-from django.core.cache import cache
-
-import mogilefs
-
-
-class MogileFSStorage(Storage):
-    """MogileFS filesystem storage"""
-    def __init__(self, base_url=settings.MEDIA_URL):
-        
-        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
-        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
-            self.base_url = settings.MOGILEFS_MEDIA_URL
-        else:
-            self.base_url = base_url
-                
-        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
-            if not hasattr(settings, var):
-                raise ImproperlyConfigured, "You must define %s to use the MogileFS backend." % var
-            
-        self.trackers = settings.MOGILEFS_TRACKERS
-        self.domain = settings.MOGILEFS_DOMAIN
-        self.client = mogilefs.Client(self.domain, self.trackers)
-    
-    def get_mogile_paths(self, filename):
-        return self.client.get_paths(filename)  
-    
-    # The following methods define the Backend API
-
-    def filesize(self, filename):
-        raise NotImplemented
-        #return os.path.getsize(self._get_absolute_path(filename))
-    
-    def path(self, filename):
-        paths = self.get_mogile_paths(filename)
-        if paths:
-            return self.get_mogile_paths(filename)[0]
-        else:
-            return None
-    
-    def url(self, filename):
-        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')
-
-    def open(self, filename, mode='rb'):
-        raise NotImplemented
-        #return open(self._get_absolute_path(filename), mode)
-
-    def exists(self, filename):
-        return filename in self.client
-
-    def save(self, filename, raw_contents):
-        filename = self.get_available_filename(filename)
-        
-        if not hasattr(self, 'mogile_class'):
-            self.mogile_class = None
-
-        # Write the file to mogile
-        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
-        if success:
-            print "Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])
-        else:
-            print "FAILURE writing file %s" % (filename)
-
-        return force_unicode(filename.replace('\\', '/'))
-
-    def delete(self, filename):
-        
-        self.client.delete(filename)
-            
-        
-def serve_mogilefs_file(request, key=None):
-    """
-    Called when a user requests an image.
-    Either reproxy the path to perlbal, or serve the image outright
-    """
-    # not the best way to do this, since we create a client each time
-    mimetype = guess_type(key)[0] or "application/x-octet-stream"
-    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
-    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
-        # we're reproxying with perlbal
-        
-        # check the path cache
-        
-        path = cache.get(key)
-
-        if not path:
-            path = client.get_paths(key)
-            cache.set(key, path, 60)
-    
-        if path:
-            response = HttpResponse(content_type=mimetype)
-            response['X-REPROXY-URL'] = path[0]
-        else:
-            response = HttpResponseNotFound()
-    
-    else:
-        # we don't have perlbal, let's just serve the image via django
-        file_data = client[key]
-        if file_data:
-            response = HttpResponse(file_data, mimetype=mimetype)
-        else:
-            response = HttpResponseNotFound()
-    
-    return response

OverwriteStorage.py

-import os
-
-from django.conf import settings
-from django.core.files.storage import FileSystemStorage
-
-class OverwriteStorage(FileSystemStorage):
-    
-    def get_available_name(self, name):
-        """
-        Returns a filename that's free on the target storage system, and
-        available for new content to be written to.
-        
-        Comes from http://www.djangosnippets.org/snippets/976/
-        (even if it already exists in S3Storage for ages)
-        """
-        # If the filename already exists, remove it as if it was a true file system
-        if self.exists(name):
-            os.remove(os.path.join(settings.MEDIA_ROOT, name))
-        return name

S3BotoStorage.py

-import os
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.utils.functional import curry
-
-ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
-SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
-AWS_HEADERS = 'AWS_HEADERS'
-
-try:
-    from boto.s3.connection import S3Connection
-    from boto.s3.key import Key
-except ImportError:
-    raise ImproperlyConfigured, "Could not load Boto's S3 bindings."
-
-class S3BotoStorage(Storage):
-    """Amazon Simple Storage Service using Boto"""
-    
-    def __init__(self, bucket="root", bucketprefix=settings.AWS_BUCKET_PREFIX, access_key=None, secret_key=None, acl='public-read'):
-        self.acl = acl
-        
-        if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
-        
-        self.connection = S3Connection(access_key, secret_key)
-        self.bucket = self.connection.create_bucket(bucketprefix + bucket)
-        self.headers = getattr(settings, AWS_HEADERS, {})
-    
-    def _get_access_keys(self):
-        access_key = getattr(settings, ACCESS_KEY_NAME, None)
-        secret_key = getattr(settings, SECRET_KEY_NAME, None)
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            access_key = os.environ.get(ACCESS_KEY_NAME)
-            secret_key = os.environ.get(SECRET_KEY_NAME)
-        
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-        
-        return None, None
-    
-    def _open(self, name, mode='rb'):
-        return S3BotoStorageFile(name, mode, self)
-    
-    def _save(self, name, content):
-        k = self.bucket.get_key(name)
-        if not k:
-            k = self.bucket.new_key(name)
-        k.set_contents_from_file(content)
-        return name
-    
-    def delete(self, name):
-        self.bucket.delete_key(name)
-    
-    def exists(self, name):
-        k = Key(self.bucket, name)
-        return k.exists()
-    
-    def listdir(self, name):
-        return [l.name for l in self.bucket.list() if not len(name) or l.name[:len(name)] == name]
-    
-    def size(self, name):
-        return self.bucket.get_key(name).size
-    
-    def url(self, name):
-        return self.bucket.get_key(name).generate_url(3600, method='GET')
-    
-    def get_available_name(self, name):
-        """ Overwrite existing file with the same name. """
-        return name
-
-class S3BotoStorageFile(File):
-    def __init__(self, name, mode, storage):
-        self._storage = storage
-        self._name = name
-        self._mode = mode
-        self.key = storage.bucket.get_key(name)
-    
-    def size(self):
-        return self.key.size
-    
-    def read(self, *args, **kwargs):
-        return self.key.read(*args, **kwargs)
-    
-    def write(self, content):
-        self.key.set_contents_from_string(content)
-    
-    def close(self):
-        self.key.close()
-

S3Storage.py

-import os
-from mimetypes import guess_type
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.utils.functional import curry
-
-ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
-SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
-AWS_HEADERS = 'AWS_HEADERS'
-
-try:
-    from S3 import AWSAuthConnection, QueryStringAuthGenerator
-except ImportError:
-    raise ImproperlyConfigured, "Could not load amazon's S3 bindings.\
-    \nSee http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134"
-
-
-class S3Storage(Storage):
-    """Amazon Simple Storage Service"""
-
-    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
-            access_key=None, secret_key=None, acl='public-read', 
-            calling_format=settings.AWS_CALLING_FORMAT):
-        self.bucket = bucket
-        self.acl = acl
-
-        if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
-
-        self.connection = AWSAuthConnection(access_key, secret_key, 
-                            calling_format=calling_format)
-        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
-                            calling_format=calling_format, is_secure=False)
-        
-        self.headers = getattr(settings, AWS_HEADERS, {})
-
-    def _get_access_keys(self):
-        access_key = getattr(settings, ACCESS_KEY_NAME, None)
-        secret_key = getattr(settings, SECRET_KEY_NAME, None)
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            access_key = os.environ.get(ACCESS_KEY_NAME)
-            secret_key = os.environ.get(SECRET_KEY_NAME)
-
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-
-        return None, None
-
-    def _get_connection(self):
-        return AWSAuthConnection(*self._get_access_keys())
-
-    def _put_file(self, name, content):
-        content_type = guess_type(name)[0] or "application/x-octet-stream"
-        self.headers.update({'x-amz-acl': self.acl, 'Content-Type': content_type})
-        response = self.connection.put(self.bucket, name, content, self.headers)
-
-    def _open(self, name, mode='rb'):
-        remote_file = S3StorageFile(name, self, mode=mode)
-        return remote_file
-
-    def _read(self, name, start_range=None, end_range=None):
-        if start_range is None:
-            headers = {}
-        else:
-            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
-        response = self.connection.get(self.bucket, name, headers)
-        headers = response.http_response.msg
-        return response.object.data, headers['etag'], headers.get('content-range', None)
-        
-    def _save(self, name, content):
-        content.open()
-        if hasattr(content, 'chunks'):
-            content_str = ''.join(chunk for chunk in content.chunks())
-        else:
-            content_str = content.read()
-        self._put_file(name, content_str)
-        return name
-    
-    def delete(self, name):
-        self.connection.delete(self.bucket, name)
-
-    def exists(self, name):
-        response = self.connection._make_request('HEAD', self.bucket, name)
-        return response.status == 200
-
-    def size(self, name):
-        response = self.connection._make_request('HEAD', self.bucket, name)
-        content_length = response.getheader('Content-Length')
-        return content_length and int(content_length) or 0
-    
-    def url(self, name):
-        return self.generator.make_bare_url(self.bucket, name)
-
-    ## UNCOMMENT BELOW IF NECESSARY
-    #def get_available_name(self, name):
-    #    """ Overwrite existing file with the same name. """
-    #    return name
-
-
-class S3StorageFile(File):
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-        self.file = StringIO()
-        self.start_range = 0
-    
-    @property
-    def size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self._name)
-        return self._size
-
-    def read(self, num_bytes=None):
-        if num_bytes is None:
-            args = []
-            self.start_range = 0
-        else:
-            args = [self.start_range, self.start_range+num_bytes-1]
-        data, etags, content_range = self._storage._read(self._name, *args)
-        if content_range is not None:
-            current_range, size = content_range.split(' ', 1)[1].split('/', 1)
-            start_range, end_range = current_range.split('-', 1)
-            self._size, self.start_range = int(size), int(end_range)+1
-        self.file = StringIO(data)
-        return self.file.getvalue()
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._put_file(self._name, self.file.getvalue())
-        self.file.close()

backends/DatabaseStorage.py

+# DatabaseStorage for django.
+# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
+
+
+from django.core.files.storage import Storage
+from django.core.files import File
+from django.conf import settings
+
+import StringIO
+import urlparse
+
+import pyodbc
+
+class DatabaseStorage(Storage):
+    """
+    Class DatabaseStorage provides storing files in the database. 
+    """
+
+    def __init__(self, option=settings.DB_FILES):
+        """Constructor. 
+        
+        Constructs object using dictionary either specified in contucotr or
+in settings.DB_FILES. 
+        
+        @param option dictionary with 'db_table', 'fname_column',
+'blob_column', 'size_column', 'base_url'  keys. 
+        
+        option['db_table']
+            Table to work with.
+        option['fname_column']
+            Column in the 'db_table' containing filenames (filenames can
+contain pathes). Values should be the same as where FileField keeps
+filenames. 
+            It is used to map filename to blob_column. In sql it's simply
+used in where clause. 
+        option['blob_column']
+            Blob column (for example 'image' type), created manually in the
+'db_table', used to store image.
+        option['size_column']
+            Column to store file size. Used for optimization of size()
+method (another way is to open file and get size)
+        option['base_url']
+            Url prefix used with filenames. Should be mapped to the view,
+that returns an image as result. 
+        """
+        
+        if not option or not (option.has_key('db_table') and option.has_key('fname_column') and option.has_key('blob_column')
+                              and option.has_key('size_column') and option.has_key('base_url') ):
+            raise ValueError("You didn't specify required options")
+        self.db_table = option['db_table']
+        self.fname_column = option['fname_column']
+        self.blob_column = option['blob_column']
+        self.size_column = option['size_column']
+        self.base_url = option['base_url']
+
+        #get database settings
+        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
+        self.DATABASE_NAME = settings.DATABASE_NAME
+        self.DATABASE_USER = settings.DATABASE_USER
+        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
+        self.DATABASE_HOST = settings.DATABASE_HOST
+        
+        self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME,
+                                                                                          self.DATABASE_USER, self.DATABASE_PASSWORD) )
+        self.cursor = self.connection.cursor()
+
+    def _open(self, name, mode='rb'):
+        """Open a file from database. 
+        
+        @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends pathes with "/".
+        If there is no such file in the db, returs None
+        """
+        
+        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode
+
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone()
+        if row is None:
+            return None
+        inMemFile = StringIO.StringIO(row[0])
+        inMemFile.name = name
+        inMemFile.mode = mode
+        
+        retFile = File(inMemFile)
+        return retFile
+
+    def _save(self, name, content):
+        """Save 'content' as file named 'name'.
+        
+        @note '\' in path will be converted to '/'. 
+        """
+        
+        name = name.replace('\\', '/')
+        binary = pyodbc.Binary(content.read())
+        size = len(binary)
+        
+        #todo: check result and do something (exception?) if failed.
+        if self.exists(name):
+            self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), 
+                                 (binary, size)  )
+        else:
+            self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size)  )
+        self.connection.commit()
+        return name
+
+    def exists(self, name):
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone()
+        return row is not None
+    
    def get_available_name(self, name):
        """Return `name` unchanged: an existing row is simply overwritten on save."""
        return name
+
+    def delete(self, name):
+        if self.exists(name):
+            self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name))
+            self.connection.commit()
+
+    def url(self, name):
+        if self.base_url is None:
+            raise ValueError("This file is not accessible via a URL.")
+        return urlparse.urljoin(self.base_url, name).replace('\\', '/')
+    
+    def size(self, name):
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone()
+        if row is None:
+            return 0
+        else:
+            return int(row[0])

backends/FTPStorage.py

+# FTP storage class for Django pluggable storage system.
+# Author: Rafal Jonca <jonca.rafal@gmail.com>
+# License: MIT
+# Comes from http://www.djangosnippets.org/snippets/1269/
+#
+# Usage:
+#
+# Add below to settings.py:
+# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
+#
+# In models.py you can write:
+# from FTPStorage import FTPStorage
+# fs = FTPStorage()
+# class FTPTest(models.Model):
+#     file = models.FileField(upload_to='a/b/c/', storage=fs)
+
+import os
+import ftplib
+import urlparse
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+
+class FTPStorageException(Exception): pass
+
class FTPStorage(Storage):
    """FTP Storage class for Django pluggable storage system.

    Configuration comes from the ``FTP_STORAGE_LOCATION`` URL
    (``[a]ftp://user:pass@host:port/path``); files are served back under
    ``base_url`` (``MEDIA_URL`` by default).
    """

    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
        self._config = self._decode_location(location)
        self._base_url = base_url
        self._connection = None

    def _decode_location(self, location):
        """Return splitted configuration data from location."""
        splitted_url = urlparse.urlparse(location)
        config = {}

        if splitted_url.scheme not in ('ftp', 'aftp'):
            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
        if splitted_url.hostname == '':
            raise ImproperlyConfigured('You must at least provide hostname!')

        # The 'aftp' scheme selects active FTP mode; plain 'ftp' is passive.
        if splitted_url.scheme == 'aftp':
            config['active'] = True
        else:
            config['active'] = False
        config['path'] = splitted_url.path
        config['host'] = splitted_url.hostname
        config['user'] = splitted_url.username
        config['passwd'] = splitted_url.password
        config['port'] = int(splitted_url.port)

        return config

    def _start_connection(self):
        """Ensure self._connection holds a live, logged-in FTP connection."""
        # Check if connection is still alive and if not, drop it.
        if self._connection is not None:
            try:
                self._connection.pwd()
            except ftplib.all_errors:
                self._connection = None

        # Real reconnect
        if self._connection is None:
            ftp = ftplib.FTP()
            try:
                ftp.connect(self._config['host'], self._config['port'])
                ftp.login(self._config['user'], self._config['passwd'])
                if self._config['active']:
                    ftp.set_pasv(False)
                if self._config['path'] != '':
                    ftp.cwd(self._config['path'])
                self._connection = ftp
                return
            except ftplib.all_errors:
                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))

    def disconnect(self):
        """Close the cached connection (if any) and forget it."""
        self._connection.quit()
        self._connection = None

    def _end_connection(self):
        # FTPStorageFile calls _end_connection() after reads/writes, but this
        # method did not exist (only disconnect() did), so every first read of
        # an FTPStorageFile raised AttributeError. Provide it as an alias.
        self.disconnect()

    def _mkremdirs(self, path):
        """Create (if needed) and verify each component of `path` on the server."""
        pwd = self._connection.pwd()
        path_splitted = path.split('/')
        for path_part in path_splitted:
            try:
                self._connection.cwd(path_part)
            except ftplib.all_errors:
                # cwd failed, so the directory is (probably) missing: create
                # it, then enter it. (Was a bare `except:`; narrowed to the
                # FTP error family so real bugs are not swallowed.)
                try:
                    self._connection.mkd(path_part)
                    self._connection.cwd(path_part)
                except ftplib.all_errors:
                    raise FTPStorageException('Cannot create directory chain %s' % path)
        self._connection.cwd(pwd)
        return

    def _put_file(self, name, content):
        """Upload the byte string `content` as remote file `name`."""
        # Connection must be open!
        try:
            self._mkremdirs(os.path.dirname(name))
            pwd = self._connection.pwd()
            self._connection.cwd(os.path.dirname(name))
            memory_file = StringIO(content)
            self._connection.storbinary('STOR ' + os.path.basename(name), memory_file, 8*1024)
            memory_file.close()
            self._connection.cwd(pwd)
        except ftplib.all_errors:
            raise FTPStorageException('Error writing file %s' % name)

    def _open(self, name, mode='rb'):
        remote_file = FTPStorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name):
        """Download remote file `name` into a StringIO, rewound to the start."""
        memory_file = StringIO()
        try:
            pwd = self._connection.pwd()
            self._connection.cwd(os.path.dirname(name))
            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
            self._connection.cwd(pwd)
            # Rewind: retrbinary left the write position at the end of the
            # buffer, so without this callers' read() returned ''.
            memory_file.seek(0)
            return memory_file
        except ftplib.all_errors:
            raise FTPStorageException('Error reading file %s' % name)

    def _save(self, name, content):
        """Read all of `content` into memory and upload it as `name`."""
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._start_connection()
        self._put_file(name, content_str)
        return name

    def _get_dir_details(self, path):
        """Parse a LIST reply for `path` into ({dirname: 0}, {filename: size})."""
        # Connection must be open!
        try:
            lines = []
            self._connection.retrlines('LIST '+path, lines.append)
            dirs = {}
            files = {}
            for line in lines:
                words = line.split()
                if len(words) < 6:
                    continue
                if words[-2] == '->':
                    # Skip symbolic-link entries.
                    continue
                if words[0][0] == 'd':
                    dirs[words[-1]] = 0
                elif words[0][0] == '-':
                    files[words[-1]] = int(words[-5])
            return dirs, files
        except ftplib.all_errors:
            raise FTPStorageException('Error getting listing for %s' % path)

    def listdir(self, path):
        self._start_connection()
        dirs, files = self._get_dir_details(path)
        return dirs.keys(), files.keys()

    def delete(self, name):
        if not self.exists(name):
            return
        self._start_connection()
        try:
            self._connection.delete(name)
        except ftplib.all_errors:
            raise FTPStorageException('Error when removing %s' % name)

    def exists(self, name):
        self._start_connection()
        try:
            if name in self._connection.nlst(os.path.dirname(name)):
                return True
            else:
                return False
        except ftplib.error_temp:
            # A temporary error from NLST (e.g. on a missing directory) is
            # treated as "does not exist".
            return False
        except ftplib.all_errors:
            raise FTPStorageException('Error when testing existence of %s' % name)

    def size(self, name):
        """Return the size of `name` from the directory listing, or 0."""
        self._start_connection()
        try:
            dirs, files = self._get_dir_details(os.path.dirname(name))
            if os.path.basename(name) in files:
                return files[os.path.basename(name)]
            else:
                return 0
        except FTPStorageException:
            return 0

    def url(self, name):
        if self._base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self._base_url, name).replace('\\', '/')
+
class FTPStorageFile(File):
    """Lazy django File wrapper over a file stored on an FTP server."""
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = StringIO()
        self._is_read = False

    @property
    def size(self):
        # Fetched once from the storage and cached on the instance.
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            self._storage._start_connection()
            self.file = self._storage._read(self._name)
            # Rewind: the downloaded buffer may still be positioned at its
            # end, which would make read() return an empty string.
            self.file.seek(0)
            # The original called self._storage._end_connection(), which does
            # not exist on FTPStorage (AttributeError); disconnect() is the
            # method that closes the connection.
            self._storage.disconnect()
            self._is_read = True

        return self.file.read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True
        self._is_read = True

    def close(self):
        # Flush pending writes to the server before closing the local buffer.
        if self._is_dirty:
            self._storage._start_connection()
            self._storage._put_file(self._name, self.file.getvalue())
            self._storage.disconnect()
        self.file.close()

backends/ImageStorage.py

+
+import os
+from PIL import ImageFile as PILImageFile
+from django.core.files.storage import FileSystemStorage
+
+
class ImageStorage(FileSystemStorage):
    """
    A FileSystemStorage which normalizes extensions for images.

    Comes from http://www.djangosnippets.org/snippets/965/
    """

    def find_extension(self, format):
        """Normalizes PIL-returned format into a standard, lowercase extension."""
        format = format.lower()

        if format == 'jpeg':
            format = 'jpg'

        return format

    def save(self, name, content):
        """Save `content`, forcing the extension of `name` to match the
        actual image type detected from the data itself.

        Raises ValueError if `content` is not a parseable image.
        """
        dirname = os.path.dirname(name)
        basename = os.path.basename(name)

        # Use PIL to determine filetype

        p = PILImageFile.Parser()
        im = None
        while 1:
            data = content.read(1024)
            if not data:
                break
            p.feed(data)
            if p.image:
                im = p.image
                break

        if im is None:
            # The original fell through with `im` unbound here and raised a
            # confusing NameError; fail with an explicit error instead.
            raise ValueError("Could not determine the image type of %r" % name)

        extension = self.find_extension(im.format)

        # Does the basename already have an extension? If so, replace it.
        # bare as in without extension
        bare_basename = basename if '.' not in basename else basename[:basename.rindex('.')]
        basename = bare_basename + '.' + extension

        name = os.path.join(dirname, basename)
        # Rewind: the type detection above consumed the start of the stream,
        # so make sure the full file is what gets saved.
        content.seek(0)
        return super(ImageStorage, self).save(name, content)
+    

backends/MogileFSStorage.py

+import urlparse
+from StringIO import StringIO
+from mimetypes import guess_type
+
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+from django.conf import settings
+from django.utils.text import force_unicode
+from django.http import HttpResponse, HttpResponseNotFound
+from django.core.cache import cache
+
+import mogilefs
+
+
class MogileFSStorage(Storage):
    """MogileFS filesystem storage.

    Requires MOGILEFS_TRACKERS and MOGILEFS_DOMAIN in settings; the optional
    MOGILEFS_MEDIA_URL overrides the `base_url` argument (MEDIA_URL).
    """
    def __init__(self, base_url=settings.MEDIA_URL):

        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
            self.base_url = settings.MOGILEFS_MEDIA_URL
        else:
            self.base_url = base_url

        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
            if not hasattr(settings, var):
                raise ImproperlyConfigured("You must define %s to use the MogileFS backend." % var)

        self.trackers = settings.MOGILEFS_TRACKERS
        self.domain = settings.MOGILEFS_DOMAIN
        self.client = mogilefs.Client(self.domain, self.trackers)

    def get_mogile_paths(self, filename):
        """Return the list of real paths the tracker knows for `filename`."""
        return self.client.get_paths(filename)

    # The following methods define the Backend API

    def filesize(self, filename):
        # Fixed: `raise NotImplemented` raised a TypeError (NotImplemented is
        # a constant, not an exception class); NotImplementedError is correct.
        raise NotImplementedError
        #return os.path.getsize(self._get_absolute_path(filename))

    def path(self, filename):
        """Return the first mogile path for `filename`, or None."""
        paths = self.get_mogile_paths(filename)
        if paths:
            # Reuse the result instead of querying the tracker a second time.
            return paths[0]
        else:
            return None

    def url(self, filename):
        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')

    def open(self, filename, mode='rb'):
        # See filesize() above for the NotImplemented -> NotImplementedError fix.
        raise NotImplementedError
        #return open(self._get_absolute_path(filename), mode)

    def exists(self, filename):
        return filename in self.client

    def save(self, filename, raw_contents):
        # Storage's API method is get_available_name(); the original called a
        # nonexistent get_available_filename(), raising AttributeError on
        # every save.
        filename = self.get_available_name(filename)

        if not hasattr(self, 'mogile_class'):
            self.mogile_class = None

        # Write the file to mogile
        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
        if success:
            print("Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0]))
        else:
            print("FAILURE writing file %s" % (filename))

        return force_unicode(filename.replace('\\', '/'))

    def delete(self, filename):

        self.client.delete(filename)
+            
+        
def serve_mogilefs_file(request, key=None):
    """
    Called when a user requests an image.
    Either reproxy the path to perlbal, or serve the image outright
    """
    # not the best way to do this, since we create a client each time
    mimetype = guess_type(key)[0] or "application/x-octet-stream"
    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
        # we're reproxying with perlbal

        # Cache the tracker path lookup for 60 seconds to spare the tracker.
        path = cache.get(key)

        if not path:
            path = client.get_paths(key)
            cache.set(key, path, 60)

        if path:
            response = HttpResponse(content_type=mimetype)
            response['X-REPROXY-URL'] = path[0]
        else:
            response = HttpResponseNotFound()

    else:
        # we don't have perlbal, let's just serve the image via django
        file_data = client[key]
        if file_data:
            # content_type= instead of the legacy mimetype= kwarg, for
            # consistency with the perlbal branch above.
            response = HttpResponse(file_data, content_type=mimetype)
        else:
            response = HttpResponseNotFound()

    return response

backends/OverwriteStorage.py

+import os
+
+from django.conf import settings
+from django.core.files.storage import FileSystemStorage
+
class OverwriteStorage(FileSystemStorage):

    def get_available_name(self, name):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.

        Comes from http://www.djangosnippets.org/snippets/976/
        (even if it already exists in S3Storage for ages)
        """
        # If the filename already exists, remove it as if it was a true file system
        if self.exists(name):
            # Use self.path() so a storage configured with a custom `location`
            # removes the right file; the original hard-coded MEDIA_ROOT,
            # which is only correct for the default location.
            os.remove(self.path(name))
        return name

backends/S3BotoStorage.py

+import os
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.utils.functional import curry
+
+ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
+SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
+AWS_HEADERS = 'AWS_HEADERS'
+
+try:
+    from boto.s3.connection import S3Connection
+    from boto.s3.key import Key
+except ImportError:
+    raise ImproperlyConfigured, "Could not load Boto's S3 bindings."
+
class S3BotoStorage(Storage):
    """Amazon Simple Storage Service backend implemented on top of Boto."""

    def __init__(self, bucket="root", bucketprefix=settings.AWS_BUCKET_PREFIX, access_key=None, secret_key=None, acl='public-read'):
        """Connect to S3 and make sure the target bucket exists."""
        self.acl = acl

        if access_key or secret_key:
            creds = (access_key, secret_key)
        else:
            creds = self._get_access_keys()

        self.connection = S3Connection(creds[0], creds[1])
        self.bucket = self.connection.create_bucket(bucketprefix + bucket)
        self.headers = getattr(settings, AWS_HEADERS, {})

    def _get_access_keys(self):
        """Return (access_key, secret_key) from settings, falling back to the
        environment when exactly one of the two is configured; (None, None)
        when the pair cannot be resolved."""
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        # Exactly one configured -> try the environment for both instead.
        if bool(access_key) != bool(secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            return access_key, secret_key

        return None, None

    def _open(self, name, mode='rb'):
        return S3BotoStorageFile(name, mode, self)

    def _save(self, name, content):
        """Upload `content` under the key `name`, reusing the key if present."""
        key = self.bucket.get_key(name)
        if not key:
            key = self.bucket.new_key(name)
        key.set_contents_from_file(content)
        return name

    def delete(self, name):
        self.bucket.delete_key(name)

    def exists(self, name):
        return Key(self.bucket, name).exists()

    def listdir(self, name):
        """Return the names of all keys whose name starts with `name`."""
        prefix_len = len(name)
        matches = []
        for entry in self.bucket.list():
            if not prefix_len or entry.name[:prefix_len] == name:
                matches.append(entry.name)
        return matches

    def size(self, name):
        return self.bucket.get_key(name).size

    def url(self, name):
        """Return a signed GET URL for `name`, valid for one hour."""
        key = self.bucket.get_key(name)
        return key.generate_url(3600, method='GET')

    def get_available_name(self, name):
        """ Overwrite existing file with the same name. """
        return name
+
class S3BotoStorageFile(File):
    """django File wrapper around a boto S3 key."""
    def __init__(self, name, mode, storage):
        self._storage = storage
        self._name = name
        self._mode = mode
        self.key = storage.bucket.get_key(name)

    @property
    def size(self):
        # Made a property for consistency with django's File API (size is an
        # attribute, not a method) and with the sibling S3StorageFile /
        # FTPStorageFile classes in this package.
        return self.key.size

    def read(self, *args, **kwargs):
        return self.key.read(*args, **kwargs)

    def write(self, content):
        self.key.set_contents_from_string(content)

    def close(self):
        self.key.close()
+

backends/S3Storage.py

+import os
+from mimetypes import guess_type
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.utils.functional import curry
+
+ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
+SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
+AWS_HEADERS = 'AWS_HEADERS'
+
+try:
+    from S3 import AWSAuthConnection, QueryStringAuthGenerator
+except ImportError:
+    raise ImproperlyConfigured, "Could not load amazon's S3 bindings.\
+    \nSee http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134"
+
+
class S3Storage(Storage):
    """Amazon Simple Storage Service"""

    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME, 
            access_key=None, secret_key=None, acl='public-read', 
            calling_format=settings.AWS_CALLING_FORMAT):
        # `acl` is sent as the x-amz-acl header on every upload.
        self.bucket = bucket
        self.acl = acl

        if not access_key and not secret_key:
             access_key, secret_key = self._get_access_keys()

        # Authenticated connection for reads/writes; the separate generator
        # builds bare (unsigned, is_secure=False) URLs for url().
        self.connection = AWSAuthConnection(access_key, secret_key, 
                            calling_format=calling_format)
        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
                            calling_format=calling_format, is_secure=False)
        
        # Extra headers from settings.AWS_HEADERS, merged into every PUT.
        self.headers = getattr(settings, AWS_HEADERS, {})

    def _get_access_keys(self):
        """Return (access_key, secret_key) from settings, falling back to the
        environment when only one of the two is configured; (None, None) if
        the pair cannot be resolved."""
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_connection(self):
        """Return a fresh AWSAuthConnection built from the resolved keys."""
        return AWSAuthConnection(*self._get_access_keys())

    def _put_file(self, name, content):
        """Upload the byte string `content` as key `name` with ACL/type headers."""
        content_type = guess_type(name)[0] or "application/x-octet-stream"
        # NOTE(review): this mutates self.headers in place on every call;
        # harmless today since both keys are overwritten each time, but worth
        # confirming before adding per-file headers.
        self.headers.update({'x-amz-acl': self.acl, 'Content-Type': content_type})
        response = self.connection.put(self.bucket, name, content, self.headers)

    def _open(self, name, mode='rb'):
        remote_file = S3StorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        """GET key `name`, optionally as a ranged request.

        Returns (data, etag, content-range header or None).
        """
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get(self.bucket, name, headers)
        headers = response.http_response.msg
        return response.object.data, headers['etag'], headers.get('content-range', None)
        
    def _save(self, name, content):
        """Read all of `content` into memory and upload it as `name`."""
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name
    
    def delete(self, name):
        self.connection.delete(self.bucket, name)

    def exists(self, name):
        # HEAD request: 200 means the key exists.
        response = self.connection._make_request('HEAD', self.bucket, name)
        return response.status == 200

    def size(self, name):
        # Size from the HEAD response's Content-Length; 0 when absent.
        response = self.connection._make_request('HEAD', self.bucket, name)
        content_length = response.getheader('Content-Length')
        return content_length and int(content_length) or 0
    
    def url(self, name):
        return self.generator.make_bare_url(self.bucket, name)

    ## UNCOMMENT BELOW IF NECESSARY
    #def get_available_name(self, name):
    #    """ Overwrite existing file with the same name. """
    #    return name
+
+
class S3StorageFile(File):
    """django File over an S3 key, supporting ranged reads via S3Storage._read."""
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = StringIO()
        # Byte offset of the next ranged read.
        self.start_range = 0
    
    @property
    def size(self):
        # Fetched once from the storage and cached on the instance.
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        """Read `num_bytes` from the current offset (or the whole object).

        Each call issues a fresh (possibly ranged) GET; the offset and total
        size are updated from the Content-Range response header when present.
        """
        if num_bytes is None:
            args = []
            self.start_range = 0
        else:
            # HTTP ranges are inclusive, hence the -1.
            args = [self.start_range, self.start_range+num_bytes-1]
        data, etags, content_range = self._storage._read(self._name, *args)
        if content_range is not None:
            # content_range looks like "bytes START-END/TOTAL".
            current_range, size = content_range.split(' ', 1)[1].split('/', 1)
            start_range, end_range = current_range.split('-', 1)
            self._size, self.start_range = int(size), int(end_range)+1
        self.file = StringIO(data)
        return self.file.getvalue()

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True

    def close(self):
        # Flush pending writes to S3 before closing the local buffer.
        if self._is_dirty:
            self._storage._put_file(self._name, self.file.getvalue())
        self.file.close()

Empty file added.

django-mogilefs-storage.txt

-================
-MogileFS Storage
-================
-
-The MogileFS storage backend is fairly simple: it uses URLs (or, rather, 
-parts of URLs) as keys into the mogile database. When the user requests a file 
-stored by mogile (say, an avatar), the URL gets passed to a view which, using 
-a client to the mogile tracker, retrieves the "correct" path (the path that 
-points to the actual file data). The view will then either return the path(s) 
-to perlbal to reproxy, or, if you're not using perlbal to reproxy 
-(which you should), it serves the data of the file directly from django.
-
-In order for the backend to work, we need to add a few settings variables:
-
-    * ``MOGILEFS_DOMAIN``: The mogile domain that files should read 
-      from/written to, e.g "production"
-    * ``MOGILEFS_TRACKERS``: A list of trackers to connect to, 
-      e.g. ["foo.sample.com:7001", "bar.sample.com:7001"]
-    * ``MOGILEFS_MEDIA_URL`` (optional): The prefix for URLs that point to 
-      mogile files. This is used in a similar way to ``MEDIA_URL``, 
-      e.g. "/mogilefs/"
-    * ``SERVE_WITH_PERLBAL``: Boolean that, when True, will pass the paths 
-      back in the response in the ``X-REPROXY-URL`` header. If False, django 
-      will serve all mogile media files itself (bad idea for production, 
-      but useful if you're testing on a setup that doesn't have perlbal 
-      running)
-    * ``DEFAULT_FILE_STORAGE``: This is the class that's used for the backend.
-      You'll want to set this to ``project.app.storages.MogileFSStorage``
-      (or wherever you've installed the backend) 
-
- 
-
-Getting files into mogile
--------------------------
-
-The great thing about file backends is that we just need to specify the 
-backend in the model file and everything is taken care for us - all the 
-default save() methods work correctly.
-
-For Fluther, we have two main media types we use mogile for: avatars and 
-thumbnails. Mogile defines "classes" that dictate how each type of file is 
-replicated - so you can make sure you have 3 copies of the original avatar 
-but only 1 of the thumbnail.
-
-In order for classes to behave nicely with the backend framework, we've had to 
-do a little tomfoolery. (This is something that may change in future versions 
-of the filestorage framework).
-
-Here's what the models.py file looks like for the avatars::
-
-    from django.core.filestorage import storage
-    
-    # TODO: Find a better way to deal with classes. Maybe a generator?
-    class AvatarStorage(storage.__class__):
-        mogile_class = 'avatar' 
-    
-    class ThumbnailStorage(storage.__class__):
-        mogile_class = 'thumb'
-    
-    class Avatar(models.Model):
-        user = models.ForeignKey(User, null=True, blank=True)
-        image = models.ImageField(storage=AvatarStorage())
-        thumb = models.ImageField(storage=ThumbnailStorage())
-
-Each of the custom storage classes defines a ``class`` attribute which gets 
-passed to the mogile backend behind the scenes.  If you don't want to worry 
-about mogile classes, don't need to define a custom storage engine or specify 
-it in the field - the default should work just fine.
-
-Serving files from mogile
--------------------------
-
-Now, all we need to do is plug in the view that serves up mogile data. 
-
-Here's what we use::
-
-  urlpatterns += patterns(",
-      (r'^%s(?P<key>.*)' % settings.MOGILEFS_MEDIA_URL[1:], 
-          'MogileFSStorage.serve_mogilefs_file')
-  )
-
-Any url beginning with the value of ``MOGILEFS_MEDIA_URL`` will get passed to 
-our view. Since ``MOGILEFS_MEDIA_URL`` requires a leading slash (like 
-``MEDIA_URL``), we strip that off and pass the rest of the url over to the 
-view.
-
-That's it! Happy mogiling!

docs/django-mogilefs-storage.txt

+================
+MogileFS Storage
+================
+
+The MogileFS storage backend is fairly simple: it uses URLs (or, rather, 
+parts of URLs) as keys into the mogile database. When the user requests a file 
+stored by mogile (say, an avatar), the URL gets passed to a view which, using 
+a client to the mogile tracker, retrieves the "correct" path (the path that 
+points to the actual file data). The view will then either return the path(s) 
+to perlbal to reproxy, or, if you're not using perlbal to reproxy 
+(which you should), it serves the data of the file directly from django.
+
+In order for the backend to work, we need to add a few settings variables:
+
+    * ``MOGILEFS_DOMAIN``: The mogile domain that files should read 
+      from/written to, e.g "production"
+    * ``MOGILEFS_TRACKERS``: A list of trackers to connect to, 
+      e.g. ["foo.sample.com:7001", "bar.sample.com:7001"]
+    * ``MOGILEFS_MEDIA_URL`` (optional): The prefix for URLs that point to 
+      mogile files. This is used in a similar way to ``MEDIA_URL``, 
+      e.g. "/mogilefs/"
+    * ``SERVE_WITH_PERLBAL``: Boolean that, when True, will pass the paths 
+      back in the response in the ``X-REPROXY-URL`` header. If False, django 
+      will serve all mogile media files itself (bad idea for production, 
+      but useful if you're testing on a setup that doesn't have perlbal 
+      running)
+    * ``DEFAULT_FILE_STORAGE``: This is the class that's used for the backend.
+      You'll want to set this to ``project.app.storages.MogileFSStorage``
+      (or wherever you've installed the backend) 
+
+ 
+
+Getting files into mogile
+-------------------------
+
+The great thing about file backends is that we just need to specify the 
+backend in the model file and everything is taken care for us - all the 
+default save() methods work correctly.
+
+For Fluther, we have two main media types we use mogile for: avatars and 
+thumbnails. Mogile defines "classes" that dictate how each type of file is 
+replicated - so you can make sure you have 3 copies of the original avatar 
+but only 1 of the thumbnail.
+
+In order for classes to behave nicely with the backend framework, we've had to 
+do a little tomfoolery. (This is something that may change in future versions 
+of the filestorage framework).
+
+Here's what the models.py file looks like for the avatars::
+
+    from django.core.filestorage import storage
+    
+    # TODO: Find a better way to deal with classes. Maybe a generator?
+    class AvatarStorage(storage.__class__):
+        mogile_class = 'avatar' 
+    
+    class ThumbnailStorage(storage.__class__):
+        mogile_class = 'thumb'
+    
+    class Avatar(models.Model):
+        user = models.ForeignKey(User, null=True, blank=True)
+        image = models.ImageField(storage=AvatarStorage())
+        thumb = models.ImageField(storage=ThumbnailStorage())
+
+Each of the custom storage classes defines a ``mogile_class`` attribute which 
+gets passed to the mogile backend behind the scenes. If you don't want to 
+worry about mogile classes, you don't need to define a custom storage engine 
+or specify it in the field - the default should work just fine.
+
+Serving files from mogile
+-------------------------
+
+Now, all we need to do is plug in the view that serves up mogile data. 
+
+Here's what we use::
+
+  urlpatterns += patterns('',
+      (r'^%s(?P<key>.*)' % settings.MOGILEFS_MEDIA_URL[1:], 
+          'MogileFSStorage.serve_mogilefs_file')
+  )
+
+Any url beginning with the value of ``MOGILEFS_MEDIA_URL`` will get passed to 
+our view. Since ``MOGILEFS_MEDIA_URL`` requires a leading slash (like 
+``MEDIA_URL``), we strip that off and pass the rest of the url over to the 
+view.
+
+That's it! Happy mogiling!
Add a comment to this file

examples/s3project/__init__.py

Empty file added.

examples/s3project/manage.py

+# put patched django and S3 in PYTHONPATH
+import sys, os
+sys.path = [os.path.join(os.getcwd(), '../')] + sys.path
+
+from django.core.management import execute_manager
+
+try:
+    import settings # Assumed to be in the same directory.
+except ImportError:
+    import sys
+    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
+    sys.exit(1)
+
+if __name__ == "__main__":
+    execute_manager(settings)

examples/s3project/models.py

+
+import tempfile
+
+from django.db import models
+from django.core.files.base import ContentFile
+from django.core.files.storage import default_storage as s3_storage
+from django.core.cache import cache
+
+# Write out a file to be used as default content
+s3_storage.save('tests/default.txt', ContentFile('default content'))
+
+class MyStorage(models.Model):
+    def custom_upload_to(self, filename):
+        return 'foo'
+
+    def random_upload_to(self, filename):
+        # This returns a different result each time,
+        # to make sure it only gets called once.
+        import random
+        return '%s/%s' % (random.randint(100, 999), filename)
+
+    normal = models.FileField(storage=s3_storage, upload_to='tests')
+    custom = models.FileField(storage=s3_storage, upload_to=custom_upload_to)
+    random = models.FileField(storage=s3_storage, upload_to=random_upload_to)
+    default = models.FileField(storage=s3_storage, upload_to='tests', default='tests/default.txt')

examples/s3project/tests.py

+"""
+=================
+Django S3 storage
+=================
+
+Usage
+=====
+
+Settings
+--------
+
+``DEFAULT_FILE_STORAGE``
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+This setting stores the path to the S3 storage class: the first part
+corresponds to the module path and the second to the name of the class. If
+you've got ``example.com`` in your ``PYTHONPATH`` and store your storage file in
+``example.com/libs/storages/S3Storage.py``, the resulting setting will be::
+
+    DEFAULT_FILE_STORAGE = 'libs.storages.S3Storage.S3Storage'
+
+If you keep the same filename as in repository, it should always end with 
+``S3Storage.S3Storage``.
+
+``AWS_ACCESS_KEY_ID``
+~~~~~~~~~~~~~~~~~~~~~
+
+Your Amazon Web Services access key, as a string.
+
+``AWS_SECRET_ACCESS_KEY``
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Your Amazon Web Services secret access key, as a string.
+
+``AWS_STORAGE_BUCKET_NAME``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Your Amazon Web Services storage bucket name, as a string.
+
+``AWS_CALLING_FORMAT``
+~~~~~~~~~~~~~~~~~~~~~~
+
+The way you'd like to call the Amazon Web Services API, for instance if you
+prefer subdomains::
+
+    from S3 import CallingFormat
+    AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
+
+``AWS_HEADERS`` (optional)
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you'd like to set headers sent with each file of the storage::
+
+    # see http://developer.yahoo.com/performance/rules.html#expires
+    AWS_HEADERS = {
+        'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT', 
+        'Cache-Control': 'max-age=86400',
+        }
+
+
+Fields
+------
+
+Once you're done, ``default_storage`` will be the S3 storage::
+
+    >>> from django.core.files.storage import default_storage
+    >>> print default_storage.__class__
+    <class 'S3Storage.S3Storage'>
+
+This way, if you define a new ``FileField``, it will use the S3 storage::
+
+    >>> from django.db import models
+    >>> class Resume(models.Model):
+    ...     pdf = models.FileField(upload_to='pdfs')
+    ...     photos = models.ImageField(upload_to='photos')
+    ...
+    >>> resume = Resume()
+    >>> print resume.pdf.storage
+    <S3Storage.S3Storage object at ...>
+
+
+Tests
+=====
+
+Initialization::
+
+    >>> from django.core.files.storage import default_storage
+    >>> from django.core.files.base import ContentFile
+    >>> from django.core.cache import cache
+    >>> from models import MyStorage
+
+Storage
+-------
+
+Standard file access options are available, and work as expected::
+
+    >>> default_storage.exists('storage_test')
+    False
+    >>> file = default_storage.open('storage_test', 'w')
+    >>> file.write('storage contents')
+    >>> file.close()
+    
+    >>> default_storage.exists('storage_test')
+    True
+    >>> file = default_storage.open('storage_test', 'r')
+    >>> file.read()
+    'storage contents'
+    >>> file.close()
+    
+    >>> default_storage.delete('storage_test')
+    >>> default_storage.exists('storage_test')
+    False
+
+Model
+-----
+
+An object without a file has limited functionality::
+    
+    >>> obj1 = MyStorage()
+    >>> obj1.normal
+    <FieldFile: None>
+    >>> obj1.normal.size
+    Traceback (most recent call last):
+    ...
+    ValueError: The 'normal' attribute has no file associated with it.
+    
+Saving a file enables full functionality::
+    
+    >>> obj1.normal.save('django_test.txt', ContentFile('content'))
+    >>> obj1.normal
+    <FieldFile: tests/django_test.txt>
+    >>> obj1.normal.size
+    7
+    >>> obj1.normal.read()
+    'content'
+    
+Files can be read in a little at a time, if necessary::
+    
+    >>> obj1.normal.open()
+    >>> obj1.normal.read(3)
+    'con'
+    >>> obj1.normal.read()
+    'tent'
+    >>> '-'.join(obj1.normal.chunks(chunk_size=2))
+    'co-nt-en-t'
+    
+Save another file with the same name::
+    
+    >>> obj2 = MyStorage()
+    >>> obj2.normal.save('django_test.txt', ContentFile('more content'))
+    >>> obj2.normal
+    <FieldFile: tests/django_test_.txt>
+    >>> obj2.normal.size
+    12
+    
+Push the objects into the cache to make sure they pickle properly::
+    
+    >>> cache.set('obj1', obj1)
+    >>> cache.set('obj2', obj2)
+    >>> cache.get('obj2').normal
+    <FieldFile: tests/django_test_.txt>
+    
+Deleting an object deletes the file it uses, if there are no other objects
+still using that file::
+    
+    >>> obj2.delete()
+    >>> obj2.normal.save('django_test.txt', ContentFile('more content'))
+    >>> obj2.normal
+    <FieldFile: tests/django_test_.txt>
+    
+Default values allow an object to access a single file::
+    
+    >>> obj3 = MyStorage.objects.create()
+    >>> obj3.default
+    <FieldFile: tests/default.txt>
+    >>> obj3.default.read()
+    'default content'
+    
+But it shouldn't be deleted, even if there are no more objects using it::
+    
+    >>> obj3.delete()
+    >>> obj3 = MyStorage()
+    >>> obj3.default.read()
+    'default content'
+    
+Verify the fix for #5655, making sure the directory is only determined once::
+    
+    >>> obj4 = MyStorage()
+    >>> obj4.random.save('random_file', ContentFile('random content'))
+    >>> obj4.random
+    <FieldFile: .../random_file>
+    
+Clean up the temporary files::
+    
+    >>> obj1.normal.delete()
+    >>> obj2.normal.delete()
+    >>> obj3.default.delete()
+    >>> obj4.random.delete()
+
+"""
Add a comment to this file

storages_tests/__init__.py

Empty file removed.

storages_tests/manage.py

-# put patched django and S3 in PYTHONPATH
-import sys, os
-sys.path = [os.path.join(os.getcwd(), '../')] + sys.path
-
-from django.core.management import execute_manager
-
-try:
-    import settings # Assumed to be in the same directory.
-except ImportError:
-    import sys
-    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
-    sys.exit(1)
-
-if __name__ == "__main__":
-    execute_manager(settings)

storages_tests/models.py

-
-import tempfile
-
-from django.db import models
-from django.core.files.base import ContentFile
-from django.core.files.storage import default_storage as s3_storage
-from django.core.cache import cache
-
-# Write out a file to be used as default content
-s3_storage.save('tests/default.txt', ContentFile('default content'))
-
-class MyStorage(models.Model):
-    def custom_upload_to(self, filename):
-        return 'foo'
-
-    def random_upload_to(self, filename):
-        # This returns a different result each time,
-        # to make sure it only gets called once.
-        import random
-        return '%s/%s' % (random.randint(100, 999), filename)
-
-    normal = models.FileField(storage=s3_storage, upload_to='tests')
-    custom = models.FileField(storage=s3_storage, upload_to=custom_upload_to)
-    random = models.FileField(storage=s3_storage, upload_to=random_upload_to)
-    default = models.FileField(storage=s3_storage, upload_to='tests', default='tests/default.txt')

storages_tests/tests.py

-"""
-=================
-Django S3 storage
-=================
-
-Usage
-=====
-
-Settings
---------
-
-``DEFAULT_FILE_STORAGE``
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-This setting store the path to the S3 storage class, the first part correspond
-to the filepath and the second the name of the class, if you've got
-``example.com`` in your ``PYTHONPATH`` and store your storage file in
-``example.com/libs/storages/S3Storage.py``, the resulting setting will be::
-
-    DEFAULT_FILE_STORAGE = 'libs.storages.S3Storage.S3Storage'
-
-If you keep the same filename as in repository, it should always end with 
-``S3Storage.S3Storage``.
-
-``AWS_ACCESS_KEY_ID``
-~~~~~~~~~~~~~~~~~~~~~
-
-Your Amazon Web Services access key, as a string.
-
-``AWS_SECRET_ACCESS_KEY``
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Your Amazon Web Services secret access key, as a string.
-
-``AWS_STORAGE_BUCKET_NAME``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Your Amazon Web Services storage bucket name, as a string.
-
-``AWS_CALLING_FORMAT``
-~~~~~~~~~~~~~~~~~~~~~~
-
-The way you'd like to call the Amazon Web Services API, for instance if you
-prefer subdomains::
-
-    from S3 import CallingFormat
-    AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
-
-``AWS_HEADERS`` (optionnal)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you'd like to set headers sent with each file of the storage::
-
-    # see http://developer.yahoo.com/performance/rules.html#expires
-    AWS_HEADERS = {
-        'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT', 
-        'Cache-Control': 'max-age=86400',
-        }
-
-
-Fields
-------
-
-Once you're done, ``default_storage`` will be the S3 storage::
-
-    >>> from django.core.files.storage import default_storage
-    >>> print default_storage.__class__
-    <class 'S3Storage.S3Storage'>
-
-This way, if you define a new ``FileField``, it will use the S3 storage::
-
-    >>> from django.db import models
-    >>> class Resume(models.Model):
-    ...     pdf = models.FileField(upload_to='pdfs')
-    ...     photos = models.ImageField(upload_to='photos')
-    ...
-    >>> resume = Resume()
-    >>> print resume.pdf.storage
-    <S3Storage.S3Storage object at ...>
-
-
-Tests
-=====
-
-Initialization::
-
-    >>> from django.core.files.storage import default_storage
-    >>> from django.core.files.base import ContentFile
-    >>> from django.core.cache import cache
-    >>> from models import MyStorage
-
-Storage
--------
-
-Standard file access options are available, and work as expected::
-
-    >>> default_storage.exists('storage_test')
-    False
-    >>> file = default_storage.open('storage_test', 'w')
-    >>> file.write('storage contents')
-    >>> file.close()
-    
-    >>> default_storage.exists('storage_test')
-    True
-    >>> file = default_storage.open('storage_test', 'r')
-    >>> file.read()
-    'storage contents'
-    >>> file.close()
-    
-    >>> default_storage.delete('storage_test')
-    >>> default_storage.exists('storage_test')
-    False
-
-Model
------
-
-An object without a file has limited functionality::
-    
-    >>> obj1 = MyStorage()
-    >>> obj1.normal
-    <FieldFile: None>
-    >>> obj1.normal.size
-    Traceback (most recent call last):
-    ...
-    ValueError: The 'normal' attribute has no file associated with it.
-    
-Saving a file enables full functionality::
-    
-    >>> obj1.normal.save('django_test.txt', ContentFile('content'))
-    >>> obj1.normal
-    <FieldFile: tests/django_test.txt>
-    >>> obj1.normal.size
-    7
-    >>> obj1.normal.read()
-    'content'
-