Rich Leland avatar Rich Leland committed bafb3c6

Created storages folder and moved backends into it.

Comments (0)

Files changed (23)

Add a comment to this file

backends/__init__.py

Empty file removed.

backends/couchdb.py

-"""
-This is a Custom Storage System for Django with CouchDB backend.
-Created by Christian Klein.
-(c) Copyright 2009 HUDORA GmbH. All Rights Reserved.
-"""
-import os
-from cStringIO import StringIO
-from urlparse import urljoin
-from urllib import quote_plus
-
-from django.conf import settings
-from django.core.files import File
-from django.core.files.storage import Storage
-from django.core.exceptions import ImproperlyConfigured
-
-try:
-    import couchdb
-except ImportError:
-    raise ImproperlyConfigured, "Could not load couchdb dependency.\
-    \nSee http://code.google.com/p/couchdb-python/"
-
-DEFAULT_SERVER= getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984')
-STORAGE_OPTIONS= getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {})
-
-
-class CouchDBStorage(Storage):
-    """
-    CouchDBStorage - a Django Storage class for CouchDB.
-
-    The CouchDBStorage can be configured in settings.py, e.g.::
-    
-        COUCHDB_STORAGE_OPTIONS = {
-            'server': "http://example.org", 
-            'database': 'database_name'
-        }
-
-    Alternatively, the configuration can be passed as a dictionary.
-    """
-    def __init__(self, **kwargs):
-        kwargs.update(STORAGE_OPTIONS)
-        self.base_url = kwargs.get('server', DEFAULT_SERVER)
-        server = couchdb.client.Server(self.base_url)
-        self.db = server[kwargs.get('database')]
-
-    def _put_file(self, name, content):
-        self.db[name] = {'size': len(content)}
-        self.db.put_attachment(self.db[name], content, filename='content')
-        return name
-
-    def get_document(self, name):
-        return self.db.get(name)
-
-    def _open(self, name, mode='rb'):
-        couchdb_file = CouchDBFile(name, self, mode=mode)
-        return couchdb_file
-
-    def _save(self, name, content):
-        content.open()
-        if hasattr(content, 'chunks'):
-            content_str = ''.join(chunk for chunk in content.chunks())
-        else:
-            content_str = content.read()
-        name = name.replace('/', '-')
-        return self._put_file(name, content_str)
-
-    def exists(self, name):
-        return name in self.db
-
-    def size(self, name):
-        doc = self.get_document(name)
-        if doc:
-            return doc['size']
-        return 0
-
-    def url(self, name):
-        return urljoin(self.base_url, 
-                       os.path.join(quote_plus(self.db.name), 
-                       quote_plus(name), 
-                       'content'))
-
-    def delete(self, name):
-        try:
-            del self.db[name]
-        except couchdb.client.ResourceNotFound:
-            raise IOError("File not found: %s" % name)
-
-    #def listdir(self, name):
-    # _all_docs?
-    #    pass
-
-
-class CouchDBFile(File):
-    """
-    CouchDBFile - a Django File-like class for CouchDB documents.
-    """
-
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-
-        try:
-            self._doc = self._storage.get_document(name)
-
-            tmp, ext = os.path.split(name)
-            if ext:
-                filename = "content." + ext
-            else:
-                filename = "content"
-            attachment = self._storage.db.get_attachment(self._doc, filename=filename)
-            self.file = StringIO(attachment)
-        except couchdb.client.ResourceNotFound:
-            if 'r' in self._mode:
-                raise ValueError("The file cannot be reopened.")
-            else:
-                self.file = StringIO()
-                self._is_dirty = True
-
-    @property
-    def size(self):
-        return self._doc['size']
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._put_file(self._name, self.file.getvalue())
-        self.file.close()
-
-

backends/database.py

-# DatabaseStorage for django.
-# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
-import StringIO
-import urlparse
-
-from django.conf import settings
-from django.core.files import File
-from django.core.files.storage import Storage
-from django.core.exceptions import ImproperlyConfigured
-
-try:
-    import pyodbc
-except ImportError:
-    raise ImproperlyConfigured, "Could not load pyodbc dependency.\
-    \nSee http://code.google.com/p/pyodbc/"
-
-
-class DatabaseStorage(Storage):
-    """
-    Class DatabaseStorage provides storing files in the database. 
-    """
-
-    def __init__(self, option=settings.DB_FILES):
-        """Constructor. 
-        
-        Constructs object using dictionary either specified in constructor or
-in settings.DB_FILES. 
-        
-        @param option dictionary with 'db_table', 'fname_column',
-'blob_column', 'size_column', 'base_url'  keys. 
-        
-        option['db_table']
-            Table to work with.
-        option['fname_column']
-            Column in the 'db_table' containing filenames (filenames can
-contain paths). Values should be the same as where FileField keeps
-filenames. 
-            It is used to map filename to blob_column. In sql it's simply
-used in where clause. 
-        option['blob_column']
-            Blob column (for example 'image' type), created manually in the
-'db_table', used to store image.
-        option['size_column']
-            Column to store file size. Used for optimization of size()
-method (another way is to open file and get size)
-        option['base_url']
-            Url prefix used with filenames. Should be mapped to the view,
-that returns an image as result. 
-        """
-        
-        if not option or not (option.has_key('db_table') and option.has_key('fname_column') and option.has_key('blob_column')
-                              and option.has_key('size_column') and option.has_key('base_url') ):
-            raise ValueError("You didn't specify required options")
-        self.db_table = option['db_table']
-        self.fname_column = option['fname_column']
-        self.blob_column = option['blob_column']
-        self.size_column = option['size_column']
-        self.base_url = option['base_url']
-
-        #get database settings
-        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
-        self.DATABASE_NAME = settings.DATABASE_NAME
-        self.DATABASE_USER = settings.DATABASE_USER
-        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
-        self.DATABASE_HOST = settings.DATABASE_HOST
-        
-        self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME,
-                                                                                          self.DATABASE_USER, self.DATABASE_PASSWORD) )
-        self.cursor = self.connection.cursor()
-
-    def _open(self, name, mode='rb'):
-        """Open a file from database. 
-        
-        @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends paths with "/".
-        If there is no such file in the db, returns None
-        """
-        
-        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode
-
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone()
-        if row is None:
-            return None
-        inMemFile = StringIO.StringIO(row[0])
-        inMemFile.name = name
-        inMemFile.mode = mode
-        
-        retFile = File(inMemFile)
-        return retFile
-
-    def _save(self, name, content):
-        """Save 'content' as file named 'name'.
-        
-        @note '\' in path will be converted to '/'. 
-        """
-        
-        name = name.replace('\\', '/')
-        binary = pyodbc.Binary(content.read())
-        size = len(binary)
-        
-        #todo: check result and do something (exception?) if failed.
-        if self.exists(name):
-            self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), 
-                                 (binary, size)  )
-        else:
-            self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size)  )
-        self.connection.commit()
-        return name
-
-    def exists(self, name):
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone()
-        return row is not None
-    
-    def get_available_name(self, name):
-        return name
-
-    def delete(self, name):
-        if self.exists(name):
-            self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name))
-            self.connection.commit()
-
-    def url(self, name):
-        if self.base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        return urlparse.urljoin(self.base_url, name).replace('\\', '/')
-    
-    def size(self, name):
-        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone()
-        if row is None:
-            return 0
-        else:
-            return int(row[0])

backends/ftp.py

-# FTP storage class for Django pluggable storage system.
-# Author: Rafal Jonca <jonca.rafal@gmail.com>
-# License: MIT
-# Comes from http://www.djangosnippets.org/snippets/1269/
-#
-# Usage:
-#
-# Add below to settings.py:
-# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
-#
-# In models.py you can write:
-# from FTPStorage import FTPStorage
-# fs = FTPStorage()
-# class FTPTest(models.Model):
-#     file = models.FileField(upload_to='a/b/c/', storage=fs)
-
-import os
-import ftplib
-import urlparse
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from django.conf import settings
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.core.exceptions import ImproperlyConfigured
-
-
-class FTPStorageException(Exception): pass
-
-class FTPStorage(Storage):
-    """FTP Storage class for Django pluggable storage system."""
-
-    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
-        self._config = self._decode_location(location)
-        self._base_url = base_url
-        self._connection = None
-
-    def _decode_location(self, location):
-        """Return splitted configuration data from location."""
-        splitted_url = urlparse.urlparse(location)
-        config = {}
-        
-        if splitted_url.scheme not in ('ftp', 'aftp'):
-            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
-        if splitted_url.hostname == '':
-            raise ImproperlyConfigured('You must at least provide hostname!')
-            
-        if splitted_url.scheme == 'aftp':
-            config['active'] = True
-        else:
-            config['active'] = False
-        config['path'] = splitted_url.path
-        config['host'] = splitted_url.hostname
-        config['user'] = splitted_url.username
-        config['passwd'] = splitted_url.password
-        config['port'] = int(splitted_url.port)
-        
-        return config
-
-    def _start_connection(self):
-        # Check if connection is still alive and if not, drop it.
-        if self._connection is not None:
-            try:
-                self._connection.pwd()
-            except ftplib.all_errors, e:
-                self._connection = None
-        
-        # Real reconnect
-        if self._connection is None:
-            ftp = ftplib.FTP()
-            try:
-                ftp.connect(self._config['host'], self._config['port'])
-                ftp.login(self._config['user'], self._config['passwd'])
-                if self._config['active']:
-                    ftp.set_pasv(False)
-                if self._config['path'] != '':
-                    ftp.cwd(self._config['path'])
-                self._connection = ftp
-                return
-            except ftplib.all_errors, e:
-                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))
-
-    def disconnect(self):
-        self._connection.quit()
-        self._connection = None
-
-    def _mkremdirs(self, path):
-        pwd = self._connection.pwd()
-        path_splitted = path.split('/')
-        for path_part in path_splitted:
-            try:
-                self._connection.cwd(path_part)
-            except:
-                try:
-                    self._connection.mkd(path_part)
-                    self._connection.cwd(path_part)
-                except ftplib.all_errors, e:
-                    raise FTPStorageException('Cannot create directory chain %s' % path)                    
-        self._connection.cwd(pwd)
-        return
-
-    def _put_file(self, name, content):
-        # Connection must be open!
-        try:
-            self._mkremdirs(os.path.dirname(name))
-            pwd = self._connection.pwd()
-            self._connection.cwd(os.path.dirname(name))
-            self._connection.storbinary('STOR ' + os.path.basename(name), content.file, content.DEFAULT_CHUNK_SIZE)
-            self._connection.cwd(pwd)
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error writing file %s' % name)
-
-    def _open(self, name, mode='rb'):
-        remote_file = FTPStorageFile(name, self, mode=mode)
-        return remote_file
-
-    def _read(self, name):
-        memory_file = StringIO()
-        try:
-            pwd = self._connection.pwd()
-            self._connection.cwd(os.path.dirname(name))
-            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
-            self._connection.cwd(pwd)
-            return memory_file
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error reading file %s' % name)
-        
-    def _save(self, name, content):
-        content.open()
-        self._start_connection()
-        self._put_file(name, content)
-        content.close()
-        return name
-
-    def _get_dir_details(self, path):
-        # Connection must be open!
-        try:
-            lines = []
-            self._connection.retrlines('LIST '+path, lines.append)
-            dirs = {}
-            files = {}
-            for line in lines:
-                words = line.split()
-                if len(words) < 6:
-                    continue
-                if words[-2] == '->':
-                    continue
-                if words[0][0] == 'd':
-                    dirs[words[-1]] = 0;
-                elif words[0][0] == '-':
-                    files[words[-1]] = int(words[-5]);
-            return dirs, files
-        except ftplib.all_errors, msg:
-            raise FTPStorageException('Error getting listing for %s' % path)
-
-    def listdir(self, path):
-        self._start_connection()
-        try:
-            dirs, files = self._get_dir_details(path)
-            return dirs.keys(), files.keys()
-        except FTPStorageException, e:
-            raise
-
-    def delete(self, name):
-        if not self.exists(name):
-            return
-        self._start_connection()
-        try:
-            self._connection.delete(name)
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error when removing %s' % name)                 
-
-    def exists(self, name):
-        self._start_connection()
-        try:
-            if os.path.basename(name) in self._connection.nlst(os.path.dirname(name) + '/'):
-                return True
-            else:
-                return False
-        except ftplib.error_temp, e:
-            return False
-        except ftplib.error_perm, e:
-            # error_perm: 550 Can't find file
-            return False
-        except ftplib.all_errors, e:
-            raise FTPStorageException('Error when testing existence of %s' % name)            
-
-    def size(self, name):
-        self._start_connection()
-        try:
-            dirs, files = self._get_dir_details(os.path.dirname(name))
-            if os.path.basename(name) in files:
-                return files[os.path.basename(name)]
-            else:
-                return 0
-        except FTPStorageException, e:
-            return 0
-
-    def url(self, name):
-        if self._base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        return urlparse.urljoin(self._base_url, name).replace('\\', '/')
-
-class FTPStorageFile(File):
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-        self.file = StringIO()
-        self._is_read = False
-    
-    @property
-    def size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self._name)
-        return self._size
-
-    def read(self, num_bytes=None):
-        if not self._is_read:
-            self._storage._start_connection()
-            self.file = self._storage._read(self._name)
-            self._storage._end_connection()
-            self._is_read = True
-            
-        return self.file.read(num_bytes)
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-        self._is_read = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._start_connection()
-            self._storage._put_file(self._name, self.file.getvalue())
-            self._storage._end_connection()
-        self.file.close()

backends/image.py

-
-import os
-
-from django.core.files.storage import FileSystemStorage
-from django.core.exceptions import ImproperlyConfigured
-
-try:
-    from PIL import ImageFile as PILImageFile
-except ImportError:
-    raise ImproperlyConfigured, "Could not load PIL dependency.\
-    \nSee http://www.pythonware.com/products/pil/"
-
-
-class ImageStorage(FileSystemStorage):
-    """
-    A FileSystemStorage which normalizes extensions for images.
-    
-    Comes from http://www.djangosnippets.org/snippets/965/
-    """
-    
-    def find_extension(self, format):
-        """Normalizes PIL-returned format into a standard, lowercase extension."""
-        format = format.lower()
-        
-        if format == 'jpeg':
-            format = 'jpg'
-        
-        return format
-    
-    def save(self, name, content):
-        dirname = os.path.dirname(name)
-        basename = os.path.basename(name)
-        
-        # Use PIL to determine filetype
-        
-        p = PILImageFile.Parser()
-        while 1:
-            data = content.read(1024)
-            if not data:
-                break
-            p.feed(data)
-            if p.image:
-                im = p.image
-                break
-        
-        extension = self.find_extension(im.format)
-        
-        # Does the basename already have an extension? If so, replace it.
-        # bare as in without extension
-        bare_basename, _ = os.path.splitext(basename)
-        basename = bare_basename + '.' + extension
-        
-        name = os.path.join(dirname, basename)
-        return super(ImageStorage, self).save(name, content)
-    

backends/mogile.py

-import urlparse
-import mimetypes
-from StringIO import StringIO
-
-from django.conf import settings
-from django.core.cache import cache
-from django.utils.text import force_unicode
-from django.core.files.storage import Storage
-from django.http import HttpResponse, HttpResponseNotFound
-from django.core.exceptions import ImproperlyConfigured
-
-try:
-    import mogilefs
-except ImportError:
-    raise ImproperlyConfigured, "Could not load mogilefs dependency.\
-    \nSee http://mogilefs.pbworks.com/Client-Libraries"
-
-
-class MogileFSStorage(Storage):
-    """MogileFS filesystem storage"""
-    def __init__(self, base_url=settings.MEDIA_URL):
-        
-        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
-        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
-            self.base_url = settings.MOGILEFS_MEDIA_URL
-        else:
-            self.base_url = base_url
-                
-        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
-            if not hasattr(settings, var):
-                raise ImproperlyConfigured, "You must define %s to use the MogileFS backend." % var
-            
-        self.trackers = settings.MOGILEFS_TRACKERS
-        self.domain = settings.MOGILEFS_DOMAIN
-        self.client = mogilefs.Client(self.domain, self.trackers)
-    
-    def get_mogile_paths(self, filename):
-        return self.client.get_paths(filename)  
-    
-    # The following methods define the Backend API
-
-    def filesize(self, filename):
-        raise NotImplemented
-        #return os.path.getsize(self._get_absolute_path(filename))
-    
-    def path(self, filename):
-        paths = self.get_mogile_paths(filename)
-        if paths:
-            return self.get_mogile_paths(filename)[0]
-        else:
-            return None
-    
-    def url(self, filename):
-        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')
-
-    def open(self, filename, mode='rb'):
-        raise NotImplemented
-        #return open(self._get_absolute_path(filename), mode)
-
-    def exists(self, filename):
-        return filename in self.client
-
-    def save(self, filename, raw_contents):
-        filename = self.get_available_filename(filename)
-        
-        if not hasattr(self, 'mogile_class'):
-            self.mogile_class = None
-
-        # Write the file to mogile
-        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
-        if success:
-            print "Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])
-        else:
-            print "FAILURE writing file %s" % (filename)
-
-        return force_unicode(filename.replace('\\', '/'))
-
-    def delete(self, filename):
-        
-        self.client.delete(filename)
-            
-        
-def serve_mogilefs_file(request, key=None):
-    """
-    Called when a user requests an image.
-    Either reproxy the path to perlbal, or serve the image outright
-    """
-    # not the best way to do this, since we create a client each time
-    mimetype = mimetypes.guess_type(key)[0] or "application/x-octet-stream"
-    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
-    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
-        # we're reproxying with perlbal
-        
-        # check the path cache
-        
-        path = cache.get(key)
-
-        if not path:
-            path = client.get_paths(key)
-            cache.set(key, path, 60)
-    
-        if path:
-            response = HttpResponse(content_type=mimetype)
-            response['X-REPROXY-URL'] = path[0]
-        else:
-            response = HttpResponseNotFound()
-    
-    else:
-        # we don't have perlbal, let's just serve the image via django
-        file_data = client[key]
-        if file_data:
-            response = HttpResponse(file_data, mimetype=mimetype)
-        else:
-            response = HttpResponseNotFound()
-    
-    return response

backends/mosso.py

-"""
-Custom storage for django with Mosso Cloud Files backend.
-Created by Rich Leland <rich@richleland.com>.
-"""
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files import File
-from django.core.files.storage import Storage
-from django.utils.text import get_valid_filename
-
-try:
-    import cloudfiles
-    from cloudfiles.errors import NoSuchObject
-except ImportError:
-    raise ImproperlyConfigured("Could not load cloudfiles dependency. See "
-                               "http://www.mosso.com/cloudfiles.jsp.")
-
-# TODO: implement TTL into cloudfiles methods
-CLOUDFILES_TTL = getattr(settings, 'CLOUDFILES_TTL', 600)
-
-
-def cloudfiles_upload_to(self, filename):
-    """
-    Simple, custom upload_to because Cloud Files doesn't support
-    nested containers (directories).
-
-    Actually found this out from @minter:
-    @richleland The Cloud Files APIs do support pseudo-subdirectories, by
-    creating zero-byte files with type application/directory.
-
-    May implement in a future version.
-    """
-    return get_valid_filename(filename)
-
-
-class CloudFilesStorage(Storage):
-    """
-    Custom storage for Mosso Cloud Files.
-    """
-    default_quick_listdir = True
-
-    def __init__(self, username=None, api_key=None, container=None,
-                 connection_kwargs=None):
-        """
-        Initialize the settings for the connection and container.
-        """
-        self.username = username or settings.CLOUDFILES_USERNAME
-        self.api_key = api_key or settings.CLOUDFILES_API_KEY
-        self.container_name = container or settings.CLOUDFILES_CONTAINER
-        self.connection_kwargs = connection_kwargs or {}
-
-    def __getstate__(self):
-        """
-        Return a picklable representation of the storage.
-        """
-        return dict(username=self.username,
-                    api_key=self.api_key,
-                    container_name=self.container_name,
-                    connection_kwargs=self.connection_kwargs)
-
-    def _get_connection(self):
-        if not hasattr(self, '_connection'):
-            self._connection = cloudfiles.get_connection(self.username,
-                                    self.api_key, **self.connection_kwargs)
-        return self._connection
-
-    def _set_connection(self, value):
-        self._connection = value
-
-    connection = property(_get_connection, _set_connection)
-
-    def _get_container(self):
-        if not hasattr(self, '_container'):
-            self.container = self.connection.get_container(
-                                                        self.container_name)
-        return self._container
-
-    def _set_container(self, container):
-        """
-        Set the container, making it publicly available (on Limelight CDN) if
-        it is not already.
-        """
-        if not container.is_public():
-            container.make_public()
-        if hasattr(self, '_container_public_uri'):
-            delattr(self, '_container_public_uri')
-        self._container = container
-
-    container = property(_get_container, _set_container)
-
-    def _get_container_url(self):
-        if not hasattr(self, '_container_public_uri'):
-            self._container_public_uri = self.container.public_uri()
-        return self._container_public_uri
-
-    container_url = property(_get_container_url)
-
-    def _get_cloud_obj(self, name):
-        """
-        Helper function to get retrieve the requested Cloud Files Object.
-        """
-        return self.container.get_object(name)
-
-    def _open(self, name, mode='rb'):
-        """
-        Return the CloudFilesStorageFile.
-        """
-        return CloudFilesStorageFile(storage=self, name=name)
-
-    def _save(self, name, content):
-        """
-        Use the Cloud Files service to write ``content`` to a remote file
-        (called ``name``).
-        """
-        content.open()
-        cloud_obj = self.container.create_object(name)
-        cloud_obj.size = content.file.size
-        # If the content type is available, pass it in directly rather than
-        # getting the cloud object to try to guess.
-        if hasattr(content.file, 'content_type'):
-            cloud_obj.content_type = content.file.content_type
-        cloud_obj.send(content)
-        content.close()
-        return name
-
-    def delete(self, name):
-        """
-        Deletes the specified file from the storage system.
-        """
-        self.container.delete_object(name)
-
-    def exists(self, name):
-        """
-        Returns True if a file referenced by the given name already exists in
-        the storage system, or False if the name is available for a new file.
-        """
-        try:
-            self._get_cloud_obj(name)
-            return True
-        except NoSuchObject:
-            return False
-
-    def listdir(self, path):
-        """
-        Lists the contents of the specified path, returning a 2-tuple; the
-        first being an empty list of directories (not available for quick-
-        listing), the second being a list of filenames.
-
-        If the list of directories is required, use the full_listdir method.
-        """
-        files = []
-        if path and not path.endswith('/'):
-            path = '%s/' % path
-        path_len = len(path)
-        for name in self.container.list_objects(path=path):
-            files.append(name[path_len:])
-        return ([], files)
-
-    def full_listdir(self, path):
-        """
-        Lists the contents of the specified path, returning a 2-tuple of lists;
-        the first item being directories, the second item being files.
-
-        On large containers, this may be a slow operation for root containers
-        because every single object must be returned (cloudfiles does not
-        provide an explicit way of listing directories).
-        """
-        dirs = set()
-        files = []
-        if path and not path.endswith('/'):
-            path = '%s/' % path
-        path_len = len(path)
-        for name in self.container.list_objects(prefix=path):
-            name = name[path_len:]
-            slash = name[1:-1].find('/') + 1
-            if slash:
-                dirs.add(name[:slash])
-            elif name:
-                files.append(name)
-        dirs = list(dirs)
-        dirs.sort()
-        return (dirs, files)
-
-    def size(self, name):
-        """
-        Returns the total size, in bytes, of the file specified by name.
-        """
-        return self._get_cloud_obj(name).size
-
-    def url(self, name):
-        """
-        Returns an absolute URL where the file's contents can be accessed
-        directly by a web browser.
-        """
-        return '%s/%s' % (self.container_url, name)
-
-
-class CloudFilesStorageFile(File):
-    closed = False
-
-    def __init__(self, storage, name, *args, **kwargs):
-        self._storage = storage
-        super(CloudFilesStorageFile, self).__init__(file=None, name=name,
-                                                    *args, **kwargs)
-
-    def _get_size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self.name)
-        return self._size
-
-    def _set_size(self, size):
-        self._size = size
-
-    size = property(_get_size, _set_size)
-
-    def _get_file(self):
-        if not hasattr(self, '_file'):
-            self._file = self._storage._get_cloud_obj(self.name)
-        return self._file
-
-    def _set_file(self, value):
-        if value is None:
-            if hasattr(self, '_file'):
-                del self._file
-        else:
-            self._file = value
-
-    file = property(_get_file, _set_file)
-
-    def read(self, num_bytes=None):
-        data = self.file.read(size=num_bytes or -1, offset=self._pos)
-        self._pos += len(data)
-        return data
-
-    def open(self, *args, **kwargs):
-        """
-        Open the cloud file object.
-        """
-        self.file
-        self._pos = 0
-
-    def close(self, *args, **kwargs):
-        self._pos = 0
-
-    @property
-    def closed(self):
-        return not hasattr(self, '_file')
-
-    def seek(self, pos):
-        self._pos = pos

backends/overwrite.py

-import os
-
-from django.conf import settings
-from django.core.files.storage import FileSystemStorage
-
-class OverwriteStorage(FileSystemStorage):
-    """
-    Comes from http://www.djangosnippets.org/snippets/976/
-    (even if it already exists in S3Storage for ages)
-    
-    See also Django #4339, which might add this functionality to core.
-    """
-    
-    def get_available_name(self, name):
-        """
-        Returns a filename that's free on the target storage system, and
-        available for new content to be written to.
-        """
-        if self.exists(name):
-            self.delete(name)
-        return name

backends/s3.py

-import os
-import mimetypes
-
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from StringIO import StringIO
-
-from django.conf import settings
-from django.core.exceptions import ImproperlyConfigured
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.utils.functional import curry
-
-try:
-    from S3 import AWSAuthConnection, QueryStringAuthGenerator
-except ImportError:
-    raise ImproperlyConfigured, "Could not load amazon's S3 bindings.\
-    \nSee http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134"
-
-ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
-SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
-HEADERS = 'AWS_HEADERS'
-
-DEFAULT_ACL= getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
-QUERYSTRING_ACTIVE= getattr(settings, 'AWS_QUERYSTRING_ACTIVE', False)
-QUERYSTRING_EXPIRE= getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 60)
-SECURE_URLS= getattr(settings, 'AWS_S3_SECURE_URLS', False)
-
-
-class S3Storage(Storage):
-    """Amazon Simple Storage Service"""
-
-    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
-            access_key=None, secret_key=None, acl=DEFAULT_ACL,
-            calling_format=settings.AWS_CALLING_FORMAT):
-        self.bucket = bucket
-        self.acl = acl
-
-        if not access_key and not secret_key:
-            access_key, secret_key = self._get_access_keys()
-
-        self.connection = AWSAuthConnection(access_key, secret_key,
-                            calling_format=calling_format)
-        self.generator = QueryStringAuthGenerator(access_key, secret_key, 
-                            calling_format=calling_format,
-                            is_secure=SECURE_URLS)
-        self.generator.set_expires_in(QUERYSTRING_EXPIRE)
-        
-        self.headers = getattr(settings, HEADERS, {})
-
-    def _get_access_keys(self):
-        access_key = getattr(settings, ACCESS_KEY_NAME, None)
-        secret_key = getattr(settings, SECRET_KEY_NAME, None)
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            access_key = os.environ.get(ACCESS_KEY_NAME)
-            secret_key = os.environ.get(SECRET_KEY_NAME)
-
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-
-        return None, None
-
-    def _get_connection(self):
-        return AWSAuthConnection(*self._get_access_keys())
-
-    def _clean_name(self, name):
-        # Useful for windows' paths
-        return os.path.normpath(name).replace('\\', '/')
-
-    def _put_file(self, name, content):
-        content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
-        self.headers.update({
-            'x-amz-acl': self.acl, 
-            'Content-Type': content_type,
-            'Content-Length' : len(content),
-        })
-        response = self.connection.put(self.bucket, name, content, self.headers)
-        if response.http_response.status not in (200, 206):
-            raise IOError("S3StorageError: %s" % response.message)
-
-    def _open(self, name, mode='rb'):
-        name = self._clean_name(name)
-        remote_file = S3StorageFile(name, self, mode=mode)
-        return remote_file
-
-    def _read(self, name, start_range=None, end_range=None):
-        name = self._clean_name(name)
-        if start_range is None:
-            headers = {}
-        else:
-            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
-        response = self.connection.get(self.bucket, name, headers)
-        if response.http_response.status not in (200, 206):
-            raise IOError("S3StorageError: %s" % response.message)
-        headers = response.http_response.msg
-        return response.object.data, headers.get('etag', None), headers.get('content-range', None)
-        
-    def _save(self, name, content):
-        name = self._clean_name(name)
-        content.open()
-        if hasattr(content, 'chunks'):
-            content_str = ''.join(chunk for chunk in content.chunks())
-        else:
-            content_str = content.read()
-        self._put_file(name, content_str)
-        return name
-    
-    def delete(self, name):
-        name = self._clean_name(name)
-        response = self.connection.delete(self.bucket, name)
-        if response.http_response.status != 204:
-            raise IOError("S3StorageError: %s" % response.message)
-
-    def exists(self, name):
-        name = self._clean_name(name)
-        response = self.connection._make_request('HEAD', self.bucket, name)
-        return response.status == 200
-
-    def size(self, name):
-        name = self._clean_name(name)
-        response = self.connection._make_request('HEAD', self.bucket, name)
-        content_length = response.getheader('Content-Length')
-        return content_length and int(content_length) or 0
-    
-    def url(self, name):
-        name = self._clean_name(name)
-        if QUERYSTRING_ACTIVE:
-            return self.generator.generate_url('GET', self.bucket, name)
-        else:
-            return self.generator.make_bare_url(self.bucket, name)
-
-    ## UNCOMMENT BELOW IF NECESSARY
-    #def get_available_name(self, name):
-    #    """ Overwrite existing file with the same name. """
-    #    name = self._clean_name(name)
-    #    return name
-
-
-class S3StorageFile(File):
-    def __init__(self, name, storage, mode):
-        self._name = name
-        self._storage = storage
-        self._mode = mode
-        self._is_dirty = False
-        self.file = StringIO()
-        self.start_range = 0
-    
-    @property
-    def size(self):
-        if not hasattr(self, '_size'):
-            self._size = self._storage.size(self._name)
-        return self._size
-
-    def read(self, num_bytes=None):
-        if num_bytes is None:
-            args = []
-            self.start_range = 0
-        else:
-            args = [self.start_range, self.start_range+num_bytes-1]
-        data, etags, content_range = self._storage._read(self._name, *args)
-        if content_range is not None:
-            current_range, size = content_range.split(' ', 1)[1].split('/', 1)
-            start_range, end_range = current_range.split('-', 1)
-            self._size, self.start_range = int(size), int(end_range)+1
-        self.file = StringIO(data)
-        return self.file.getvalue()
-
-    def write(self, content):
-        if 'w' not in self._mode:
-            raise AttributeError("File was opened for read-only access.")
-        self.file = StringIO(content)
-        self._is_dirty = True
-
-    def close(self):
-        if self._is_dirty:
-            self._storage._put_file(self._name, self.file.getvalue())
-        self.file.close()

backends/s3boto.py

-import os
-
-from django.conf import settings
-from django.core.files.base import File
-from django.core.files.storage import Storage
-from django.utils.functional import curry
-from django.core.exceptions import ImproperlyConfigured
-
-try:
-    from boto.s3.connection import S3Connection
-    from boto.s3.key import Key
-except ImportError:
-    raise ImproperlyConfigured, "Could not load Boto's S3 bindings.\
-    \nSee http://code.google.com/p/boto/"
-
-ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
-SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
-HEADERS         = 'AWS_HEADERS'
-BUCKET_NAME     = 'AWS_STORAGE_BUCKET_NAME'
-DEFAULT_ACL     = 'AWS_DEFAULT_ACL'
-QUERYSTRING_AUTH = 'AWS_QUERYSTRING_AUTH'
-QUERYSTRING_EXPIRE = 'AWS_QUERYSTRING_EXPIRE'
-
-BUCKET_PREFIX     = getattr(settings, BUCKET_NAME, {})
-HEADERS           = getattr(settings, HEADERS, {})
-DEFAULT_ACL       = getattr(settings, DEFAULT_ACL, 'public-read')
-QUERYSTRING_AUTH  = getattr(settings, QUERYSTRING_AUTH, True)
-QUERYSTRING_EXPIRE= getattr(settings, QUERYSTRING_EXPIRE, 3600)
-
-
-class S3BotoStorage(Storage):
-    """Amazon Simple Storage Service using Boto"""
-    
-    def __init__(self, bucket="root", bucketprefix=BUCKET_PREFIX, 
-            access_key=None, secret_key=None, acl=DEFAULT_ACL, headers=HEADERS):
-        self.acl = acl
-        self.headers = headers
-        
-        if not access_key and not secret_key:
-             access_key, secret_key = self._get_access_keys()
-        
-        self.connection = S3Connection(access_key, secret_key)
-        self.bucket = self.connection.create_bucket(bucketprefix + bucket)
-        self.bucket.set_acl(self.acl)
-    
-    def _get_access_keys(self):
-        access_key = getattr(settings, ACCESS_KEY_NAME, None)
-        secret_key = getattr(settings, SECRET_KEY_NAME, None)
-        if (access_key or secret_key) and (not access_key or not secret_key):
-            access_key = os.environ.get(ACCESS_KEY_NAME)
-            secret_key = os.environ.get(SECRET_KEY_NAME)
-        
-        if access_key and secret_key:
-            # Both were provided, so use them
-            return access_key, secret_key
-        
-        return None, None
-    
-    def _clean_name(self, name):
-        # Useful for windows' paths
-        return os.path.normpath(name).replace('\\', '/')
-
-    def _open(self, name, mode='rb'):
-        name = self._clean_name(name)
-        return S3BotoStorageFile(name, mode, self)
-    
-    def _save(self, name, content):
-        name = self._clean_name(name)
-        headers = self.headers
-        if hasattr(content.file, 'content_type'):
-            headers['Content-Type'] = content.file.content_type
-        content.name = name
-        k = self.bucket.get_key(name)
-        if not k:
-            k = self.bucket.new_key(name)
-        k.set_contents_from_file(content, headers=headers, policy=self.acl)
-        return name
-    
-    def delete(self, name):
-        name = self._clean_name(name)
-        self.bucket.delete_key(name)
-    
-    def exists(self, name):
-        name = self._clean_name(name)
-        k = Key(self.bucket, name)
-        return k.exists()
-    
-    def listdir(self, name):
-        name = self._clean_name(name)
-        return [l.name for l in self.bucket.list() if not len(name) or l.name[:len(name)] == name]
-    
-    def size(self, name):
-        name = self._clean_name(name)
-        return self.bucket.get_key(name).size
-    
-    def url(self, name):
-        name = self._clean_name(name)
-        return self.bucket.get_key(name).generate_url(QUERYSTRING_EXPIRE, method='GET', query_auth=QUERYSTRING_AUTH)
-    
-    def get_available_name(self, name):
-        """ Overwrite existing file with the same name. """
-        name = self._clean_name(name)
-        return name
-
-
-class S3BotoStorageFile(File):
-    def __init__(self, name, mode, storage):
-        self._storage = storage
-        self._name = name
-        self._mode = mode
-        self.key = storage.bucket.get_key(name)
-    
-    def size(self):
-        return self.key.size
-    
-    def read(self, *args, **kwargs):
-        return self.key.read(*args, **kwargs)
-    
-    def write(self, content):
-        self.key.set_contents_from_string(content, headers=self._storage.headers, acl=self._storage.acl)
-    
-    def close(self):
-        self.key.close()

backends/symlinkorcopy.py

-import os
-
-from django.conf import settings
-from django.core.files.storage import FileSystemStorage
-
-__doc__ = """
-I needed to efficiently create a mirror of a directory tree (so that 
-"origin pull" CDNs can automatically pull files). The trick was that 
-some files could be modified, and some could be identical to the original. 
-Of course it doesn't make sense to store the exact same data twice on the 
-file system. So I created SymlinkOrCopyStorage.
-
-SymlinkOrCopyStorage allows you to symlink a file when it's identical to 
-the original file and to copy the file if it's modified.
-Of course, it's impossible to know if a file is modified just by looking 
-at the file, without knowing what the original file was.
-That's what the symlinkWithin parameter is for. It accepts one or more paths 
-(if multiple, they should be concatenated using a colon (:)). 
-Files that will be saved using SymlinkOrCopyStorage are then checked on their 
-location: if they are within one of the symlink_within directories, 
-they will be symlinked, otherwise they will be copied.
-
-The rationale is that unmodified files will exist in their original location, 
-e.g. /htdocs/example.com/image.jpg and modified files will be stored in 
-a temporary directory, e.g. /tmp/image.jpg.
-"""
-
-class SymlinkOrCopyStorage(FileSystemStorage):
-    """Stores symlinks to files instead of actual files whenever possible
-    
-    When a file that's being saved is currently stored in the symlink_within
-    directory, then symlink the file. Otherwise, copy the file.
-    """
-    def __init__(self, location=settings.MEDIA_ROOT, base_url=settings.MEDIA_URL, 
-            symlink_within=None):
-        super(SymlinkOrCopyStorage, self).__init__(location, base_url)
-        self.symlink_within = symlink_within.split(":")
-
-    def _save(self, name, content):
-        full_path_dst = self.path(name)
-
-        directory = os.path.dirname(full_path_dst)
-        if not os.path.exists(directory):
-            os.makedirs(directory)
-        elif not os.path.isdir(directory):
-            raise IOError("%s exists and is not a directory." % directory)
-
-        full_path_src = os.path.abspath(content.name)
-
-        symlinked = False
-        # Only symlink if the current platform supports it.
-        if getattr(os, "symlink", False):
-            for path in self.symlink_within:
-                if full_path_src.startswith(path):
-                    os.symlink(full_path_src, full_path_dst)
-                    symlinked = True
-                    break
-
-        if not symlinked:
-            super(SymlinkOrCopyStorage, self)._save(name, content)
-
-        return name

Empty file added.

Add a comment to this file

storages/backends/__init__.py

Empty file added.

storages/backends/couchdb.py

+"""
+This is a Custom Storage System for Django with CouchDB backend.
+Created by Christian Klein.
+(c) Copyright 2009 HUDORA GmbH. All Rights Reserved.
+"""
+import os
+from cStringIO import StringIO
+from urlparse import urljoin
+from urllib import quote_plus
+
+from django.conf import settings
+from django.core.files import File
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    import couchdb
+except ImportError:
+    raise ImproperlyConfigured, "Could not load couchdb dependency.\
+    \nSee http://code.google.com/p/couchdb-python/"
+
+DEFAULT_SERVER= getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984')
+STORAGE_OPTIONS= getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {})
+
+
+class CouchDBStorage(Storage):
+    """
+    CouchDBStorage - a Django Storage class for CouchDB.
+
+    The CouchDBStorage can be configured in settings.py, e.g.::
+    
+        COUCHDB_STORAGE_OPTIONS = {
+            'server': "http://example.org", 
+            'database': 'database_name'
+        }
+
+    Alternatively, the configuration can be passed as a dictionary.
+    """
+    def __init__(self, **kwargs):
+        kwargs.update(STORAGE_OPTIONS)
+        self.base_url = kwargs.get('server', DEFAULT_SERVER)
+        server = couchdb.client.Server(self.base_url)
+        self.db = server[kwargs.get('database')]
+
+    def _put_file(self, name, content):
+        self.db[name] = {'size': len(content)}
+        self.db.put_attachment(self.db[name], content, filename='content')
+        return name
+
+    def get_document(self, name):
+        return self.db.get(name)
+
+    def _open(self, name, mode='rb'):
+        couchdb_file = CouchDBFile(name, self, mode=mode)
+        return couchdb_file
+
+    def _save(self, name, content):
+        content.open()
+        if hasattr(content, 'chunks'):
+            content_str = ''.join(chunk for chunk in content.chunks())
+        else:
+            content_str = content.read()
+        name = name.replace('/', '-')
+        return self._put_file(name, content_str)
+
+    def exists(self, name):
+        return name in self.db
+
+    def size(self, name):
+        doc = self.get_document(name)
+        if doc:
+            return doc['size']
+        return 0
+
+    def url(self, name):
+        return urljoin(self.base_url, 
+                       os.path.join(quote_plus(self.db.name), 
+                       quote_plus(name), 
+                       'content'))
+
+    def delete(self, name):
+        try:
+            del self.db[name]
+        except couchdb.client.ResourceNotFound:
+            raise IOError("File not found: %s" % name)
+
+    #def listdir(self, name):
+    # _all_docs?
+    #    pass
+
+
+class CouchDBFile(File):
+    """
+    CouchDBFile - a Django File-like class for CouchDB documents.
+    """
+
+    def __init__(self, name, storage, mode):
+        self._name = name
+        self._storage = storage
+        self._mode = mode
+        self._is_dirty = False
+
+        try:
+            self._doc = self._storage.get_document(name)
+
+            tmp, ext = os.path.split(name)
+            if ext:
+                filename = "content." + ext
+            else:
+                filename = "content"
+            attachment = self._storage.db.get_attachment(self._doc, filename=filename)
+            self.file = StringIO(attachment)
+        except couchdb.client.ResourceNotFound:
+            if 'r' in self._mode:
+                raise ValueError("The file cannot be reopened.")
+            else:
+                self.file = StringIO()
+                self._is_dirty = True
+
+    @property
+    def size(self):
+        return self._doc['size']
+
+    def write(self, content):
+        if 'w' not in self._mode:
+            raise AttributeError("File was opened for read-only access.")
+        self.file = StringIO(content)
+        self._is_dirty = True
+
+    def close(self):
+        if self._is_dirty:
+            self._storage._put_file(self._name, self.file.getvalue())
+        self.file.close()
+
+

storages/backends/database.py

+# DatabaseStorage for django.
+# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
+import StringIO
+import urlparse
+
+from django.conf import settings
+from django.core.files import File
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    import pyodbc
+except ImportError:
+    raise ImproperlyConfigured, "Could not load pyodbc dependency.\
+    \nSee http://code.google.com/p/pyodbc/"
+
+
+class DatabaseStorage(Storage):
+    """
+    Class DatabaseStorage provides file storage in the database.
+    """
+
+    def __init__(self, option=settings.DB_FILES):
+        """Constructor. 
+        
+        Constructs the object using a dictionary specified either in the
+constructor or in settings.DB_FILES.
+        
+        @param option dictionary with 'db_table', 'fname_column',
+'blob_column', 'size_column', 'base_url'  keys. 
+        
+        option['db_table']
+            Table to work with.
+        option['fname_column']
+            Column in the 'db_table' containing filenames (filenames can
+contain paths). Values should be the same as where FileField keeps
+filenames. 
+            It is used to map filename to blob_column. In SQL it is simply
+used in the WHERE clause.
+        option['blob_column']
+            Blob column (for example 'image' type), created manually in the
+'db_table', used to store image.
+        option['size_column']
+            Column to store file size. Used for optimization of size()
+method (another way is to open file and get size)
+        option['base_url']
+            Url prefix used with filenames. Should be mapped to the view,
+that returns an image as result. 
+        """
+        
+        if not option or not (option.has_key('db_table') and option.has_key('fname_column') and option.has_key('blob_column')
+                              and option.has_key('size_column') and option.has_key('base_url') ):
+            raise ValueError("You didn't specify required options")
+        self.db_table = option['db_table']
+        self.fname_column = option['fname_column']
+        self.blob_column = option['blob_column']
+        self.size_column = option['size_column']
+        self.base_url = option['base_url']
+
+        #get database settings
+        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
+        self.DATABASE_NAME = settings.DATABASE_NAME
+        self.DATABASE_USER = settings.DATABASE_USER
+        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
+        self.DATABASE_HOST = settings.DATABASE_HOST
+        
+        self.connection = pyodbc.connect('DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s'%(self.DATABASE_ODBC_DRIVER,self.DATABASE_HOST,self.DATABASE_NAME,
+                                                                                          self.DATABASE_USER, self.DATABASE_PASSWORD) )
+        self.cursor = self.connection.cursor()
+
+    def _open(self, name, mode='rb'):
+        """Open a file from database. 
+        
+        @param name filename or relative path to file based on base_url. path should contain only "/", but not "\". Apache sends paths with "/".
+        If there is no such file in the db, returns None
+        """
+        
+        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s"%mode
+
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.blob_column,self.db_table,self.fname_column,name) ).fetchone()
+        if row is None:
+            return None
+        inMemFile = StringIO.StringIO(row[0])
+        inMemFile.name = name
+        inMemFile.mode = mode
+        
+        retFile = File(inMemFile)
+        return retFile
+
+    def _save(self, name, content):
+        """Save 'content' as file named 'name'.
+        
+        @note '\' in path will be converted to '/'. 
+        """
+        
+        name = name.replace('\\', '/')
+        binary = pyodbc.Binary(content.read())
+        size = len(binary)
+        
+        #todo: check result and do something (exception?) if failed.
+        if self.exists(name):
+            self.cursor.execute("UPDATE %s SET %s = ?, %s = ? WHERE %s = '%s'"%(self.db_table,self.blob_column,self.size_column,self.fname_column,name), 
+                                 (binary, size)  )
+        else:
+            self.cursor.execute("INSERT INTO %s VALUES(?, ?, ?)"%(self.db_table), (name, binary, size)  )
+        self.connection.commit()
+        return name
+
+    def exists(self, name):
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.fname_column,self.db_table,self.fname_column,name)).fetchone()
+        return row is not None
+    
+    def get_available_name(self, name):
+        return name
+
+    def delete(self, name):
+        if self.exists(name):
+            self.cursor.execute("DELETE FROM %s WHERE %s = '%s'"%(self.db_table,self.fname_column,name))
+            self.connection.commit()
+
+    def url(self, name):
+        if self.base_url is None:
+            raise ValueError("This file is not accessible via a URL.")
+        return urlparse.urljoin(self.base_url, name).replace('\\', '/')
+    
+    def size(self, name):
+        row = self.cursor.execute("SELECT %s from %s where %s = '%s'"%(self.size_column,self.db_table,self.fname_column,name)).fetchone()
+        if row is None:
+            return 0
+        else:
+            return int(row[0])

storages/backends/ftp.py

+# FTP storage class for Django pluggable storage system.
+# Author: Rafal Jonca <jonca.rafal@gmail.com>
+# License: MIT
+# Comes from http://www.djangosnippets.org/snippets/1269/
+#
+# Usage:
+#
+# Add below to settings.py:
+# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
+#
+# In models.py you can write:
+# from FTPStorage import FTPStorage
+# fs = FTPStorage()
+# class FTPTest(models.Model):
+#     file = models.FileField(upload_to='a/b/c/', storage=fs)
+
+import os
+import ftplib
+import urlparse
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+
+class FTPStorageException(Exception): pass
+
+class FTPStorage(Storage):
+    """FTP Storage class for Django pluggable storage system."""
+
+    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
+        self._config = self._decode_location(location)
+        self._base_url = base_url
+        self._connection = None
+
+    def _decode_location(self, location):
+        """Return configuration data split out from the location URL."""
+        splitted_url = urlparse.urlparse(location)
+        config = {}
+        
+        if splitted_url.scheme not in ('ftp', 'aftp'):
+            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
+        if splitted_url.hostname == '':
+            raise ImproperlyConfigured('You must at least provide hostname!')
+            
+        if splitted_url.scheme == 'aftp':
+            config['active'] = True
+        else:
+            config['active'] = False
+        config['path'] = splitted_url.path
+        config['host'] = splitted_url.hostname
+        config['user'] = splitted_url.username
+        config['passwd'] = splitted_url.password
+        config['port'] = int(splitted_url.port)
+        
+        return config
+
+    def _start_connection(self):
+        # Check if connection is still alive and if not, drop it.
+        if self._connection is not None:
+            try:
+                self._connection.pwd()
+            except ftplib.all_errors, e:
+                self._connection = None
+        
+        # Real reconnect
+        if self._connection is None:
+            ftp = ftplib.FTP()
+            try:
+                ftp.connect(self._config['host'], self._config['port'])
+                ftp.login(self._config['user'], self._config['passwd'])
+                if self._config['active']:
+                    ftp.set_pasv(False)
+                if self._config['path'] != '':
+                    ftp.cwd(self._config['path'])
+                self._connection = ftp
+                return
+            except ftplib.all_errors, e:
+                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))
+
+    def disconnect(self):
+        self._connection.quit()
+        self._connection = None
+
+    def _mkremdirs(self, path):
+        pwd = self._connection.pwd()
+        path_splitted = path.split('/')
+        for path_part in path_splitted:
+            try:
+                self._connection.cwd(path_part)
+            except:
+                try:
+                    self._connection.mkd(path_part)
+                    self._connection.cwd(path_part)
+                except ftplib.all_errors, e:
+                    raise FTPStorageException('Cannot create directory chain %s' % path)                    
+        self._connection.cwd(pwd)
+        return
+
+    def _put_file(self, name, content):
+        # Connection must be open!
+        try:
+            self._mkremdirs(os.path.dirname(name))
+            pwd = self._connection.pwd()
+            self._connection.cwd(os.path.dirname(name))
+            self._connection.storbinary('STOR ' + os.path.basename(name), content.file, content.DEFAULT_CHUNK_SIZE)
+            self._connection.cwd(pwd)
+        except ftplib.all_errors, e:
+            raise FTPStorageException('Error writing file %s' % name)
+
+    def _open(self, name, mode='rb'):
+        remote_file = FTPStorageFile(name, self, mode=mode)
+        return remote_file
+
+    def _read(self, name):
+        memory_file = StringIO()
+        try:
+            pwd = self._connection.pwd()
+            self._connection.cwd(os.path.dirname(name))
+            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
+            self._connection.cwd(pwd)
+            return memory_file
+        except ftplib.all_errors, e:
+            raise FTPStorageException('Error reading file %s' % name)
+        
+    def _save(self, name, content):
+        content.open()
+        self._start_connection()
+        self._put_file(name, content)
+        content.close()
+        return name
+
+    def _get_dir_details(self, path):
+        # Connection must be open!
+        try:
+            lines = []
+            self._connection.retrlines('LIST '+path, lines.append)
+            dirs = {}
+            files = {}
+            for line in lines:
+                words = line.split()
+                if len(words) < 6:
+                    continue
+                if words[-2] == '->':
+                    continue
+                if words[0][0] == 'd':
+                    dirs[words[-1]] = 0;
+                elif words[0][0] == '-':
+                    files[words[-1]] = int(words[-5]);
+            return dirs, files
+        except ftplib.all_errors, msg:
+            raise FTPStorageException('Error getting listing for %s' % path)
+
+    def listdir(self, path):
+        self._start_connection()
+        try:
+            dirs, files = self._get_dir_details(path)
+            return dirs.keys(), files.keys()
+        except FTPStorageException, e:
+            raise
+
+    def delete(self, name):
+        if not self.exists(name):
+            return
+        self._start_connection()
+        try:
+            self._connection.delete(name)
+        except ftplib.all_errors, e:
+            raise FTPStorageException('Error when removing %s' % name)                 
+
+    def exists(self, name):
+        self._start_connection()
+        try:
+            if os.path.basename(name) in self._connection.nlst(os.path.dirname(name) + '/'):
+                return True
+            else:
+                return False
+        except ftplib.error_temp, e:
+            return False
+        except ftplib.error_perm, e:
+            # error_perm: 550 Can't find file
+            return False
+        except ftplib.all_errors, e:
+            raise FTPStorageException('Error when testing existence of %s' % name)            
+
+    def size(self, name):
+        self._start_connection()
+        try:
+            dirs, files = self._get_dir_details(os.path.dirname(name))
+            if os.path.basename(name) in files:
+                return files[os.path.basename(name)]
+            else:
+                return 0
+        except FTPStorageException, e:
+            return 0
+
+    def url(self, name):
+        if self._base_url is None:
+            raise ValueError("This file is not accessible via a URL.")
+        return urlparse.urljoin(self._base_url, name).replace('\\', '/')
+
class FTPStorageFile(File):
    """Lazy file wrapper for FTPStorage.

    The remote content is downloaded on first read(); written content is
    buffered locally and pushed back to the server on close().
    """
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self._is_read = False
        self.file = StringIO()

    @property
    def size(self):
        # Cache the remote size after the first lookup.
        try:
            return self._size
        except AttributeError:
            self._size = self._storage.size(self._name)
            return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            # First access: fetch the whole file from the server.
            storage = self._storage
            storage._start_connection()
            self.file = storage._read(self._name)
            storage._end_connection()
            self._is_read = True
        return self.file.read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        # Replace the buffer wholesale; mark it dirty so close() uploads it.
        self.file = StringIO(content)
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            # Flush buffered writes back to the FTP server.
            storage = self._storage
            storage._start_connection()
            storage._put_file(self._name, self.file.getvalue())
            storage._end_connection()
        self.file.close()

storages/backends/image.py

+
+import os
+
+from django.core.files.storage import FileSystemStorage
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    from PIL import ImageFile as PILImageFile
+except ImportError:
+    raise ImproperlyConfigured, "Could not load PIL dependency.\
+    \nSee http://www.pythonware.com/products/pil/"
+
+
class ImageStorage(FileSystemStorage):
    """
    A FileSystemStorage which normalizes extensions for images.

    Comes from http://www.djangosnippets.org/snippets/965/
    """

    def find_extension(self, format):
        """Normalizes PIL-returned format into a standard, lowercase extension."""
        format = format.lower()
        # PIL reports JPEG files as 'jpeg'; the conventional extension is 'jpg'.
        if format == 'jpeg':
            format = 'jpg'
        return format

    def save(self, name, content):
        """Save ``content``, rewriting the extension of ``name`` to match the
        image format PIL detects from the data itself."""
        dirname = os.path.dirname(name)
        basename = os.path.basename(name)

        # Use PIL to determine filetype: feed data in chunks until the
        # parser has seen enough to identify the image.
        p = PILImageFile.Parser()
        im = None
        while True:
            data = content.read(1024)
            if not data:
                break
            p.feed(data)
            if p.image:
                im = p.image
                break

        # Fix: previously ``im`` was left unbound when PIL could not
        # identify the data (e.g. a non-image upload), crashing with a
        # NameError below.  Fail with an explicit, catchable error instead.
        if im is None:
            raise ValueError("Could not determine the image type of %r" % name)

        extension = self.find_extension(im.format)

        # Does the basename already have an extension? If so, replace it.
        # bare as in without extension
        bare_basename, _ = os.path.splitext(basename)
        basename = bare_basename + '.' + extension

        name = os.path.join(dirname, basename)
        return super(ImageStorage, self).save(name, content)

storages/backends/mogile.py

+import urlparse
+import mimetypes
+from StringIO import StringIO
+
+from django.conf import settings
+from django.core.cache import cache
+from django.utils.text import force_unicode
+from django.core.files.storage import Storage
+from django.http import HttpResponse, HttpResponseNotFound
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    import mogilefs
+except ImportError:
+    raise ImproperlyConfigured, "Could not load mogilefs dependency.\
+    \nSee http://mogilefs.pbworks.com/Client-Libraries"
+
+
+class MogileFSStorage(Storage):
+    """MogileFS filesystem storage"""
+    def __init__(self, base_url=settings.MEDIA_URL):
+        
+        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
+        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
+            self.base_url = settings.MOGILEFS_MEDIA_URL
+        else:
+            self.base_url = base_url
+                
+        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
+            if not hasattr(settings, var):
+                raise ImproperlyConfigured, "You must define %s to use the MogileFS backend." % var
+            
+        self.trackers = settings.MOGILEFS_TRACKERS
+        self.domain = settings.MOGILEFS_DOMAIN
+        self.client = mogilefs.Client(self.domain, self.trackers)
+    
+    def get_mogile_paths(self, filename):
+        return self.client.get_paths(filename)  
+    
+    # The following methods define the Backend API
+
+    def filesize(self, filename):
+        raise NotImplemented
+        #return os.path.getsize(self._get_absolute_path(filename))
+    
+    def path(self, filename):
+        paths = self.get_mogile_paths(filename)
+        if paths:
+            return self.get_mogile_paths(filename)[0]
+        else:
+            return None
+    
+    def url(self, filename):
+        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')
+
+    def open(self, filename, mode='rb'):
+        raise NotImplemented
+        #return open(self._get_absolute_path(filename), mode)
+
+    def exists(self, filename):
+        return filename in self.client
+
+    def save(self, filename, raw_contents):
+        filename = self.get_available_filename(filename)
+        
+        if not hasattr(self, 'mogile_class'):
+            self.mogile_class = None
+
+        # Write the file to mogile
+        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
+        if success:
+            print "Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])
+        else:
+            print "FAILURE writing file %s" % (filename)
+
+        return force_unicode(filename.replace('\\', '/'))
+
+    def delete(self, filename):
+        
+        self.client.delete(filename)
+            
+        
def serve_mogilefs_file(request, key=None):
    """
    Called when a user requests an image.
    Either reproxy the path to perlbal, or serve the image outright.
    """
    # not the best way to do this, since we create a client each time
    # Fix: the registered fallback MIME type is "application/octet-stream";
    # "application/x-octet-stream" is not a standard type.
    mimetype = mimetypes.guess_type(key)[0] or "application/octet-stream"
    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
        # we're reproxying with perlbal

        # check the path cache (60s TTL) before asking the tracker
        path = cache.get(key)
        if not path:
            path = client.get_paths(key)
            cache.set(key, path, 60)

        if path:
            response = HttpResponse(content_type=mimetype)
            response['X-REPROXY-URL'] = path[0]
        else:
            response = HttpResponseNotFound()

    else:
        # we don't have perlbal, let's just serve the image via django
        file_data = client[key]
        if file_data:
            response = HttpResponse(file_data, mimetype=mimetype)
        else:
            response = HttpResponseNotFound()

    return response

storages/backends/mosso.py

+"""
+Custom storage for django with Mosso Cloud Files backend.
+Created by Rich Leland <rich@richleland.com>.
+"""
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files import File
+from django.core.files.storage import Storage
+from django.utils.text import get_valid_filename
+
+try:
+    import cloudfiles
+    from cloudfiles.errors import NoSuchObject
+except ImportError:
+    raise ImproperlyConfigured("Could not load cloudfiles dependency. See "
+                               "http://www.mosso.com/cloudfiles.jsp.")
+
+# TODO: implement TTL into cloudfiles methods
+CLOUDFILES_TTL = getattr(settings, 'CLOUDFILES_TTL', 600)
+
+
def cloudfiles_upload_to(self, filename):
    """
    Simple, custom upload_to because Cloud Files doesn't support
    nested containers (directories).

    Actually found this out from @minter:
    @richleland The Cloud Files APIs do support pseudo-subdirectories, by
    creating zero-byte files with type application/directory.

    May implement in a future version.
    """
    # Flatten the path into a single sanitized file name.
    safe_name = get_valid_filename(filename)
    return safe_name
+
+
class CloudFilesStorage(Storage):
    """
    Custom storage for Mosso Cloud Files.
    """
    # Signals that listdir() is the quick variant (files only, no dirs).
    default_quick_listdir = True

    def __init__(self, username=None, api_key=None, container=None,
                 connection_kwargs=None):
        """
        Initialize the settings for the connection and container.
        """
        # Explicit arguments take precedence over the Django settings.
        self.username = username or settings.CLOUDFILES_USERNAME
        self.api_key = api_key or settings.CLOUDFILES_API_KEY
        self.container_name = container or settings.CLOUDFILES_CONTAINER
        self.connection_kwargs = connection_kwargs or {}

    def __getstate__(self):
        """
        Return a picklable representation of the storage.
        """
        # Live connection/container handles are deliberately omitted;
        # they are re-created lazily after unpickling.
        return dict(username=self.username,
                    api_key=self.api_key,
                    container_name=self.container_name,
                    connection_kwargs=self.connection_kwargs)

    def _get_connection(self):
        # Lazily open and cache the Cloud Files connection.
        if not hasattr(self, '_connection'):
            self._connection = cloudfiles.get_connection(self.username,
                                    self.api_key, **self.connection_kwargs)
        return self._connection

    def _set_connection(self, value):
        self._connection = value

    connection = property(_get_connection, _set_connection)

    def _get_container(self):
        # Lazy fetch; assigning via ``self.container`` runs the setter
        # below, which also makes the container public.
        if not hasattr(self, '_container'):
            self.container = self.connection.get_container(
                                                        self.container_name)
        return self._container

    def _set_container(self, container):
        """
        Set the container, making it publicly available (on Limelight CDN) if
        it is not already.
        """
        if not container.is_public():
            container.make_public()
        # A new container invalidates the cached public URI.
        if hasattr(self, '_container_public_uri'):
            delattr(self, '_container_public_uri')
        self._container = container

    container = property(_get_container, _set_container)

    def _get_container_url(self):
        # Cached CDN URI; cleared by _set_container when the container
        # is replaced.
        if not hasattr(self, '_container_public_uri'):
            self._container_public_uri = self.container.public_uri()
        return self._container_public_uri

    container_url = property(_get_container_url)

    def _get_cloud_obj(self, name):
        """
        Helper function to get retrieve the requested Cloud Files Object.
        """
        return self.container.get_object(name)

    def _open(self, name, mode='rb'):
        """
        Return the CloudFilesStorageFile.
        """
        return CloudFilesStorageFile(storage=self, name=name)

    def _save(self, name, content):
        """
        Use the Cloud Files service to write ``content`` to a remote file
        (called ``name``).
        """
        content.open()
        cloud_obj = self.container.create_object(name)
        cloud_obj.size = content.file.size
        # If the content type is available, pass it in directly rather than
        # getting the cloud object to try to guess.
        if hasattr(content.file, 'content_type'):
            cloud_obj.content_type = content.file.content_type
        cloud_obj.send(content)
        content.close()
        return name

    def delete(self, name):
        """
        Deletes the specified file from the storage system.
        """
        self.container.delete_object(name)

    def exists(self, name):
        """
        Returns True if a file referenced by the given name already exists in
        the storage system, or False if the name is available for a new file.
        """
        try:
            self._get_cloud_obj(name)
            return True
        except NoSuchObject:
            return False

    def listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple; the
        first being an empty list of directories (not available for quick-
        listing), the second being a list of filenames.

        If the list of directories is required, use the full_listdir method.
        """
        files = []
        if path and not path.endswith('/'):
            path = '%s/' % path
        path_len = len(path)
        for name in self.container.list_objects(path=path):
            # Strip the prefix so names are relative to ``path``.
            files.append(name[path_len:])
        return ([], files)

    def full_listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.

        On large containers, this may be a slow operation for root containers
        because every single object must be returned (cloudfiles does not
        provide an explicit way of listing directories).
        """
        dirs = set()
        files = []
        if path and not path.endswith('/'):
            path = '%s/' % path
        path_len = len(path)
        for name in self.container.list_objects(prefix=path):
            name = name[path_len:]
            # A '/' that is neither the first nor last character marks a
            # (pseudo-)subdirectory entry; collect its top-level dir name.
            slash = name[1:-1].find('/') + 1
            if slash:
                dirs.add(name[:slash])
            elif name:
                files.append(name)
        dirs = list(dirs)
        dirs.sort()
        return (dirs, files)

    def size(self, name):
        """
        Returns the total size, in bytes, of the file specified by name.
        """
        return self._get_cloud_obj(name).size

    def url(self, name):
        """
        Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        return '%s/%s' % (self.container_url, name)
+
+
+class CloudFilesStorageFile(File):
+    closed = False
+
    def __init__(self, storage, name, *args, **kwargs):
        # Keep a handle on the owning storage; the underlying file object
        # is fetched lazily, so File is initialized with file=None.
        self._storage = storage
        super(CloudFilesStorageFile, self).__init__(file=None, name=name,
                                                    *args, **kwargs)
+
+    def _get_size(self):
+        if not hasattr(self, '_size'):