Commits

jba...@iconn  committed f94f767

pre knoxville checkin... almost there

  • Participants
  • Parent commits 358fb59

Comments (0)

Files changed (170)

File __init__.pyc

Binary file added.

File annoying/__init__.py

Empty file added.

File annoying/decorators.py

+from django.shortcuts import render_to_response
+from django import forms
+from django.template import RequestContext
+from django.db.models import signals as signalmodule
+from django.http import HttpResponse
+from django.utils import simplejson
+
+__all__ = ['render_to', 'signals', 'ajax_request', 'autostrip']
+
+
# Compatibility shim: functools.wraps exists from Python 2.5 on; provide a
# minimal fallback for older interpreters so the decorators below still
# preserve the wrapped function's metadata.
try:
    from functools import wraps
except ImportError: 
    def wraps(wrapped, assigned=('__module__', '__name__', '__doc__'),
              updated=('__dict__',)):
        # Copy the listed attributes from ``wrapped`` onto the wrapper and
        # merge the listed mapping attributes (e.g. __dict__) into it.
        def inner(wrapper):
            for attr in assigned:
                setattr(wrapper, attr, getattr(wrapped, attr))
            for attr in updated:
                getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
            return wrapper
        return inner
+
+
def render_to(template=None):
    """
    View decorator: when the wrapped view returns a dict, render it via
    ``render_to_response`` with a ``RequestContext``.

    The template name comes from the decorator argument, or from a
    'TEMPLATE' key popped out of the returned dict. Any non-dict return
    value (HttpResponse, redirect, ...) passes through untouched.

    Parameters:
     - template: template name to use

    Example::

        @render_to('template.html')
        def foo(request):
            return {'bar': Bar.object.all()}
    """
    def renderer(function):
        @wraps(function)
        def wrapper(request, *args, **kwargs):
            result = function(request, *args, **kwargs)
            if isinstance(result, dict):
                template_name = result.pop('TEMPLATE', template)
                context = RequestContext(request)
                return render_to_response(template_name, result,
                                          context_instance=context)
            return result
        return wrapper
    return renderer
+
+
+
class Signals(object):
    '''
    Convenient wrapper for working with Django's signals (or any other
    implementation using same API).

    Example of usage::

       # connect to registered signal
       @signals.post_save(sender=YourModel)
       def sighandler(instance, **kwargs):
           pass

       # connect to any signal
       signals.register_signal(siginstance, signame) # and then as in example above

       or 

       @signals(siginstance, sender=YourModel)
       def sighandler(instance, **kwargs):
           pass

    In any case defined function will remain as is, without any changes.

    (c) 2008 Alexander Solovyov, new BSD License
    '''
    def __init__(self):
        # name -> signal instance registry
        self._signals = {}

        # register all Django's default signals
        for k, v in signalmodule.__dict__.iteritems():
            # that's hardcode, but IMHO it's better than isinstance
            if not k.startswith('__') and k != 'Signal':
                self.register_signal(v, k)

    def __getattr__(self, name):
        # Fix: a missing attribute must raise AttributeError, not the
        # KeyError a plain dict lookup produces -- a KeyError escaping
        # __getattr__ breaks hasattr(), getattr(..., default) and any
        # other code using the attribute protocol.
        try:
            signal = self._signals[name]
        except KeyError:
            raise AttributeError(name)
        return self._connect(signal)

    def __call__(self, signal, **kwargs):
        # Decorator form: connect the function and return it unchanged.
        def inner(func):
            signal.connect(func, **kwargs)
            return func
        return inner

    def _connect(self, signal):
        # Return a decorator factory bound to ``signal``.
        def wrapper(**kwargs):
            return self(signal, **kwargs)
        return wrapper

    def register_signal(self, signal, name):
        """Expose ``signal`` as attribute ``name`` on this wrapper."""
        self._signals[name] = signal
+
# Module-level singleton: import this and use e.g. @signals.post_save(sender=Model)
signals = Signals()
+
+
+
class JsonResponse(HttpResponse):
    """
    HttpResponse subclass that serializes the given data to JSON and
    serves it with the ``application/json`` mimetype.
    """
    def __init__(self, data):
        payload = simplejson.dumps(data)
        super(JsonResponse, self).__init__(content=payload,
                                           mimetype='application/json')
+
+
+
def ajax_request(func):
    """
    View decorator: a dict returned by the view is serialized and wrapped
    in a JsonResponse; any other return value passes through unchanged.

    example:

        @ajax_request
        def my_view(request):
            news = News.objects.all()
            news_titles = [entry.title for entry in news]
            return {'news_titles': news_titles}
    """
    @wraps(func)
    def wrapper(request, *args, **kwargs):
        result = func(request, *args, **kwargs)
        if not isinstance(result, dict):
            return result
        return JsonResponse(result)
    return wrapper
+
+
def autostrip(cls):
    """
    Form-class decorator: wrap every CharField's ``clean`` so values are
    stripped of surrounding whitespace before validation.

    example:
    class PersonForm(forms.Form):
        name = forms.CharField(min_length=2, max_length=10)
        email = forms.EmailField()

    PersonForm = autostrip(PersonForm)

    #or you can use @autostrip in python >= 2.6

    Author: nail.xx
    """
    def _stripping(original_clean):
        # Bind original_clean per field (avoids the late-binding-closure trap).
        return lambda value: original_clean(value and value.strip())

    for field_name, field_object in cls.base_fields.iteritems():
        if isinstance(field_object, forms.CharField):
            field_object.clean = _stripping(field_object.clean)
    return cls
+

File annoying/exceptions.py

class Redirect(Exception):
    """
    Exception used to signal an HTTP redirect from view code; the
    companion RedirectMiddleware catches it and replays the stored
    positional/keyword arguments into django's ``redirect()``.
    """
    def __init__(self, *args, **kwargs):
        # Let Exception store ``args`` (so str()/repr() behave normally)
        # and stash the keyword arguments for the middleware.
        super(Redirect, self).__init__(*args)
        self.kwargs = kwargs
+

File annoying/fields.py

+from django.db.models import OneToOneField
+from django.db.models.fields.related import SingleRelatedObjectDescriptor
+
+
class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
    """
    Reverse accessor that transparently creates and saves the related
    object on first access instead of raising ``DoesNotExist``.
    """
    def __get__(self, instance, instance_type=None):
        parent_get = super(AutoSingleRelatedObjectDescriptor, self).__get__
        try:
            return parent_get(instance, instance_type)
        except self.related.model.DoesNotExist:
            # Build a fresh related instance pointing back at ``instance``,
            # persist it, and hand it to the caller.
            new_obj = self.related.model(**{self.related.field.name: instance})
            new_obj.save()
            return new_obj
+
class AutoOneToOneField(OneToOneField):
    '''
    OneToOneField that creates the related object on first access if it
    does not exist yet. Drop-in replacement for the original OneToOneField.

    example:

        class MyProfile(models.Model):
            user = AutoOneToOneField(User, primary_key=True)
            home_page = models.URLField(max_length=255)
            icq = models.CharField(max_length=255)
    '''
    def contribute_to_related_class(self, cls, related):
        # Install the auto-creating descriptor under the reverse accessor name.
        accessor = related.get_accessor_name()
        descriptor = AutoSingleRelatedObjectDescriptor(related)
        setattr(cls, accessor, descriptor)
+

File annoying/functions.py

+from django.shortcuts import _get_queryset
+from django.conf import settings
+
+
def get_object_or_None(klass, *args, **kwargs):
    """
    Like django's get_object_or_404, but returns None (instead of raising)
    when no matching object exists.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), an MultipleObjectsReturned will be raised if more than one
    object is found.
    """
    qs = _get_queryset(klass)
    try:
        return qs.get(*args, **kwargs)
    except qs.model.DoesNotExist:
        return None
+
+
+
def get_config(key, default):
    """
    Look ``key`` up on django.conf.settings, falling back to ``default``
    when the setting is absent.

    example:

    ADMIN_EMAIL = get_config('ADMIN_EMAIL', 'default@email.com')
    """
    value = getattr(settings, key, default)
    return value

File annoying/middlewares.py

+import re
+
+from django.conf import settings
+from django.views.static import serve
+from django.shortcuts import redirect
+
+from .exceptions import Redirect
+
+
class StaticServe(object):
    """
    Django middleware for serving static files instead of using urls.py
    """
    # Compiled once at class-definition time from settings.MEDIA_URL.
    regex = re.compile(r'^%s(?P<path>.*)$' % settings.MEDIA_URL)

    def process_request(self, request):
        # Only intercept media requests while DEBUG is on; production
        # should serve media from a real web server.
        if not settings.DEBUG:
            return None
        match = self.regex.search(request.path)
        if match is None:
            return None
        return serve(request, match.group(1), settings.MEDIA_ROOT)
+
+
class RedirectMiddleware(object):
    """
    You must add this middleware to MIDDLEWARE_CLASSES list,
    to make work Redirect exception. All arguments passed to
    Redirect will be passed to django built in redirect function.
    """
    def process_exception(self, request, exception):
        # Only our Redirect signal is handled; anything else propagates.
        if isinstance(exception, Redirect):
            return redirect(*exception.args, **exception.kwargs)
        return None

File annoying/templatetags/__init__.py

Empty file added.

File annoying/templatetags/annoying.py

from django import template

from smart_if import smart_if


register = template.Library()

# Override the builtin {% if %} with the operator-aware smart_if version.
register.tag('if', smart_if)

File annoying/templatetags/smart_if.py

+from django import template
+
+__author__ = "SmileyChris"
+
+#==============================================================================
+# Calculation objects
+#==============================================================================
+
class BaseCalc(object):
    """
    Base class for a binary (or unary, when ``var2`` is None) template
    operation. Subclasses implement ``calculate``.
    """
    def __init__(self, var1, var2=None, negate=False):
        self.var1 = var1
        self.var2 = var2
        self.negate = negate

    def resolve(self, context):
        # Any failure while resolving or calculating simply yields False,
        # mirroring django's forgiving template semantics.
        try:
            lhs, rhs = self.resolve_vars(context)
            outcome = self.calculate(lhs, rhs)
        except:
            outcome = False
        return (not outcome) if self.negate else outcome

    def resolve_vars(self, context):
        # var2 may be None (unary usage); resolve it only when present.
        rhs = self.var2 and self.var2.resolve(context)
        return self.var1.resolve(context), rhs

    def calculate(self, var1, var2):
        raise NotImplementedError()
+
+
class Or(BaseCalc):
    """Boolean 'or': returns the first operand when truthy, else the second."""
    def calculate(self, var1, var2):
        return var1 if var1 else var2
+
+
class And(BaseCalc):
    """Boolean 'and': returns the first operand when falsy, else the second."""
    def calculate(self, var1, var2):
        return var2 if var1 else var1
+
+
class Equals(BaseCalc):
    """Equality test (``==``); also backs ``!=`` via the negate flag."""
    def calculate(self, var1, var2):
        outcome = var1 == var2
        return outcome
+
+
class Greater(BaseCalc):
    """Strict ``>`` comparison; also backs ``<=`` via the negate flag."""
    def calculate(self, var1, var2):
        outcome = var1 > var2
        return outcome
+
+
class GreaterOrEqual(BaseCalc):
    """``>=`` comparison; also backs ``<`` via the negate flag."""
    def calculate(self, var1, var2):
        outcome = var1 >= var2
        return outcome
+
+
class In(BaseCalc):
    """Membership test (``x in y``); also backs ``not in`` via negate."""
    def calculate(self, var1, var2):
        outcome = var1 in var2
        return outcome
+
+
# Token -> (calc class, truth sense). A False second element means the
# operator is implemented as the negation of the listed class
# (e.g. ``<`` is "not >=", ``!=`` is "not ==").
OPERATORS = {
    '=': (Equals, True),
    '==': (Equals, True),
    '!=': (Equals, False),
    '>': (Greater, True),
    '>=': (GreaterOrEqual, True),
    '<=': (Greater, False),
    '<': (GreaterOrEqual, False),
    'or': (Or, True),
    'and': (And, True),
    'in': (In, True),
}
# Operators with boolean precedence (binding looser than comparisons).
BOOL_OPERATORS = ('or', 'and')
+
+
class IfParser(object):
    """
    Recursive-descent parser over a smart-if token list. Produces a tree
    of calc objects (BaseCalc subclasses) whose root answers ``resolve()``.
    """
    # Exception type raised on malformed input; subclasses override it.
    error_class = ValueError

    def __init__(self, tokens):
        self.tokens = tokens

    def _get_tokens(self):
        return self._tokens

    def _set_tokens(self, tokens):
        # Cache the length and reset the cursor whenever tokens are assigned.
        self._tokens = tokens
        self.len = len(tokens)
        self.pos = 0

    tokens = property(_get_tokens, _set_tokens)

    def parse(self):
        # Left-fold boolean operators over comparison-level sub-expressions.
        if self.at_end():
            raise self.error_class('No variables provided.')
        var1 = self.get_bool_var()
        while not self.at_end():
            op, negate = self.get_operator()
            var2 = self.get_bool_var()
            var1 = op(var1, var2, negate=negate)
        return var1

    def get_token(self, eof_message=None, lookahead=False):
        # Consume (or, with lookahead=True, peek at) the next real token.
        # Leading 'not' tokens are folded into the returned ``negate`` flag:
        # each flips it, and it ends up False when no 'not' was present.
        negate = True
        token = None
        pos = self.pos
        while token is None or token == 'not':
            if pos >= self.len:
                if eof_message is None:
                    raise self.error_class()
                raise self.error_class(eof_message)
            token = self.tokens[pos]
            negate = not negate
            pos += 1
        if not lookahead:
            self.pos = pos
        return token, negate

    def at_end(self):
        # True once the cursor has consumed every token.
        return self.pos >= self.len

    def create_var(self, value):
        # Hook: subclasses turn a raw token into a resolvable variable.
        # NOTE(review): TestVar is not defined in this chunk -- presumably
        # supplied by the test harness; verify before relying on it.
        return TestVar(value)

    def get_bool_var(self):
        """
        Returns either a variable by itself or a non-boolean operation (such as
        ``x == 0`` or ``x < 0``).

        This is needed to keep correct precedence for boolean operations (i.e.
        ``x or x == 0`` should be ``x or (x == 0)``, not ``(x or x) == 0``).
        """
        var = self.get_var()
        if not self.at_end():
            # Peek ahead: bind a comparison operator tighter than and/or.
            op_token = self.get_token(lookahead=True)[0]
            if isinstance(op_token, basestring) and (op_token not in
                                                     BOOL_OPERATORS):
                op, negate = self.get_operator()
                return op(var, self.get_var(), negate=negate)
        return var

    def get_var(self):
        token, negate = self.get_token('Reached end of statement, still '
                                       'expecting a variable.')
        if isinstance(token, basestring) and token in OPERATORS:
            raise self.error_class('Expected variable, got operator (%s).' %
                                   token)
        var = self.create_var(token)
        if negate:
            # 'not x' is modelled as Or(x, negate=True), which resolves
            # to ``not bool(x)``.
            return Or(var, negate=True)
        return var

    def get_operator(self):
        token, negate = self.get_token('Reached end of statement, still '
                                       'expecting an operator.')
        if not isinstance(token, basestring) or token not in OPERATORS:
            raise self.error_class('%s is not a valid operator.' % token)
        if self.at_end():
            raise self.error_class('No variable provided after "%s".' % token)
        op, true = OPERATORS[token]
        if not true:
            # Operators stored as negations ('<' is "not >=") flip negate.
            negate = not negate
        return op, negate
+
+
+#==============================================================================
+# Actual templatetag code.
+#==============================================================================
+
class TemplateIfParser(IfParser):
    """IfParser wired into django: template syntax errors, filter variables."""
    error_class = template.TemplateSyntaxError

    def __init__(self, parser, *args, **kwargs):
        # Keep the template parser so create_var can compile filters.
        self.template_parser = parser
        # Fix: the original returned the result of super().__init__, which
        # is always None and misleading -- __init__ should not return a value.
        super(TemplateIfParser, self).__init__(*args, **kwargs)

    def create_var(self, value):
        # Tokens become FilterExpression objects with a .resolve(context).
        return self.template_parser.compile_filter(value)
+
+
class SmartIfNode(template.Node):
    """
    Template node for the smarter {% if %}: renders the true branch when
    the parsed expression resolves truthy, otherwise the false branch
    (when present), otherwise the empty string.
    """
    def __init__(self, var, nodelist_true, nodelist_false=None):
        self.var = var
        self.nodelist_true = nodelist_true
        self.nodelist_false = nodelist_false

    def render(self, context):
        if self.var.resolve(context):
            return self.nodelist_true.render(context)
        elif self.nodelist_false:
            return self.nodelist_false.render(context)
        return ''

    def __repr__(self):
        return "<Smart If node>"

    def __iter__(self):
        # Yield the true-branch nodes first, then any false-branch nodes.
        for node in self.nodelist_true:
            yield node
        if self.nodelist_false:
            for node in self.nodelist_false:
                yield node

    def get_nodes_by_type(self, nodetype):
        found = []
        if isinstance(self, nodetype):
            found.append(self)
        found.extend(self.nodelist_true.get_nodes_by_type(nodetype))
        if self.nodelist_false:
            found.extend(self.nodelist_false.get_nodes_by_type(nodetype))
        return found
+
+
def smart_if(parser, token):
    """
    A smarter {% if %} tag for django templates.

    While retaining current Django functionality, it also handles equality,
    greater than and less than operators. Some common case examples::

        {% if articles|length >= 5 %}...{% endif %}
        {% if "ifnotequal tag" != "beautiful" %}...{% endif %}

    Arguments and operators _must_ have a space between them, so
    ``{% if 1>2 %}`` is not a valid smart if tag.

    All supported operators are: ``or``, ``and``, ``in``, ``=`` (or ``==``),
    ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
    """
    # Everything after the tag name is the expression to parse.
    bits = token.split_contents()[1:]
    var = TemplateIfParser(parser, bits).parse()
    # Collect the true branch up to the next {% else %} / {% endif %}.
    nodelist_true = parser.parse(('else', 'endif'))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse(('endif',))
        # Swallow the closing {% endif %}.
        parser.delete_first_token()
    else:
        nodelist_false = None
    return SmartIfNode(var, nodelist_true, nodelist_false)
+

File annoying/utils.py

+from django.http import HttpResponse
+from django.utils.encoding import iri_to_uri
+
+
+class HttpResponseReload(HttpResponse):
+    """
+    Reload page and stay on the same page from where request was made.
+
+    example:
+
+    def simple_view(request):
+        if request.POST:
+            form = CommentForm(request.POST):
+            if form.is_valid():
+                form.save()
+                return HttpResponseReload(request)
+        else:
+            form = CommentForm()
+        return render_to_response('some_template.html', {'form': form})
+    """
+    status_code = 302
+
+    def __init__(self, request):
+        HttpResponse.__init__(self)
+        referer = request.META.get('HTTP_REFERER')
+        self['Location'] = iri_to_uri(referer or "/")

File backends/__init__.py

Empty file added.

File backends/couchdb.py

+"""
+This is a Custom Storage System for Django with CouchDB backend.
+Created by Christian Klein.
+(c) Copyright 2009 HUDORA GmbH. All Rights Reserved.
+"""
+import os
+from cStringIO import StringIO
+from urlparse import urljoin
+from urllib import quote_plus
+
+from django.conf import settings
+from django.core.files import File
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    import couchdb
+except ImportError:
+    raise ImproperlyConfigured, "Could not load couchdb dependency.\
+    \nSee http://code.google.com/p/couchdb-python/"
+
# Connection defaults, overridable via settings.COUCHDB_DEFAULT_SERVER /
# settings.COUCHDB_STORAGE_OPTIONS.
DEFAULT_SERVER= getattr(settings, 'COUCHDB_DEFAULT_SERVER', 'http://couchdb.local:5984')
STORAGE_OPTIONS= getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {})
+
+
class CouchDBStorage(Storage):
    """
    CouchDBStorage - a Django Storage class for CouchDB.

    The CouchDBStorage can be configured in settings.py, e.g.::
    
        COUCHDB_STORAGE_OPTIONS = {
            'server': "http://example.org", 
            'database': 'database_name'
        }

    Alternatively, the configuration can be passed as a dictionary.
    """
    def __init__(self, **kwargs):
        # Settings-level options override per-instance kwargs.
        kwargs.update(STORAGE_OPTIONS)
        self.base_url = kwargs.get('server', DEFAULT_SERVER)
        server = couchdb.client.Server(self.base_url)
        # NOTE(review): looks like a missing 'database' key becomes
        # server[None] here and fails -- confirm that is intended.
        self.db = server[kwargs.get('database')]

    def _put_file(self, name, content):
        # Each file is a document holding its size plus one attachment
        # named 'content' that carries the actual bytes.
        self.db[name] = {'size': len(content)}
        self.db.put_attachment(self.db[name], content, filename='content')
        return name

    def get_document(self, name):
        # Raw couchdb document, or None when it does not exist.
        return self.db.get(name)

    def _open(self, name, mode='rb'):
        couchdb_file = CouchDBFile(name, self, mode=mode)
        return couchdb_file

    def _save(self, name, content):
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        # Flatten the path into a single document id.
        # NOTE(review): url() below quotes the *original* name, so names
        # containing '/' are stored under a '-' id but addressed with the
        # slash form -- verify these two stay consistent.
        name = name.replace('/', '-')
        return self._put_file(name, content_str)

    def exists(self, name):
        return name in self.db

    def size(self, name):
        # Missing documents report size 0 rather than raising.
        doc = self.get_document(name)
        if doc:
            return doc['size']
        return 0

    def url(self, name):
        return urljoin(self.base_url, 
                       os.path.join(quote_plus(self.db.name), 
                       quote_plus(name), 
                       'content'))

    def delete(self, name):
        # Map couchdb's ResourceNotFound onto the IOError Django expects.
        try:
            del self.db[name]
        except couchdb.client.ResourceNotFound:
            raise IOError("File not found: %s" % name)

    #def listdir(self, name):
    # _all_docs?
    #    pass
+
+
class CouchDBFile(File):
    """
    CouchDBFile - a Django File-like class for CouchDB documents.

    Opens the document's attachment into an in-memory buffer; writes are
    flushed back through the storage on close().
    """

    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False

        try:
            self._doc = self._storage.get_document(name)

            # Bug fix: the original used os.path.split(), which splits off
            # the *directory* part, so ``ext`` held the entire basename and
            # the attachment name came out as "content.<basename>".
            # splitext() yields the real extension (leading dot included,
            # hence no extra '.' is added here).
            # NOTE(review): _put_file always stores the attachment as plain
            # 'content'; confirm whether extension-suffixed attachments are
            # written anywhere else.
            root, ext = os.path.splitext(name)
            if ext:
                filename = "content" + ext
            else:
                filename = "content"
            attachment = self._storage.db.get_attachment(self._doc, filename=filename)
            self.file = StringIO(attachment)
        except couchdb.client.ResourceNotFound:
            if 'r' in self._mode:
                raise ValueError("The file cannot be reopened.")
            else:
                # Writable mode: start from an empty buffer.
                self.file = StringIO()
                self._is_dirty = True

    @property
    def size(self):
        # Size in bytes as recorded on the CouchDB document.
        return self._doc['size']

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True

    def close(self):
        # Flush the in-memory buffer back to storage only when written to.
        if self._is_dirty:
            self._storage._put_file(self._name, self.file.getvalue())
        self.file.close()
+
+

File backends/database.py

+# DatabaseStorage for django.
+# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
+import StringIO
+import urlparse
+
+from django.conf import settings
+from django.core.files import File
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    import pyodbc
+except ImportError:
+    raise ImproperlyConfigured, "Could not load pyodbc dependency.\
+    \nSee http://code.google.com/p/pyodbc/"
+
+
class DatabaseStorage(Storage):
    """
    Django storage backend keeping file contents in a database table
    (accessed via pyodbc).

    Security fix: filename values are now bound as query parameters ('?')
    instead of being %-interpolated into the SQL text -- filenames come
    from user uploads, so the old form was an SQL-injection vector.
    Table/column names still come from trusted settings and are
    interpolated as before.
    """

    def __init__(self, option=settings.DB_FILES):
        """Constructor.

        Configures the storage from ``option`` (or settings.DB_FILES) and
        opens the ODBC connection.

        @param option dictionary with 'db_table', 'fname_column',
        'blob_column', 'size_column', 'base_url' keys.

        option['db_table']
            Table to work with.
        option['fname_column']
            Column in the 'db_table' containing filenames (filenames can
            contain paths). Values should be the same as where FileField
            keeps filenames; used to map a filename to blob_column.
        option['blob_column']
            Blob column (for example 'image' type), created manually in the
            'db_table', used to store the file content.
        option['size_column']
            Column to store file size (avoids opening the blob in size()).
        option['base_url']
            Url prefix used with filenames. Should be mapped to the view
            that returns the file content.

        @raises ValueError when any required option is missing.
        """
        required = ('db_table', 'fname_column', 'blob_column',
                    'size_column', 'base_url')
        if not option or not all(key in option for key in required):
            raise ValueError("You didn't specify required options")
        self.db_table = option['db_table']
        self.fname_column = option['fname_column']
        self.blob_column = option['blob_column']
        self.size_column = option['size_column']
        self.base_url = option['base_url']

        # Database connection settings.
        self.DATABASE_ODBC_DRIVER = settings.DATABASE_ODBC_DRIVER
        self.DATABASE_NAME = settings.DATABASE_NAME
        self.DATABASE_USER = settings.DATABASE_USER
        self.DATABASE_PASSWORD = settings.DATABASE_PASSWORD
        self.DATABASE_HOST = settings.DATABASE_HOST

        self.connection = pyodbc.connect(
            'DRIVER=%s;SERVER=%s;DATABASE=%s;UID=%s;PWD=%s' % (
                self.DATABASE_ODBC_DRIVER, self.DATABASE_HOST,
                self.DATABASE_NAME, self.DATABASE_USER,
                self.DATABASE_PASSWORD))
        self.cursor = self.connection.cursor()

    def _open(self, name, mode='rb'):
        """Open a file from database.

        @param name filename or relative path to file based on base_url.
        The path should contain only "/", not "\\"; Apache sends paths
        with "/". Returns None when no such file exists in the db.
        """
        assert mode == 'rb', "You've tried to open binary file without specifying binary mode! You specified: %s" % mode

        query = "SELECT %s from %s where %s = ?" % (
            self.blob_column, self.db_table, self.fname_column)
        row = self.cursor.execute(query, (name,)).fetchone()
        if row is None:
            return None
        inMemFile = StringIO.StringIO(row[0])
        inMemFile.name = name
        inMemFile.mode = mode
        return File(inMemFile)

    def _save(self, name, content):
        """Save 'content' as file named 'name'.

        @note '\\' in path will be converted to '/'.
        """
        name = name.replace('\\', '/')
        binary = pyodbc.Binary(content.read())
        size = len(binary)

        # todo: check result and do something (exception?) if failed.
        if self.exists(name):
            query = "UPDATE %s SET %s = ?, %s = ? WHERE %s = ?" % (
                self.db_table, self.blob_column, self.size_column,
                self.fname_column)
            self.cursor.execute(query, (binary, size, name))
        else:
            self.cursor.execute(
                "INSERT INTO %s VALUES(?, ?, ?)" % self.db_table,
                (name, binary, size))
        self.connection.commit()
        return name

    def exists(self, name):
        """True when a row with this filename exists."""
        query = "SELECT %s from %s where %s = ?" % (
            self.fname_column, self.db_table, self.fname_column)
        return self.cursor.execute(query, (name,)).fetchone() is not None

    def get_available_name(self, name):
        # Overwrite semantics: reuse the same name (see _save's UPDATE path).
        return name

    def delete(self, name):
        """Remove the row for ``name``; silently no-op when absent."""
        if self.exists(name):
            self.cursor.execute(
                "DELETE FROM %s WHERE %s = ?" % (
                    self.db_table, self.fname_column),
                (name,))
            self.connection.commit()

    def url(self, name):
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, name).replace('\\', '/')

    def size(self, name):
        """File size in bytes from size_column, or 0 when the row is missing."""
        query = "SELECT %s from %s where %s = ?" % (
            self.size_column, self.db_table, self.fname_column)
        row = self.cursor.execute(query, (name,)).fetchone()
        if row is None:
            return 0
        return int(row[0])

File backends/ftp.py

+# FTP storage class for Django pluggable storage system.
+# Author: Rafal Jonca <jonca.rafal@gmail.com>
+# License: MIT
+# Comes from http://www.djangosnippets.org/snippets/1269/
+#
+# Usage:
+#
+# Add below to settings.py:
+# FTP_STORAGE_LOCATION = '[a]ftp://<user>:<pass>@<host>:<port>/[path]'
+#
+# In models.py you can write:
+# from FTPStorage import FTPStorage
+# fs = FTPStorage()
+# class FTPTest(models.Model):
+#     file = models.FileField(upload_to='a/b/c/', storage=fs)
+
+import os
+import ftplib
+import urlparse
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+
class FTPStorageException(Exception):
    """Raised for any FTP-level failure inside FTPStorage."""
    pass
+
+class FTPStorage(Storage):
+    """FTP Storage class for Django pluggable storage system."""
+
    def __init__(self, location=settings.FTP_STORAGE_LOCATION, base_url=settings.MEDIA_URL):
        # NOTE(review): both defaults are evaluated once, at import time,
        # from settings -- later settings changes are not picked up.
        # Confirm that is acceptable.
        self._config = self._decode_location(location)
        self._base_url = base_url
        # Lazily opened by _start_connection().
        self._connection = None
+
+    def _decode_location(self, location):
+        """Return splitted configuration data from location."""
+        splitted_url = urlparse.urlparse(location)
+        config = {}
+        
+        if splitted_url.scheme not in ('ftp', 'aftp'):
+            raise ImproperlyConfigured('FTPStorage works only with FTP protocol!')
+        if splitted_url.hostname == '':
+            raise ImproperlyConfigured('You must at least provide hostname!')
+            
+        if splitted_url.scheme == 'aftp':
+            config['active'] = True
+        else:
+            config['active'] = False
+        config['path'] = splitted_url.path
+        config['host'] = splitted_url.hostname
+        config['user'] = splitted_url.username
+        config['passwd'] = splitted_url.password
+        config['port'] = int(splitted_url.port)
+        
+        return config
+
    def _start_connection(self):
        # Check if connection is still alive and if not, drop it.
        if self._connection is not None:
            try:
                # pwd() doubles as a cheap keep-alive probe.
                self._connection.pwd()
            except ftplib.all_errors, e:
                self._connection = None
        
        # Real reconnect
        if self._connection is None:
            ftp = ftplib.FTP()
            try:
                ftp.connect(self._config['host'], self._config['port'])
                ftp.login(self._config['user'], self._config['passwd'])
                if self._config['active']:
                    # 'aftp' scheme requested active (non-passive) mode.
                    ftp.set_pasv(False)
                if self._config['path'] != '':
                    ftp.cwd(self._config['path'])
                self._connection = ftp
                return
            except ftplib.all_errors, e:
                raise FTPStorageException('Connection or login error using data %s' % repr(self._config))
+
+    def disconnect(self):
+        self._connection.quit()
+        self._connection = None
+
    def _mkremdirs(self, path):
        """Create (and descend into) each missing component of ``path`` on
        the server, then return to the original working directory."""
        pwd = self._connection.pwd()
        path_splitted = path.split('/')
        for path_part in path_splitted:
            try:
                # Already exists: just step in.
                self._connection.cwd(path_part)
            except:
                try:
                    self._connection.mkd(path_part)
                    self._connection.cwd(path_part)
                except ftplib.all_errors, e:
                    raise FTPStorageException('Cannot create directory chain %s' % path)                    
        self._connection.cwd(pwd)
        return
+
    def _put_file(self, name, content):
        # Connection must be open!
        try:
            # Make sure the remote directory chain exists, then upload the
            # file in DEFAULT_CHUNK_SIZE blocks and restore the cwd.
            self._mkremdirs(os.path.dirname(name))
            pwd = self._connection.pwd()
            self._connection.cwd(os.path.dirname(name))
            self._connection.storbinary('STOR ' + os.path.basename(name), content.file, content.DEFAULT_CHUNK_SIZE)
            self._connection.cwd(pwd)
        except ftplib.all_errors, e:
            raise FTPStorageException('Error writing file %s' % name)
+
+    def _open(self, name, mode='rb'):
+        remote_file = FTPStorageFile(name, self, mode=mode)
+        return remote_file
+
    def _read(self, name):
        """Fetch ``name`` into an in-memory StringIO buffer and return it.

        Connection must already be open; the original cwd is restored.
        """
        memory_file = StringIO()
        try:
            pwd = self._connection.pwd()
            self._connection.cwd(os.path.dirname(name))
            self._connection.retrbinary('RETR ' + os.path.basename(name), memory_file.write)
            self._connection.cwd(pwd)
            return memory_file
        except ftplib.all_errors, e:
            raise FTPStorageException('Error reading file %s' % name)
+        
    def _save(self, name, content):
        """Store Django File *content* under *name*; return the name used."""
        content.open()
        self._start_connection()
        self._put_file(name, content)
        content.close()
        return name
+
+    def _get_dir_details(self, path):
+        # Connection must be open!
+        try:
+            lines = []
+            self._connection.retrlines('LIST '+path, lines.append)
+            dirs = {}
+            files = {}
+            for line in lines:
+                words = line.split()
+                if len(words) < 6:
+                    continue
+                if words[-2] == '->':
+                    continue
+                if words[0][0] == 'd':
+                    dirs[words[-1]] = 0;
+                elif words[0][0] == '-':
+                    files[words[-1]] = int(words[-5]);
+            return dirs, files
+        except ftplib.all_errors, msg:
+            raise FTPStorageException('Error getting listing for %s' % path)
+
+    def listdir(self, path):
+        self._start_connection()
+        try:
+            dirs, files = self._get_dir_details(path)
+            return dirs.keys(), files.keys()
+        except FTPStorageException, e:
+            raise
+
    def delete(self, name):
        """Delete remote file *name*; missing files are silently ignored."""
        if not self.exists(name):
            return
        self._start_connection()
        try:
            self._connection.delete(name)
        except ftplib.all_errors, e:
            raise FTPStorageException('Error when removing %s' % name)
+
    def exists(self, name):
        """Return True if *name* appears in its parent directory's NLST.

        Transient errors (ftplib.error_temp) and permission/"not found"
        errors (ftplib.error_perm, e.g. 550) are treated as "does not
        exist"; any other FTP error becomes FTPStorageException.
        """
        self._start_connection()
        try:
            if os.path.basename(name) in self._connection.nlst(os.path.dirname(name) + '/'):
                return True
            else:
                return False
        except ftplib.error_temp, e:
            return False
        except ftplib.error_perm, e:
            # error_perm: 550 Can't find file
            return False
        except ftplib.all_errors, e:
            raise FTPStorageException('Error when testing existence of %s' % name)
+
    def size(self, name):
        """Return the size of *name* in bytes, from the parent directory's
        listing; 0 if the file is not listed or the listing fails."""
        self._start_connection()
        try:
            dirs, files = self._get_dir_details(os.path.dirname(name))
            if os.path.basename(name) in files:
                return files[os.path.basename(name)]
            else:
                return 0
        except FTPStorageException, e:
            return 0
+
+    def url(self, name):
+        if self._base_url is None:
+            raise ValueError("This file is not accessible via a URL.")
+        return urlparse.urljoin(self._base_url, name).replace('\\', '/')
+
class FTPStorageFile(File):
    """Lazy File wrapper over a remote FTP object.

    Contents are buffered in memory: the download happens on the first
    read(), and writes are deferred until close().
    """
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = StringIO()
        self._is_read = False

    @property
    def size(self):
        # Cached after the first lookup; delegated to the storage backend.
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            self._storage._start_connection()
            self.file = self._storage._read(self._name)
            # _read() leaves the buffer positioned at EOF; rewind it so
            # the first read() actually returns data instead of ''.
            self.file.seek(0)
            # NOTE(review): _end_connection is not visible in this module
            # chunk -- confirm it exists (cf. disconnect()).
            self._storage._end_connection()
            self._is_read = True
        return self.file.read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            self._storage._start_connection()
            # _put_file() dereferences content.file and
            # content.DEFAULT_CHUNK_SIZE; passing the raw string (as the
            # old code did) crashed with AttributeError, so wrap the
            # buffer in a File instead.
            self.file.seek(0)
            self._storage._put_file(self._name, File(self.file))
            self._storage._end_connection()
        self.file.close()

File backends/image.py

+
+import os
+
+from django.core.files.storage import FileSystemStorage
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    from PIL import ImageFile as PILImageFile
+except ImportError:
+    raise ImproperlyConfigured, "Could not load PIL dependency.\
+    \nSee http://www.pythonware.com/products/pil/"
+
+
class ImageStorage(FileSystemStorage):
    """
    A FileSystemStorage which normalizes extensions for images.

    Comes from http://www.djangosnippets.org/snippets/965/
    """

    def find_extension(self, format):
        """Normalizes PIL-returned format into a standard, lowercase extension."""
        format = format.lower()

        if format == 'jpeg':
            format = 'jpg'

        return format

    def save(self, name, content):
        """Save *content*, rewriting the file extension to match the
        image type that PIL actually detects in the data.
        """
        dirname = os.path.dirname(name)
        basename = os.path.basename(name)

        # Use PIL to determine filetype, feeding the parser incrementally
        # until it recognises an image header.
        p = PILImageFile.Parser()
        im = None
        while True:
            data = content.read(1024)
            if not data:
                break
            p.feed(data)
            if p.image:
                im = p.image
                break
        if im is None:
            # The stream ended before PIL recognised an image.  close()
            # returns the parsed image or raises IOError for non-image
            # data; previously this path crashed with NameError on 'im'.
            im = p.close()

        extension = self.find_extension(im.format)

        # Does the basename already have an extension? If so, replace it.
        # bare as in without extension
        bare_basename, _ = os.path.splitext(basename)
        basename = bare_basename + '.' + extension

        name = os.path.join(dirname, basename)
        return super(ImageStorage, self).save(name, content)
+    

File backends/mogile.py

+import urlparse
+import mimetypes
+from StringIO import StringIO
+
+from django.conf import settings
+from django.core.cache import cache
+from django.utils.text import force_unicode
+from django.core.files.storage import Storage
+from django.http import HttpResponse, HttpResponseNotFound
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    import mogilefs
+except ImportError:
+    raise ImproperlyConfigured, "Could not load mogilefs dependency.\
+    \nSee http://mogilefs.pbworks.com/Client-Libraries"
+
+
+class MogileFSStorage(Storage):
+    """MogileFS filesystem storage"""
+    def __init__(self, base_url=settings.MEDIA_URL):
+        
+        # the MOGILEFS_MEDIA_URL overrides MEDIA_URL
+        if hasattr(settings, 'MOGILEFS_MEDIA_URL'):
+            self.base_url = settings.MOGILEFS_MEDIA_URL
+        else:
+            self.base_url = base_url
+                
+        for var in ('MOGILEFS_TRACKERS', 'MOGILEFS_DOMAIN',):
+            if not hasattr(settings, var):
+                raise ImproperlyConfigured, "You must define %s to use the MogileFS backend." % var
+            
+        self.trackers = settings.MOGILEFS_TRACKERS
+        self.domain = settings.MOGILEFS_DOMAIN
+        self.client = mogilefs.Client(self.domain, self.trackers)
+    
+    def get_mogile_paths(self, filename):
+        return self.client.get_paths(filename)  
+    
+    # The following methods define the Backend API
+
+    def filesize(self, filename):
+        raise NotImplemented
+        #return os.path.getsize(self._get_absolute_path(filename))
+    
+    def path(self, filename):
+        paths = self.get_mogile_paths(filename)
+        if paths:
+            return self.get_mogile_paths(filename)[0]
+        else:
+            return None
+    
+    def url(self, filename):
+        return urlparse.urljoin(self.base_url, filename).replace('\\', '/')
+
+    def open(self, filename, mode='rb'):
+        raise NotImplemented
+        #return open(self._get_absolute_path(filename), mode)
+
+    def exists(self, filename):
+        return filename in self.client
+
+    def save(self, filename, raw_contents):
+        filename = self.get_available_filename(filename)
+        
+        if not hasattr(self, 'mogile_class'):
+            self.mogile_class = None
+
+        # Write the file to mogile
+        success = self.client.send_file(filename, StringIO(raw_contents), self.mogile_class)
+        if success:
+            print "Wrote file to key %s, %s@%s" % (filename, self.domain, self.trackers[0])
+        else:
+            print "FAILURE writing file %s" % (filename)
+
+        return force_unicode(filename.replace('\\', '/'))
+
+    def delete(self, filename):
+        
+        self.client.delete(filename)
+            
+        
def serve_mogilefs_file(request, key=None):
    """
    Called when a user requests an image.
    Either reproxy the path to perlbal, or serve the image outright.

    With SERVE_WITH_PERLBAL enabled, only the X-REPROXY-URL header is
    returned and perlbal streams the bytes; otherwise the file contents
    are fetched from MogileFS and served directly by Django.
    """
    # not the best way to do this, since we create a client each time
    mimetype = mimetypes.guess_type(key)[0] or "application/x-octet-stream"
    client = mogilefs.Client(settings.MOGILEFS_DOMAIN, settings.MOGILEFS_TRACKERS)
    if hasattr(settings, "SERVE_WITH_PERLBAL") and settings.SERVE_WITH_PERLBAL:
        # we're reproxying with perlbal

        # check the path cache
        path = cache.get(key)

        if not path:
            # Cache miss: resolve the storage paths and keep them for 60s.
            path = client.get_paths(key)
            cache.set(key, path, 60)

        if path:
            # Empty body; perlbal fetches path[0] itself via this header.
            response = HttpResponse(content_type=mimetype)
            response['X-REPROXY-URL'] = path[0]
        else:
            response = HttpResponseNotFound()

    else:
        # we don't have perlbal, let's just serve the image via django
        file_data = client[key]
        if file_data:
            response = HttpResponse(file_data, mimetype=mimetype)
        else:
            response = HttpResponseNotFound()

    return response

File backends/mosso.py

+"""
+Custom storage for django with Mosso Cloud Files backend.
+Created by Rich Leland <rich@richleland.com>.
+"""
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files import File
+from django.core.files.storage import Storage
+from django.utils.text import get_valid_filename
+
+try:
+    import cloudfiles
+    from cloudfiles.errors import NoSuchObject
+except ImportError:
+    raise ImproperlyConfigured("Could not load cloudfiles dependency. See "
+                               "http://www.mosso.com/cloudfiles.jsp.")
+
+# TODO: implement TTL into cloudfiles methods
+CLOUDFILES_TTL = getattr(settings, 'CLOUDFILES_TTL', 600)
+
+
def cloudfiles_upload_to(self, filename):
    """upload_to callback that flattens paths to a bare, valid filename.

    Cloud Files containers are flat -- nested directories are only
    emulated (per @minter, via zero-byte application/directory objects),
    so the directory portion of *filename* is discarded.  Real
    pseudo-subdirectory support may come in a future version.
    """
    return get_valid_filename(filename)
+
+
class CloudFilesStorage(Storage):
    """
    Custom storage for Mosso Cloud Files.
    """
    default_quick_listdir = True

    def __init__(self, username=None, api_key=None, container=None,
                 connection_kwargs=None):
        """
        Initialize the settings for the connection and container.

        Explicit arguments win; otherwise the CLOUDFILES_* settings are
        used.  No network traffic happens here: the connection and
        container are created lazily by their properties.
        """
        self.username = username or settings.CLOUDFILES_USERNAME
        self.api_key = api_key or settings.CLOUDFILES_API_KEY
        self.container_name = container or settings.CLOUDFILES_CONTAINER
        self.connection_kwargs = connection_kwargs or {}

    def __getstate__(self):
        """
        Return a picklable representation of the storage.
        """
        # The lazy _connection/_container attributes are deliberately
        # omitted; they are re-created on first use after unpickling.
        return dict(username=self.username,
                    api_key=self.api_key,
                    container_name=self.container_name,
                    connection_kwargs=self.connection_kwargs)

    def _get_connection(self):
        # Authenticate lazily on first use and cache the connection.
        if not hasattr(self, '_connection'):
            self._connection = cloudfiles.get_connection(self.username,
                                    self.api_key, **self.connection_kwargs)
        return self._connection

    def _set_connection(self, value):
        self._connection = value

    connection = property(_get_connection, _set_connection)

    def _get_container(self):
        # Lazily fetch the container; assigning via self.container routes
        # through _set_container, which makes it public.
        if not hasattr(self, '_container'):
            self.container = self.connection.get_container(
                                                        self.container_name)
        return self._container

    def _set_container(self, container):
        """
        Set the container, making it publicly available (on Limelight CDN) if
        it is not already.
        """
        if not container.is_public():
            container.make_public()
        if hasattr(self, '_container_public_uri'):
            # Invalidate the cached public URI for the previous container.
            delattr(self, '_container_public_uri')
        self._container = container

    container = property(_get_container, _set_container)

    def _get_container_url(self):
        # Cached after the first lookup.
        if not hasattr(self, '_container_public_uri'):
            self._container_public_uri = self.container.public_uri()
        return self._container_public_uri

    container_url = property(_get_container_url)

    def _get_cloud_obj(self, name):
        """
        Helper function to get retrieve the requested Cloud Files Object.
        """
        return self.container.get_object(name)

    def _open(self, name, mode='rb'):
        """
        Return the CloudFilesStorageFile.

        ``mode`` is accepted for API compatibility but not used here.
        """
        return CloudFilesStorageFile(storage=self, name=name)

    def _save(self, name, content):
        """
        Use the Cloud Files service to write ``content`` to a remote file
        (called ``name``).
        """
        content.open()
        cloud_obj = self.container.create_object(name)
        # NOTE(review): assumes content.file exposes .size -- confirm for
        # all upload file types used with this backend.
        cloud_obj.size = content.file.size
        # If the content type is available, pass it in directly rather than
        # getting the cloud object to try to guess.
        if hasattr(content.file, 'content_type'):
            cloud_obj.content_type = content.file.content_type
        cloud_obj.send(content)
        content.close()
        return name

    def delete(self, name):
        """
        Deletes the specified file from the storage system.
        """
        self.container.delete_object(name)

    def exists(self, name):
        """
        Returns True if a file referenced by the given name already exists in
        the storage system, or False if the name is available for a new file.
        """
        try:
            self._get_cloud_obj(name)
            return True
        except NoSuchObject:
            return False

    def listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple; the
        first being an empty list of directories (not available for quick-
        listing), the second being a list of filenames.

        If the list of directories is required, use the full_listdir method.
        """
        files = []
        if path and not path.endswith('/'):
            path = '%s/' % path
        path_len = len(path)
        for name in self.container.list_objects(path=path):
            # Strip the prefix so names are relative to *path*.
            files.append(name[path_len:])
        return ([], files)

    def full_listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.

        On large containers, this may be a slow operation for root containers
        because every single object must be returned (cloudfiles does not
        provide an explicit way of listing directories).
        """
        dirs = set()
        files = []
        if path and not path.endswith('/'):
            path = '%s/' % path
        path_len = len(path)
        for name in self.container.list_objects(prefix=path):
            name = name[path_len:]
            # Find a '/' strictly inside the name (ignoring a leading or
            # trailing slash): its presence marks a pseudo-directory.
            slash = name[1:-1].find('/') + 1
            if slash:
                dirs.add(name[:slash])
            elif name:
                files.append(name)
        dirs = list(dirs)
        dirs.sort()
        return (dirs, files)

    def size(self, name):
        """
        Returns the total size, in bytes, of the file specified by name.
        """
        return self._get_cloud_obj(name).size

    def url(self, name):
        """
        Returns an absolute URL where the file's contents can be accessed
        directly by a web browser.
        """
        return '%s/%s' % (self.container_url, name)
+
+
class CloudFilesStorageFile(File):
    """File object backed by a Cloud Files storage object.

    The remote object is fetched lazily via the ``file`` property; reads
    track their own offset because cloudfiles reads are positional.
    (A duplicate class attribute ``closed = False``, dead because the
    ``closed`` property below shadowed it, has been removed.)
    """

    def __init__(self, storage, name, *args, **kwargs):
        self._storage = storage
        # Start the read offset at 0 so read() works even when the caller
        # never invokes open() first (previously an AttributeError).
        self._pos = 0
        super(CloudFilesStorageFile, self).__init__(file=None, name=name,
                                                    *args, **kwargs)

    def _get_size(self):
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self.name)
        return self._size

    def _set_size(self, size):
        self._size = size

    size = property(_get_size, _set_size)

    def _get_file(self):
        # Fetch the cloud object lazily and cache it.
        if not hasattr(self, '_file'):
            self._file = self._storage._get_cloud_obj(self.name)
        return self._file

    def _set_file(self, value):
        # Setting None clears the cache (used by File.__init__ above).
        if value is None:
            if hasattr(self, '_file'):
                del self._file
        else:
            self._file = value

    file = property(_get_file, _set_file)

    def read(self, num_bytes=None):
        data = self.file.read(size=num_bytes or -1, offset=self._pos)
        self._pos += len(data)
        return data

    def open(self, *args, **kwargs):
        """
        Open the cloud file object.
        """
        self.file
        self._pos = 0

    def close(self, *args, **kwargs):
        self._pos = 0

    @property
    def closed(self):
        return not hasattr(self, '_file')

    def seek(self, pos):
        self._pos = pos

File backends/overwrite.py

+import os
+
+from django.conf import settings
+from django.core.files.storage import FileSystemStorage
+
class OverwriteStorage(FileSystemStorage):
    """FileSystemStorage variant that overwrites instead of renaming.

    Based on http://www.djangosnippets.org/snippets/976/ (the same idea
    has long existed in S3Storage); see also Django ticket #4339, which
    might add this functionality to core.
    """

    def get_available_name(self, name):
        """Always return *name* itself, deleting any file occupying it."""
        if self.exists(name):
            # Free the slot so new content can be written under this name.
            self.delete(name)
        return name

File backends/s3.py

+import os
+import mimetypes
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.utils.functional import curry
+
+try:
+    from S3 import AWSAuthConnection, QueryStringAuthGenerator
+except ImportError:
+    raise ImproperlyConfigured, "Could not load amazon's S3 bindings.\
+    \nSee http://developer.amazonwebservices.com/connect/entry.jspa?externalID=134"
+
+ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
+SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
+HEADERS = 'AWS_HEADERS'
+
+DEFAULT_ACL= getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
+QUERYSTRING_ACTIVE= getattr(settings, 'AWS_QUERYSTRING_ACTIVE', False)
+QUERYSTRING_EXPIRE= getattr(settings, 'AWS_QUERYSTRING_EXPIRE', 60)
+SECURE_URLS= getattr(settings, 'AWS_S3_SECURE_URLS', False)
+
+
class S3Storage(Storage):
    """Amazon Simple Storage Service backend (legacy S3.py bindings)."""

    def __init__(self, bucket=settings.AWS_STORAGE_BUCKET_NAME,
            access_key=None, secret_key=None, acl=DEFAULT_ACL,
            calling_format=settings.AWS_CALLING_FORMAT):
        self.bucket = bucket
        self.acl = acl

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = AWSAuthConnection(access_key, secret_key,
                            calling_format=calling_format)
        # Separate generator used only to build (optionally signed) URLs.
        self.generator = QueryStringAuthGenerator(access_key, secret_key,
                            calling_format=calling_format,
                            is_secure=SECURE_URLS)
        self.generator.set_expires_in(QUERYSTRING_EXPIRE)

        self.headers = getattr(settings, HEADERS, {})

    def _get_access_keys(self):
        """Return (access, secret) from settings, else from the
        environment, else (None, None)."""
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        if (access_key or secret_key) and (not access_key or not secret_key):
            # Only one half configured in settings: fall back to env vars.
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_connection(self):
        return AWSAuthConnection(*self._get_access_keys())

    def _clean_name(self, name):
        # Useful for windows' paths
        return os.path.normpath(name).replace('\\', '/')

    def _put_file(self, name, content):
        """PUT *content* (a raw byte string) under key *name*."""
        content_type = mimetypes.guess_type(name)[0] or "application/x-octet-stream"
        # Build the per-request headers on a copy: the old code mutated
        # self.headers in place, leaking a stale Content-Length and
        # Content-Type into every later request.
        headers = self.headers.copy()
        headers.update({
            'x-amz-acl': self.acl,
            'Content-Type': content_type,
            'Content-Length': len(content),
        })
        response = self.connection.put(self.bucket, name, content, headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)

    def _open(self, name, mode='rb'):
        name = self._clean_name(name)
        remote_file = S3StorageFile(name, self, mode=mode)
        return remote_file

    def _read(self, name, start_range=None, end_range=None):
        """GET *name*, optionally restricted to an inclusive byte range.

        Returns (data, etag, content-range header value or None).
        """
        name = self._clean_name(name)
        if start_range is None:
            headers = {}
        else:
            headers = {'Range': 'bytes=%s-%s' % (start_range, end_range)}
        response = self.connection.get(self.bucket, name, headers)
        if response.http_response.status not in (200, 206):
            raise IOError("S3StorageError: %s" % response.message)
        headers = response.http_response.msg
        return response.object.data, headers.get('etag', None), headers.get('content-range', None)

    def _save(self, name, content):
        """Upload Django File *content* under *name* in one request."""
        name = self._clean_name(name)
        content.open()
        if hasattr(content, 'chunks'):
            content_str = ''.join(chunk for chunk in content.chunks())
        else:
            content_str = content.read()
        self._put_file(name, content_str)
        return name

    def delete(self, name):
        name = self._clean_name(name)
        response = self.connection.delete(self.bucket, name)
        if response.http_response.status != 204:
            raise IOError("S3StorageError: %s" % response.message)

    def exists(self, name):
        """HEAD the key; 200 means it exists."""
        name = self._clean_name(name)
        response = self.connection._make_request('HEAD', self.bucket, name)
        return response.status == 200

    def size(self, name):
        """Return the Content-Length reported by a HEAD request, or 0."""
        name = self._clean_name(name)
        response = self.connection._make_request('HEAD', self.bucket, name)
        content_length = response.getheader('Content-Length')
        return content_length and int(content_length) or 0

    def url(self, name):
        name = self._clean_name(name)
        if QUERYSTRING_ACTIVE:
            return self.generator.generate_url('GET', self.bucket, name)
        else:
            return self.generator.make_bare_url(self.bucket, name)

    ## UNCOMMENT BELOW IF NECESSARY
    #def get_available_name(self, name):
    #    """ Overwrite existing file with the same name. """
    #    name = self._clean_name(name)
    #    return name
+
+
class S3StorageFile(File):
    # In-memory File over an S3 key, reading via HTTP ranged GETs.
    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = StringIO()
        # Offset at which the next ranged read starts.
        self.start_range = 0

    @property
    def size(self):
        """Size in bytes, fetched from the backend once and cached."""
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        """Read the whole object, or the next *num_bytes* via a Range GET."""
        if num_bytes is None:
            args = []
            self.start_range = 0
        else:
            # Inclusive byte range for the HTTP Range header.
            args = [self.start_range, self.start_range+num_bytes-1]
        data, etags, content_range = self._storage._read(self._name, *args)
        if content_range is not None:
            # Content-Range is "bytes start-end/total": advance the read
            # cursor past 'end' and cache the object's total size.
            current_range, size = content_range.split(' ', 1)[1].split('/', 1)
            start_range, end_range = current_range.split('-', 1)
            self._size, self.start_range = int(size), int(end_range)+1
        self.file = StringIO(data)
        return self.file.getvalue()

    def write(self, content):
        """Replace the buffered contents; flushed to S3 on close()."""
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True

    def close(self):
        # Flush buffered writes in a single PUT, then release the buffer.
        if self._is_dirty:
            self._storage._put_file(self._name, self.file.getvalue())
        self.file.close()

File backends/s3boto.py

+import os
+
+from django.conf import settings
+from django.core.files.base import File
+from django.core.files.storage import Storage
+from django.utils.functional import curry
+from django.core.exceptions import ImproperlyConfigured
+
+try:
+    from boto.s3.connection import S3Connection
+    from boto.s3.key import Key
+except ImportError:
+    raise ImproperlyConfigured, "Could not load Boto's S3 bindings.\
+    \nSee http://code.google.com/p/boto/"
+
+ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
+SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
+HEADERS         = 'AWS_HEADERS'
+BUCKET_NAME     = 'AWS_STORAGE_BUCKET_NAME'
+DEFAULT_ACL     = 'AWS_DEFAULT_ACL'
+QUERYSTRING_AUTH = 'AWS_QUERYSTRING_AUTH'
+QUERYSTRING_EXPIRE = 'AWS_QUERYSTRING_EXPIRE'
+
+BUCKET_PREFIX     = getattr(settings, BUCKET_NAME, {})
+HEADERS           = getattr(settings, HEADERS, {})
+DEFAULT_ACL       = getattr(settings, DEFAULT_ACL, 'public-read')
+QUERYSTRING_AUTH  = getattr(settings, QUERYSTRING_AUTH, True)
+QUERYSTRING_EXPIRE= getattr(settings, QUERYSTRING_EXPIRE, 3600)
+
+
class S3BotoStorage(Storage):
    """Amazon Simple Storage Service using Boto"""

    def __init__(self, bucket="root", bucketprefix=BUCKET_PREFIX,
            access_key=None, secret_key=None, acl=DEFAULT_ACL, headers=HEADERS):
        self.acl = acl
        self.headers = headers

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = S3Connection(access_key, secret_key)
        self.bucket = self.connection.create_bucket(bucketprefix + bucket)
        self.bucket.set_acl(self.acl)

    def _get_access_keys(self):
        """Return (access, secret) from settings, else from the
        environment, else (None, None)."""
        access_key = getattr(settings, ACCESS_KEY_NAME, None)
        secret_key = getattr(settings, SECRET_KEY_NAME, None)
        if (access_key or secret_key) and (not access_key or not secret_key):
            # Only one half configured in settings: fall back to env vars.
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _clean_name(self, name):
        # Useful for windows' paths
        return os.path.normpath(name).replace('\\', '/')

    def _open(self, name, mode='rb'):
        name = self._clean_name(name)
        return S3BotoStorageFile(name, mode, self)

    def _save(self, name, content):
        name = self._clean_name(name)
        # Build per-request headers on a copy: the old code mutated
        # self.headers, so one upload's Content-Type leaked into every
        # later upload.
        headers = self.headers.copy()
        if hasattr(content.file, 'content_type'):
            headers['Content-Type'] = content.file.content_type
        content.name = name
        k = self.bucket.get_key(name)
        if not k:
            k = self.bucket.new_key(name)
        k.set_contents_from_file(content, headers=headers, policy=self.acl)
        return name

    def delete(self, name):
        self.bucket.delete_key(self._clean_name(name))

    def exists(self, name):
        name = self._clean_name(name)
        k = Key(self.bucket, name)
        return k.exists()

    def listdir(self, name):
        # NOTE(review): returns a flat list of key names sharing the
        # prefix, not the (directories, files) pair django's Storage API
        # specifies -- confirm callers expect this shape before changing.
        name = self._clean_name(name)
        return [l.name for l in self.bucket.list() if not len(name) or l.name[:len(name)] == name]

    def size(self, name):
        name = self._clean_name(name)
        return self.bucket.get_key(name).size

    def url(self, name):
        name = self._clean_name(name)
        return self.bucket.get_key(name).generate_url(QUERYSTRING_EXPIRE, method='GET', query_auth=QUERYSTRING_AUTH)

    def get_available_name(self, name):
        """ Overwrite existing file with the same name. """
        return self._clean_name(name)
+
+
class S3BotoStorageFile(File):
    # Thin wrapper delegating directly to the boto Key.
    # NOTE(review): File.__init__ is never called, and bucket.get_key()
    # can return None for a missing key -- confirm this is only ever
    # constructed for existing objects.
    def __init__(self, name, mode, storage):
        self._storage = storage
        self._name = name
        self._mode = mode
        self.key = storage.bucket.get_key(name)

    def size(self):
        # NOTE(review): defined as a method, so callers must use
        # f.size() -- confirm against django File's size property.
        return self.key.size

    def read(self, *args, **kwargs):
        return self.key.read(*args, **kwargs)

    def write(self, content):
        # Uploads immediately, with the storage-wide headers and ACL.
        self.key.set_contents_from_string(content, headers=self._storage.headers, acl=self._storage.acl)

    def close(self):
        self.key.close()

File backends/symlinkorcopy.py

+import os
+
+from django.conf import settings
+from django.core.files.storage import FileSystemStorage
+
+__doc__ = """
+I needed to efficiently create a mirror of a directory tree (so that 
+"origin pull" CDNs can automatically pull files). The trick was that 
+some files could be modified, and some could be identical to the original. 
+Of course it doesn't make sense to store the exact same data twice on the 
+file system. So I created SymlinkOrCopyStorage.
+
+SymlinkOrCopyStorage allows you to symlink a file when it's identical to 
+the original file and to copy the file if it's modified.
+Of course, it's impossible to know if a file is modified just by looking 
+at the file, without knowing what the original file was.
+That's what the symlinkWithin parameter is for. It accepts one or more paths 
+(if multiple, they should be concatenated using a colon (:)). 
+Files that will be saved using SymlinkOrCopyStorage are then checked on their 
+location: if they are within one of the symlink_within directories, 
+they will be symlinked, otherwise they will be copied.
+
+The rationale is that unmodified files will exist in their original location, 
+e.g. /htdocs/example.com/image.jpg and modified files will be stored in 
+a temporary directory, e.g. /tmp/image.jpg.
+"""
+
+class SymlinkOrCopyStorage(FileSystemStorage):
+    """Stores symlinks to files instead of actual files whenever possible
+    
+    When a file that's being saved is currently stored in the symlink_within
+    directory, then symlink the file. Otherwise, copy the file.
+    """
+    def __init__(self, location=settings.MEDIA_ROOT, base_url=settings.MEDIA_URL, 
+            symlink_within=None):