Commits

Mitsukuni Sato committed 3060bb2 Merge

merge from current

Comments (0)

Files changed (26)

 syntax:glob
 
+*.DS_Store
+*.egg
+*.egg-info
 *.elc
+*.gz
+*.log
+*.orig
 *.pyc
-*~
-*.orig
-*.log
 *.swp
 *.tmp
-*.DS_Store
-testdb.sqlite
+*~
+_build/
+build/
+dist/*
 django
 local_settings.py
-dist/*
-*.egg
-*.gz
-*.egg-info
 setuptools*
-build/
+testdb.sqlite
+django-storages change log
+==========================
+
+1.1.3 (2011-08-15)
+******************
+
+* Created this lovely change log
+* Fixed `#89`_: broken StringIO import in CloudFiles backend
+* Merged `pull request #5`_: HashPathStorage path bug
+
+.. _#89: https://bitbucket.org/david/django-storages/issue/89/112-broke-the-mosso-backend
+.. _pull request #5: https://bitbucket.org/david/django-storages/pull-request/5/fixed-path-bug-and-added-testcase-for
+
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	-rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-storages.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-storages.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/django-storages"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-storages"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	make -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."

docs/backends/amazon-S3.rst

+Amazon S3
+=========
+
+Usage
+*****
+
+There are two backend APIs for interacting with S3. The first is the s3 backend (in storages/backends/s3.py) which is simple and based on the Amazon S3 Python library. The second is the s3boto backend (in storages/backends/s3boto.py) which is well-maintained by the community and is generally more robust (including connection pooling, etc...). s3boto requires the python-boto library.
+
+Settings
+--------
+
+``DEFAULT_FILE_STORAGE``
+
+This setting sets the path to the S3 storage class; the first part corresponds to the file path and the second is the name of the class. If you've got example.com in your PYTHONPATH and store your storage file in example.com/libs/storages/S3Storage.py, the resulting setting will be::
+
+    DEFAULT_FILE_STORAGE = 'libs.storages.S3Storage.S3Storage'
+
+or if you installed using setup.py::
+
+    DEFAULT_FILE_STORAGE = 'storages.backends.s3.S3Storage'
+
+If you keep the same filename as in repository, it should always end with S3Storage.S3Storage.
+
+To use s3boto, this setting will be::
+
+    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
+
+``AWS_ACCESS_KEY_ID``
+
+Your Amazon Web Services access key, as a string.
+
+``AWS_SECRET_ACCESS_KEY``
+
+Your Amazon Web Services secret access key, as a string.
+
+``AWS_STORAGE_BUCKET_NAME``
+
+Your Amazon Web Services storage bucket name, as a string.
+
+``AWS_CALLING_FORMAT`` (Subdomain hardcoded in s3boto)
+
+The way you'd like to call the Amazon Web Services API, for instance if you prefer subdomains::
+
+    from S3 import CallingFormat
+    AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
+
+``AWS_HEADERS`` (optional)
+
+If you'd like to set headers sent with each file of the storage::
+
+    # see http://developer.yahoo.com/performance/rules.html#expires
+    AWS_HEADERS = {
+        'Expires': 'Thu, 15 Apr 2010 20:00:00 GMT',
+        'Cache-Control': 'max-age=86400',
+    }
+
+To allow ``django-admin.py`` collectstatic to automatically put your static files in your bucket set the following in your settings.py::
+
+    STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
+
+Fields
+------
+
+Once you're done, default_storage will be the S3 storage::
+
+    >>> from django.core.files.storage import default_storage
+    >>> print default_storage.__class__
+    <class 'S3Storage.S3Storage'>
+
+The above doesn't seem to be true for Django 1.3+; instead, look at::
+
+    >>> from django.core.files.storage import default_storage
+    >>> print default_storage.connection
+    S3Connection:s3.amazonaws.com
+
+This way, if you define a new FileField, it will use the S3 storage::
+
+    >>> from django.db import models
+    >>> class Resume(models.Model):
+    ...     pdf = models.FileField(upload_to='pdfs')
+    ...     photos = models.ImageField(upload_to='photos')
+    ...
+    >>> resume = Resume()
+    >>> print resume.pdf.storage
+    <S3Storage.S3Storage object at ...>
+
+Tests
+*****
+
+Initialization::
+
+    >>> from django.core.files.storage import default_storage
+    >>> from django.core.files.base import ContentFile
+    >>> from django.core.cache import cache
+    >>> from models import MyStorage
+
+Storage
+-------
+
+Standard file access options are available, and work as expected::
+
+    >>> default_storage.exists('storage_test')
+    False
+    >>> file = default_storage.open('storage_test', 'w')
+    >>> file.write('storage contents')
+    >>> file.close()
+
+    >>> default_storage.exists('storage_test')
+    True
+    >>> file = default_storage.open('storage_test', 'r')
+    >>> file.read()
+    'storage contents'
+    >>> file.close()
+
+    >>> default_storage.delete('storage_test')
+    >>> default_storage.exists('storage_test')
+    False
+
+Model
+-----
+
+An object without a file has limited functionality::
+
+    >>> obj1 = MyStorage()
+    >>> obj1.normal
+    <FieldFile: None>
+    >>> obj1.normal.size
+    Traceback (most recent call last):
+    ...
+    ValueError: The 'normal' attribute has no file associated with it.
+
+Saving a file enables full functionality::
+
+    >>> obj1.normal.save('django_test.txt', ContentFile('content'))
+    >>> obj1.normal
+    <FieldFile: tests/django_test.txt>
+    >>> obj1.normal.size
+    7
+    >>> obj1.normal.read()
+    'content'
+
+Files can be read in a little at a time, if necessary::
+
+    >>> obj1.normal.open()
+    >>> obj1.normal.read(3)
+    'con'
+    >>> obj1.normal.read()
+    'tent'
+    >>> '-'.join(obj1.normal.chunks(chunk_size=2))
+    'co-nt-en-t'
+
+Save another file with the same name::
+
+    >>> obj2 = MyStorage()
+    >>> obj2.normal.save('django_test.txt', ContentFile('more content'))
+    >>> obj2.normal
+    <FieldFile: tests/django_test_.txt>
+    >>> obj2.normal.size
+    12
+
+Push the objects into the cache to make sure they pickle properly::
+
+    >>> cache.set('obj1', obj1)
+    >>> cache.set('obj2', obj2)
+    >>> cache.get('obj2').normal
+    <FieldFile: tests/django_test_.txt>
+
+Deleting an object deletes the file it uses, if there are no other objects still using that file::
+
+    >>> obj2.delete()
+    >>> obj2.normal.save('django_test.txt', ContentFile('more content'))
+    >>> obj2.normal
+    <FieldFile: tests/django_test_.txt>
+
+Default values allow an object to access a single file::
+
+    >>> obj3 = MyStorage.objects.create()
+    >>> obj3.default
+    <FieldFile: tests/default.txt>
+    >>> obj3.default.read()
+    'default content'
+
+But it shouldn't be deleted, even if there are no more objects using it::
+
+    >>> obj3.delete()
+    >>> obj3 = MyStorage()
+    >>> obj3.default.read()
+    'default content'
+
+Verify the fix for #5655, making sure the directory is only determined once::
+
+    >>> obj4 = MyStorage()
+    >>> obj4.random.save('random_file', ContentFile('random content'))
+    >>> obj4.random
+    <FieldFile: .../random_file>
+
+Clean up the temporary files::
+
+    >>> obj1.normal.delete()
+    >>> obj2.normal.delete()
+    >>> obj3.default.delete()
+    >>> obj4.random.delete()

docs/backends/couchdb.rst

+CouchDB
+=======
+
+A custom storage system for Django with CouchDB backend.
+

docs/backends/database.rst

+Database
+========
+
+Class DatabaseStorage can be used with either FileField or ImageField. It can be used to map filenames to database blobs: so you have to use it with a special additional table created manually. The table should contain a pk-column for filenames (better to use the same type that FileField uses: nvarchar(100)), a blob field (image type for example) and a size field (bigint). You can't just create a blob column in the same table where you defined FileField, since there is no way to find the required row in the save() method. Also the size field is required to obtain better performance (see the size() method).
+
+So you can use it with different FileFields and even with different "upload_to" variables used. Thus it implements a kind of root filesystem, where you can define dirs using "upload_to" with FileField and store any files in these dirs.
+
+It uses either settings.DB_FILES_URL or constructor param 'base_url' (see __init__()) to create urls to files. Base url should be mapped to view that provides access to files. To store files in the same table, where FileField is defined you have to define your own field and provide extra argument (e.g. pk) to save().
+
+Raw SQL is used for all operations. In the constructor, or in DB_FILES in settings.py, you should specify a dictionary with 'db_table', 'fname_column', 'blob_column', 'size_column' and 'base_url'. For example, I just put the following lines in settings.py::
+
+    DB_FILES = {
+        'db_table': 'FILES',
+        'fname_column':  'FILE_NAME',
+        'blob_column': 'BLOB',
+        'size_column': 'SIZE',
+        'base_url': 'http://localhost/dbfiles/'
+    }
+
+And use it with ImageField as following::
+
+    player_photo = models.ImageField(upload_to="player_photos", storage=DatabaseStorage() )
+
+DatabaseStorage class uses your settings.py file to perform custom connection to your database.
+
+The reason to use custom connection: http://code.djangoproject.com/ticket/5135 Connection string looks like::
+
+    cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=localhost;DATABASE=testdb;UID=me;PWD=pass')
+
+It's based on pyodbc module, so can be used with any database supported by pyodbc. I've tested it with MS Sql Express 2005.
+
+Note: It returns special path, which should be mapped to special view, which returns requested file::
+
+    def image_view(request, filename):
+        import os
+        from django.http import HttpResponse
+        from django.conf import settings
+        from django.utils._os import safe_join
+        from filestorage import DatabaseStorage
+        from django.core.exceptions import ObjectDoesNotExist
+
+        storage = DatabaseStorage()
+
+        try:
+            image_file = storage.open(filename, 'rb')
+            file_content = image_file.read()
+        except:
+            filename = 'no_image.gif'
+            path = safe_join(os.path.abspath(settings.MEDIA_ROOT), filename)
+            if not os.path.exists(path):
+                raise ObjectDoesNotExist
+            no_image = open(path, 'rb')
+            file_content = no_image.read()
+
+        response = HttpResponse(file_content, mimetype="image/jpeg")
+        response['Content-Disposition'] = 'inline; filename=%s'%filename
+        return response
+
+Note: If the filename already exists, the blob will be overwritten. To change this behavior, remove get_available_name(self, name), so that Storage.get_available_name(self, name) will be used to generate a new filename.

docs/backends/ftp.rst

+FTP
+===
+
+.. warning:: This FTP storage is not prepared to work with large files, because it uses memory for temporary data storage. It also does not close the FTP connection automatically (but opens it lazily and tries to reestablish it when disconnected).
+
+This implementation was done primarily for uploading files in the admin to a remote FTP location and reading them back on the site over HTTP. It was tested mostly in this configuration, so read/write using the FTPStorageFile class may break.
+

docs/backends/image.rst

+Image
+=====
+
+A custom FileSystemStorage made for normalizing extensions. It lets PIL look at the file to determine the format and append an always lower-case extension based on the results.
+

docs/backends/mogilefs.rst

+MogileFS
+========
+
+This storage allows you to use MogileFS, it comes from this blog post.
+
+The MogileFS storage backend is fairly simple: it uses URLs (or, rather, parts of URLs) as keys into the mogile database. When the user requests a file stored by mogile (say, an avatar), the URL gets passed to a view which, using a client to the mogile tracker, retrieves the "correct" path (the path that points to the actual file data). The view will then either return the path(s) to perlbal to reproxy, or, if you're not using perlbal to reproxy (which you should), it serves the data of the file directly from django.
+
+* ``MOGILEFS_DOMAIN``: The mogile domain that files should read from/written to, e.g "production"
+* ``MOGILEFS_TRACKERS``: A list of trackers to connect to, e.g. ["foo.sample.com:7001", "bar.sample.com:7001"]
+* ``MOGILEFS_MEDIA_URL`` (optional): The prefix for URLs that point to mogile files. This is used in a similar way to ``MEDIA_URL``, e.g. "/mogilefs/"
+* ``SERVE_WITH_PERLBAL``: Boolean that, when True, will pass the paths back in the response in the ``X-REPROXY-URL`` header. If False, django will serve all mogile media files itself (bad idea for production, but useful if you're testing on a setup that doesn't have perlbal running)
+* ``DEFAULT_FILE_STORAGE``: This is the class that's used for the backend. You'll want to set this to ``project.app.storages.MogileFSStorage`` (or wherever you've installed the backend)
+
+Getting files into mogile
+*************************
+
+The great thing about file backends is that we just need to specify the backend in the model file and everything is taken care of for us - all the default save() methods work correctly.
+
+For Fluther, we have two main media types we use mogile for: avatars and thumbnails. Mogile defines "classes" that dictate how each type of file is replicated - so you can make sure you have 3 copies of the original avatar but only 1 of the thumbnail.
+
+In order for classes to behave nicely with the backend framework, we've had to do a little tomfoolery. (This is something that may change in future versions of the filestorage framework).
+
+Here's what the models.py file looks like for the avatars::
+
+    from django.core.filestorage import storage
+
+    # TODO: Find a better way to deal with classes. Maybe a generator?
+    class AvatarStorage(storage.__class__):
+        mogile_class = 'avatar'
+
+    class ThumbnailStorage(storage.__class__):
+        mogile_class = 'thumb'
+
+    class Avatar(models.Model):
+        user = models.ForeignKey(User, null=True, blank=True)
+        image = models.ImageField(storage=AvatarStorage())
+        thumb = models.ImageField(storage=ThumbnailStorage())
+
+Each of the custom storage classes defines a class attribute which gets passed to the mogile backend behind the scenes. If you don't want to worry about mogile classes, don't need to define a custom storage engine or specify it in the field - the default should work just fine.
+
+Serving files from mogile
+*************************
+
+Now, all we need to do is plug in the view that serves up mogile data.
+
+Here's what we use::
+
+    urlpatterns += patterns('',
+        (r'^%s(?P<key>.*)' % settings.MOGILEFS_MEDIA_URL[1:],
+            'MogileFSStorage.serve_mogilefs_file')
+    )
+
+Any url beginning with the value of ``MOGILEFS_MEDIA_URL`` will get passed to our view. Since ``MOGILEFS_MEDIA_URL`` requires a leading slash (like ``MEDIA_URL``), we strip that off and pass the rest of the url over to the view.
+
+That's it! Happy mogiling!

docs/backends/mongodb.rst

+MongoDB
+=======
+
+A GridFS backend that works with django_mongodb_engine and the upcoming GSoC 2010 MongoDB backend which gets developed by Alex Gaynor.
+
+Usage (in settings.py)::
+
+    DATABASES = {
+        'default': {
+            'ENGINE': 'django_mongodb_engine.mongodb',
+            'NAME': 'test',
+            'USER': '',
+            'PASSWORD': '',
+            'HOST': 'localhost',
+            'PORT': 27017,
+            'SUPPORTS_TRANSACTIONS': False,
+        }
+    }
+
+    DEFAULT_FILE_STORAGE = 'storages.backends.mongodb.GridFSStorage'
+    GRIDFS_DATABASE = 'default'

docs/backends/overwrite.rst

+Overwrite
+=========
+
+This is a simple overwriting implementation of FileSystemStorage. It removes the addition of an '_' to the filename if the file already exists in the storage system. I needed a model in the admin area to act exactly like a file system (overwriting the file if it already exists).
+

docs/backends/rackspace-cloudfiles.rst

+Rackspace CloudFiles
+====================
+
+Requirements
+************
+
+Mosso's Cloud Files python module http://www.mosso.com/cloudfiles.jsp
+
+Usage
+*****
+
+Add the following to your project's settings.py file::
+
+    CLOUDFILES_USERNAME = 'YourUsername'
+    CLOUDFILES_API_KEY = 'YourAPIKey'
+    CLOUDFILES_CONTAINER = 'ContainerName'
+    DEFAULT_FILE_STORAGE = 'backends.mosso.CloudFilesStorage'
+
+Optionally, you can implement the following custom upload_to in your models.py file. This will upload the file using the file name only to Cloud Files (e.g. 'myfile.jpg'). If you supply a string (e.g. upload_to='some/path'), your file name will include the path (e.g. 'some/path/myfile.jpg')::
+
+    from backends.mosso import cloudfiles_upload_to
+
+    class SomeKlass(models.Model):
+        some_field = models.ImageField(upload_to=cloudfiles_upload_to)
+
+Alternatively, if you don't want to set the DEFAULT_FILE_STORAGE, you can do the following in your models::
+
+    from backends.mosso import CloudFilesStorage, cloudfiles_upload_to
+
+    cloudfiles_storage = CloudFilesStorage()
+
+    class SomeKlass(models.Model):
+        some_field = models.ImageField(storage=cloudfiles_storage,
+                                       upload_to=cloudfiles_upload_to)

docs/backends/sftp.rst

+SFTP
+====
+
+Take a look at the top of the backend's file for the documentation.
+

docs/backends/symlinkcopy.rst

+Symlink or copy
+===============
+
+Stores symlinks to files instead of actual files whenever possible
+
+When a file that's being saved is currently stored in the symlink_within directory, then symlink the file. Otherwise, copy the file.
+# -*- coding: utf-8 -*-
+#
+# django-storages documentation build configuration file, created by
+# sphinx-quickstart on Sun Aug 28 13:44:45 2011.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+import storages
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'django-storages'
+copyright = u'2011, David Larlet, et. al.'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = storages.__version__
+# The full version, including alpha/beta/rc tags.
+release = storages.__version__
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'django-storagesdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'django-storages.tex', u'django-storages Documentation',
+   u'David Larlet, et. al.', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'django-storages', u'django-storages Documentation',
+     [u'David Larlet, et. al.'], 1)
+]
+
+
+# -- Options for Epub output ---------------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = u'django-storages'
+epub_author = u'David Larlet, et. al.'
+epub_publisher = u'David Larlet, et. al.'
+epub_copyright = u'2011, David Larlet, et. al.'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files shat should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+#epub_exclude_files = []
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True

docs/django-mogilefs-storage.txt

-================
-MogileFS Storage
-================
-
-The MogileFS storage backend is fairly simple: it uses URLs (or, rather, 
-parts of URLs) as keys into the mogile database. When the user requests a file 
-stored by mogile (say, an avatar), the URL gets passed to a view which, using 
-a client to the mogile tracker, retrieves the "correct" path (the path that 
-points to the actual file data). The view will then either return the path(s) 
-to perlbal to reproxy, or, if you're not using perlbal to reproxy 
-(which you should), it serves the data of the file directly from django.
-
-In order for the backend to work, we need to add a few settings variables:
-
-    * ``MOGILEFS_DOMAIN``: The mogile domain that files should read 
-      from/written to, e.g "production"
-    * ``MOGILEFS_TRACKERS``: A list of trackers to connect to, 
-      e.g. ["foo.sample.com:7001", "bar.sample.com:7001"]
-    * ``MOGILEFS_MEDIA_URL`` (optional): The prefix for URLs that point to 
-      mogile files. This is used in a similar way to ``MEDIA_URL``, 
-      e.g. "/mogilefs/"
-    * ``SERVE_WITH_PERLBAL``: Boolean that, when True, will pass the paths 
-      back in the response in the ``X-REPROXY-URL`` header. If False, django 
-      will serve all mogile media files itself (bad idea for production, 
-      but useful if you're testing on a setup that doesn't have perlbal 
-      running)
-    * ``DEFAULT_FILE_STORAGE``: This is the class that's used for the backend.
-      You'll want to set this to ``project.app.storages.MogileFSStorage``
-      (or wherever you've installed the backend) 
-
- 
-
-Getting files into mogile
--------------------------
-
-The great thing about file backends is that we just need to specify the 
-backend in the model file and everything is taken care for us - all the 
-default save() methods work correctly.
-
-For Fluther, we have two main media types we use mogile for: avatars and 
-thumbnails. Mogile defines "classes" that dictate how each type of file is 
-replicated - so you can make sure you have 3 copies of the original avatar 
-but only 1 of the thumbnail.
-
-In order for classes to behave nicely with the backend framework, we've had to 
-do a little tomfoolery. (This is something that may change in future versions 
-of the filestorage framework).
-
-Here's what the models.py file looks like for the avatars::
-
-    from django.core.filestorage import storage
-    
-    # TODO: Find a better way to deal with classes. Maybe a generator?
-    class AvatarStorage(storage.__class__):
-        mogile_class = 'avatar' 
-    
-    class ThumbnailStorage(storage.__class__):
-        mogile_class = 'thumb'
-    
-    class Avatar(models.Model):
-        user = models.ForeignKey(User, null=True, blank=True)
-        image = models.ImageField(storage=AvatarStorage())
-        thumb = models.ImageField(storage=ThumbnailStorage())
-
-Each of the custom storage classes defines a ``class`` attribute which gets 
-passed to the mogile backend behind the scenes.  If you don't want to worry 
-about mogile classes, don't need to define a custom storage engine or specify 
-it in the field - the default should work just fine.
-
-Serving files from mogile
--------------------------
-
-Now, all we need to do is plug in the view that serves up mogile data. 
-
-Here's what we use::
-
-  urlpatterns += patterns(",
-      (r'^%s(?P<key>.*)' % settings.MOGILEFS_MEDIA_URL[1:], 
-          'MogileFSStorage.serve_mogilefs_file')
-  )
-
-Any url beginning with the value of ``MOGILEFS_MEDIA_URL`` will get passed to 
-our view. Since ``MOGILEFS_MEDIA_URL`` requires a leading slash (like 
-``MEDIA_URL``), we strip that off and pass the rest of the url over to the 
-view.
-
-That's it! Happy mogiling!
+django-storages
+===============
+
+django-storages is a collection of custom storage backends for Django.
+
+.. toctree::
+   :maxdepth: 1
+   :glob:
+
+   backends/*
+
+Installation
+************
+
+Use pip to install from PyPI::
+
+    pip install django-storages
+
+Add ``storages`` to your settings.py file::
+
+    INSTALLED_APPS = (
+        ...
+        'storages',
+        ...
+    )
+
+Each storage backend has its own unique settings you will need to add to your settings.py file. Read the documentation for your storage engine(s) of choice to determine what you need to add.
+
+Contributing
+************
+
+To contribute to django-storages `create a fork`_ on bitbucket. Clone your fork, make some changes, and submit a pull request.
+
+.. _create a fork: https://bitbucket.org/david/django-storages/fork
+
+Issues
+******
+
+Use the bitbucket `issue tracker`_ for django-storages to submit bugs, issues, and feature requests.
+
+.. _issue tracker: https://bitbucket.org/david/django-storages/issues
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+	:help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+	echo.  html       to make standalone HTML files
+	echo.  dirhtml    to make HTML files named index.html in directories
+	echo.  singlehtml to make a single large HTML file
+	echo.  pickle     to make pickle files
+	echo.  json       to make JSON files
+	echo.  htmlhelp   to make HTML files and a HTML help project
+	echo.  qthelp     to make HTML files and a qthelp project
+	echo.  devhelp    to make HTML files and a Devhelp project
+	echo.  epub       to make an epub
+	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+	echo.  text       to make text files
+	echo.  man        to make manual pages
+	echo.  changes    to make an overview over all changed/added/deprecated items
+	echo.  linkcheck  to check all external links for integrity
+	echo.  doctest    to run all doctests embedded in the documentation if enabled
+	goto end
+)
+
+if "%1" == "clean" (
+	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+	del /q /s %BUILDDIR%\*
+	goto end
+)
+
+if "%1" == "html" (
+	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+	goto end
+)
+
+if "%1" == "dirhtml" (
+	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+	goto end
+)
+
+if "%1" == "singlehtml" (
+	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+	goto end
+)
+
+if "%1" == "pickle" (
+	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the pickle files.
+	goto end
+)
+
+if "%1" == "json" (
+	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can process the JSON files.
+	goto end
+)
+
+if "%1" == "htmlhelp" (
+	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+	goto end
+)
+
+if "%1" == "qthelp" (
+	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-storages.qhcp
+	echo.To view the help file:
+	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-storages.qhc
+	goto end
+)
+
+if "%1" == "devhelp" (
+	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished.
+	goto end
+)
+
+if "%1" == "epub" (
+	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The epub file is in %BUILDDIR%/epub.
+	goto end
+)
+
+if "%1" == "latex" (
+	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+	goto end
+)
+
+if "%1" == "text" (
+	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The text files are in %BUILDDIR%/text.
+	goto end
+)
+
+if "%1" == "man" (
+	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Build finished. The manual pages are in %BUILDDIR%/man.
+	goto end
+)
+
+if "%1" == "changes" (
+	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.The overview file is in %BUILDDIR%/changes.
+	goto end
+)
+
+if "%1" == "linkcheck" (
+	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+	goto end
+)
+
+if "%1" == "doctest" (
+	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+	if errorlevel 1 exit /b 1
+	echo.
+	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+	goto end
+)
+
+:end

storages/__init__.py

-__version__ = '1.2a'
+__version__ = '1.1.3'

storages/backends/hashpath.py

         dir_name, file_name = os.path.split(name)
 
         # Return the name if the file is already there
-        if os.path.exists(name):
+        if self.exists(name):
             return name
 
         # Try to create the directory relative to the media root

storages/backends/mosso.py

 Custom storage for django with Mosso Cloud Files backend.
 Created by Rich Leland <rich@richleland.com>.
 """
+import os
+
 from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured
 from django.core.files import File
 from django.utils.text import get_valid_filename
 
 try:
+    from cStringIO import StringIO
+except:
+    from StringIO import StringIO
+
+try:
     import cloudfiles
     from cloudfiles.errors import NoSuchObject
 except ImportError:
                                "http://www.mosso.com/cloudfiles.jsp.")
 
 # TODO: implement TTL into cloudfiles methods
-CLOUDFILES_TTL = getattr(settings, 'CLOUDFILES_TTL', 600)
+TTL = getattr(settings, 'CLOUDFILES_TTL', 600)
+CONNECTION_KWARGS = getattr(settings, 'CLOUDFILES_CONNECTION_KWARGS', {})
 
 
 def cloudfiles_upload_to(self, filename):
     """
     default_quick_listdir = True
 
-    def __init__(self, username=None, api_key=None, container=None,
-                 connection_kwargs=None):
+    def __init__(self,
+                 username=settings.CLOUDFILES_USERNAME,
+                 api_key=settings.CLOUDFILES_API_KEY,
+                 container=settings.CLOUDFILES_CONTAINER,
+                 connection_kwargs=CONNECTION_KWARGS):
         """
         Initialize the settings for the connection and container.
         """
-        self.username = username or settings.CLOUDFILES_USERNAME
-        self.api_key = api_key or settings.CLOUDFILES_API_KEY
-        self.container_name = container or settings.CLOUDFILES_CONTAINER
-        self.connection_kwargs = connection_kwargs or settings.CLOUDFILES_CONNECTION_KWARGS or {}
+        self.username = username
+        self.api_key = api_key
+        self.container_name = container
+        self.connection_kwargs = connection_kwargs
 
     def __getstate__(self):
         """
         Use the Cloud Files service to write ``content`` to a remote file
         (called ``name``).
         """
+        (path, last) = os.path.split(name)
+        if path:
+            try:
+                self.container.get_object(path)
+            except NoSuchObject:
+                self._save(path, CloudStorageDirectory(path))
 
         cloud_obj = self.container.create_object(name)
         cloud_obj.size = content.size
 
-        content.open()        
+        content.open()
         # If the content type is available, pass it in directly rather than
         # getting the cloud object to try to guess.
         if hasattr(content.file, 'content_type'):
         return '%s/%s' % (self.container_url, name)
 
 
+class CloudStorageDirectory(File):
+    """
+    A File-like object that creates a directory at cloudfiles
+    """
+
+    def __init__(self, name):
+        super(CloudStorageDirectory, self).__init__(StringIO(), name=name)
+        self.file.content_type = 'application/directory'
+        self.size = 0
+
+    def __str__(self):
+        return 'directory'
+
+    def __nonzero__(self):
+        return True
+
+    def open(self, mode=None):
+        self.seek(0)
+
+    def close(self):
+        pass
+
+
 class CloudFilesStorageFile(File):
     closed = False
 
         self._storage = storage
         super(CloudFilesStorageFile, self).__init__(file=None, name=name,
                                                     *args, **kwargs)
+        self._pos = 0
+
 
     def _get_size(self):
         if not hasattr(self, '_size'):
     file = property(_get_file, _set_file)
 
     def read(self, num_bytes=None):
+        if self._pos == self._get_size():
+            return None
+        if self._pos + num_bytes > self._get_size():
+            num_bytes = self._get_size() - self._pos
         data = self.file.read(size=num_bytes or -1, offset=self._pos)
         self._pos += len(data)
         return data

storages/backends/s3.py

 import os
 import mimetypes
+import warnings
 
 try:
     from cStringIO import StringIO
             calling_format=CALLING_FORMAT, encrypt=False,
             gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
             preload_metadata=PRELOAD_METADATA):
+        warnings.warn(
+            "The s3 backend is deprecated and will be removed in version 1.2. "
+            "Use the s3boto backend instead.",
+            PendingDeprecationWarning
+        )
         self.bucket = bucket
         self.acl = acl
         self.encrypt = encrypt

storages/backends/sftpstorage.py

 # SFTP storage backend for Django.
 # Author: Brent Tubbs <brent.tubbs@gmail.com>
 # License: MIT
-# 
+#
 # Modeled on the FTP storage by Rafal Jonca <jonca.rafal@gmail.com>
-# 
+#
 # Settings:
-# 
+#
 # SFTP_STORAGE_HOST - The hostname where you want the files to be saved.
 #
 # SFTP_STORAGE_ROOT - The root directory on the remote host into which files
 # paramiko.SSHClient().connect() (do not include hostname here).  See
 # http://www.lag.net/paramiko/docs/paramiko.SSHClient-class.html#connect for
 # details
-# 
+#
 # SFTP_STORAGE_INTERACTIVE (Optional) - A boolean indicating whether to prompt
 # for a password if the connection cannot be made using keys, and there is not
 # already a password in SFTP_STORAGE_PARAMS.  You can set this to True to
 # enable interactive login when running 'manage.py collectstatic', for example.
-# 
+#
 #   DO NOT set SFTP_STORAGE_INTERACTIVE to True if you are using this storage
 #   for files being uploaded to your site by users, because you'll have no way
 #   to enter the password when they submit the form..
-# 
+#
 # SFTP_STORAGE_FILE_MODE (Optional) - A bitmask for setting permissions on
 # newly-created files.  See http://docs.python.org/library/os.html#os.chmod for
 # acceptable values.
-# 
+#
 # SFTP_STORAGE_DIR_MODE (Optional) - A bitmask for setting permissions on
 # newly-created directories.  See
 # http://docs.python.org/library/os.html#os.chmod for acceptable values.
-# 
+#
 #   Hint: if you start the mode number with a 0 you can express it in octal
 #   just like you would when doing "chmod 775 myfile" from bash.
 #
     from StringIO import StringIO
 
 class SFTPStorage(Storage):
-    
+
     def __init__(self):
         self._host = settings.SFTP_STORAGE_HOST
 
         self._params = getattr(settings, 'SFTP_STORAGE_PARAMS', {})
         self._interactive = getattr(settings, 'SFTP_STORAGE_INTERACTIVE',
                                     False)
-
         self._file_mode = getattr(settings, 'SFTP_STORAGE_FILE_MODE', None)
         self._dir_mode = getattr(settings, 'SFTP_STORAGE_DIR_MODE', None)
-        
+
         self._uid = getattr(settings, 'SFTP_STORAGE_UID', None)
         self._gid = getattr(settings, 'SFTP_STORAGE_GID', None)
-        
+
         self._root_path = settings.SFTP_STORAGE_ROOT
 
         # for now it's all posix paths.  Maybe someday we'll support figuring
         # out if the remote host is windows.
         self._pathmod = posixpath
 
-        # set up connection
-        self._connect()
-
     def _connect(self):
         self._ssh = paramiko.SSHClient()
-        
+
         # automatically add host keys from current user.
         self._ssh.load_host_keys(os.path.expanduser(os.path.join("~", ".ssh", "known_hosts")))
-        
+
         # and automatically add new host keys for hosts we haven't seen before.
         self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
 
-        try: 
+        try:
             self._ssh.connect(self._host, **self._params)
         except paramiko.AuthenticationException, e:
             if self._interactive and 'password' not in self._params:
                 self._connect()
             else:
                 raise paramiko.AuthenticationException, e
-        
+        except Exception, e:
+            print e
+
         if not hasattr(self, '_sftp'):
             self._sftp = self._ssh.open_sftp()
 
+    @property
+    def sftp(self):
+        """Lazy SFTP connection"""
+        if not hasattr(self, '_sftp'):
+            self._connect()
+        return self._sftp
+
     def _join(self, *args):
         # Use the path module for the remote host type to join a path together
         return self._pathmod.join(*args)
 
     def _open(self, name, mode='rb'):
         return SFTPStorageFile(name, self, mode)
-    
+
     def _read(self, name):
         remote_path = self._remote_path(name)
-        return self._sftp.open(remote_path, 'rb')
+        return self.sftp.open(remote_path, 'rb')
 
     def _chown(self, path, uid=None, gid=None):
         """Set uid and/or gid for file at path."""
         # Paramiko's chown requires both uid and gid, so look them up first if
         # we're only supposed to set one.
         if uid is None or gid is None:
-            attr = self._sftp.stat(path)
+            attr = self.sftp.stat(path)
             uid = uid or attr.st_uid
             gid = gid or attr.st_gid
-        self._sftp.chown(path, uid, gid)
+        self.sftp.chown(path, uid, gid)
 
     def _mkdir(self, path):
         """Create directory, recursing up to create parent dirs if
         parent = self._pathmod.dirname(path)
         if not self.exists(parent):
             self._mkdir(parent)
-        self._sftp.mkdir(path)
+        self.sftp.mkdir(path)
 
         if self._dir_mode is not None:
-            self._sftp.chmod(path, self._dir_mode)
+            self.sftp.chmod(path, self._dir_mode)
 
         if self._uid or self._gid:
             self._chown(path, uid=self._uid, gid=self._gid)
 
     def _save(self, name, content):
         """Save file via SFTP."""
-        content.open() 
+        content.open()
         path = self._remote_path(name)
         dirname = self._pathmod.dirname(path)
         if not self.exists(dirname):
             self._mkdir(dirname)
 
-        f = self._sftp.open(path, 'wb')
+        f = self.sftp.open(path, 'wb')
         f.write(content.file.read())
         f.close()
 
         # set file permissions if configured
         if self._file_mode is not None:
-            self._sftp.chmod(path, self._file_mode)
+            self.sftp.chmod(path, self._file_mode)
         if self._uid or self._gid:
             self._chown(path, uid=self._uid, gid=self._gid)
         return name
 
     def delete(self, name):
         remote_path = self._remote_path(name)
-        self._sftp.remove(remote_path)
+        self.sftp.remove(remote_path)
 
     def exists(self, name):
         # Try to retrieve file info.  Return true on success, false on failure.
         remote_path = self._remote_path(name)
         try:
-            self._sftp.stat(remote_path)
+            self.sftp.stat(remote_path)
             return True
         except IOError:
             return False
     def listdir(self, path):
         remote_path = self._remote_path(path)
         dirs, files = [], []
-        for item in self._sftp.listdir_attr(remote_path):
+        for item in self.sftp.listdir_attr(remote_path):
             if self._isdir_attr(item):
                 dirs.append(item.filename)
             else:
 
     def size(self, name):
         remote_path = self._remote_path(name)
-        return self._sftp.stat(remote_path).st_size
+        return self.sftp.stat(remote_path).st_size
 
     def accessed_time(self, name):
         remote_path = self._remote_path(name)
-        utime = self._sftp.stat(remote_path).st_atime
-        return datetime.fromtimestamp(utime) 
-    
+        utime = self.sftp.stat(remote_path).st_atime
+        return datetime.fromtimestamp(utime)
+
     def modified_time(self, name):
         remote_path = self._remote_path(name)
-        utime = self._sftp.stat(remote_path).st_mtime
-        return datetime.fromtimestamp(utime) 
-    
+        utime = self.sftp.stat(remote_path).st_mtime
+        return datetime.fromtimestamp(utime)
+
     def url(self, name):
         remote_path = self._remote_path(name)
         return 'sftp://%s/%s' % (self._host, remote_path)
         self._is_dirty = False
         self.file = StringIO()
         self._is_read = False
-    
+
     @property
     def size(self):
         if not hasattr(self, '_size'):
         if not self._is_read:
             self.file = self._storage._read(self._name)
             self._is_read = True
-            
+
         return self.file.read(num_bytes)
 
     def write(self, content):

Empty file added.

storages/tests/__init__.py

+from storages.tests.hashpath import *

storages/tests/hashpath.py

+import os
+import shutil
+
+from django.test import TestCase
+from django.core.files.base import ContentFile
+from django.conf import settings
+
+from storages.backends.hashpath import HashPathStorage
+
+
+class HashPathStorageTest(TestCase):
+
+    def setUp(self):
+        self.storage = HashPathStorage()
+        
+        # make sure the profile upload folder exists
+        if not os.path.exists(settings.MEDIA_ROOT):
+            os.makedirs(settings.MEDIA_ROOT)
+            
+    def tearDown(self):
+        # remove uploaded profile picture
+        if os.path.exists(settings.MEDIA_ROOT):
+            shutil.rmtree(settings.MEDIA_ROOT)
+
+    def test_save_same_file(self):
+        """
+        saves a file twice, the file should only be stored once, because the
+        content/hash is the same
+        """
+        
+        path_1 = self.storage.save('test', ContentFile('new content'))
+        
+        path_2 = self.storage.save('test', ContentFile('new content'))
+
+        self.assertEqual(path_1, path_2)
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.