Commits

Dhananjay Nene committed c52a4d9

Initial commit


Files changed (7)

docs/source/conf.py

+# -*- coding: utf-8 -*-
+#
+# pybuckets documentation build configuration file, created by
+# sphinx-quickstart on Tue Mar 23 00:25:35 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.append(os.path.abspath('../../src'))
+
+# -- General configuration -----------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'pybuckets'
+copyright = u'2010, Dhananjay Nene'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.0.1'
+# The full version, including alpha/beta/rc tags.
+release = '0.0.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+#unused_docs = []
+
+# List of directories, relative to source directory, that shouldn't be searched
+# for source files.
+exclude_trees = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  Major themes that come with
+# Sphinx are currently 'default' and 'sphinxdoc'.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = ''
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'pybucketsdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'pybuckets.tex', u'pybuckets Documentation',
+   u'Dhananjay Nene', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}

docs/source/index.rst

+.. pybuckets documentation master file, created by
+   sphinx-quickstart on Tue Mar 23 00:25:35 2010.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+pybuckets
+*********
+
+.. toctree::
+   :maxdepth: 1
+
+Contents:
+
+* `Introduction`_
+* `Usage`_
+* `Supported Storage Providers`_
+
+Introduction
+============
+
+**pybuckets** is an open source library that helps you quickly and easily access cloud-based storage services, e.g. `Amazon S3 <http://aws.amazon.com/s3>`_ and `Rackspace Cloudfiles <http://www.rackspacecloud.com/cloud_hosting_products/files>`_.
+
+Characteristics
+---------------
+
+Some of the characteristics of *pybuckets* are described below.
+
+Simple Idiomatic Interface
+++++++++++++++++++++++++++
+
+*pybuckets* attempts to provide a simple, idiomatic interface to these storage providers by emulating the widely used `python dictionary <http://docs.python.org/tutorial/datastructures.html#dictionaries>`_ interface. It models a storage provider as a Bucket Server, a dictionary-like object containing buckets, each accessible by a key that is unique to the server. Each bucket in turn behaves like a dictionary containing a number of arbitrary file-like or string-like objects, each identified by a key that is unique to that bucket.
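+
+A minimal sketch of how this reads in code (creating and configuring a ``bucket_server`` is covered in the `Usage`_ section below)::
+
+    bucket = bucket_server['my_bucket']      # a bucket server is a dictionary of buckets
+    bucket['some_key'] = 'some contents'     # a bucket is a dictionary of stored objects
+    print bucket['some_key']
+    del bucket['some_key']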
+
+Finer Control and Storage Provider specific capabilities
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+While the simple dictionary interface should cover the overwhelming majority of situations, there will clearly be scenarios where finer control is required. *pybuckets* provides this through a more conventional object oriented API. Finally, despite the attempt by *pybuckets* to provide a common interface to a variety of storage providers, there will always remain features that are specific to a particular storage provider, or that have a peculiar implementation for a particular provider. *pybuckets* allows its users to leverage such capabilities by using the storage provider specific API directly, as sketched below.
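+
+For example, with the Amazon S3 provider the underlying *boto* objects remain reachable for provider specific operations. A brief sketch, using the attribute names from the *pybuckets.botos3* module in this commit::
+
+    s3_connection = bucket_server.connection            # the underlying boto S3Connection
+    boto_bucket = bucket_server['my_bucket'].s3bucket   # the underlying boto Bucket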
+
+Uses existing python packages
++++++++++++++++++++++++++++++
+
+Each storage provider already has python packages that offer an API to it. *pybuckets* does not attempt to rewrite such packages; instead it wraps them, giving its users a simple, portable API across all supported storage providers.
+
+Builtin Local File System storage provider
+++++++++++++++++++++++++++++++++++++++++++
+
+For easy testing, *pybuckets* also provides a built in local file system provider, which implements the *pybuckets* interface on top of the local filesystem. This can assist in early development, or in scenarios where it is not practical to use the production storage providers during development or testing.
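+
+A sketch of initialising the local provider (the ``localfs`` registry name and the ``path`` keyword come from the *pybuckets.local* module in this commit; the path must be an existing directory and is shown here only as an illustration)::
+
+    import pybuckets.local
+    from pybuckets import BucketServer
+
+    bucket_server = BucketServer.get_server('localfs', path = '/tmp/pybuckets')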
+
+Usage
+=====
+
+Code snippets
+-------------
+
+The following code snippets show how *pybuckets* can be used. For demonstration purposes, the snippets below access Amazon S3.
+
+Activating storage provider support
++++++++++++++++++++++++++++++++++++
+
+*pybuckets* comes with support for a set of storage providers. Activating support for a storage provider simply requires importing the relevant module. In the case of Amazon S3, this support is provided by the *pybuckets.botos3* module, which wraps the excellent boto library for accessing Amazon S3. The module is activated as follows::
+
+    import pybuckets.botos3
+
+Initialising a server
++++++++++++++++++++++
+
+The primary class representing the storage provider is the Bucket Server. Initialise the bucket server as follows::
+
+    from pybuckets import BucketServer
+
+    aws_key = '..........' # provide your aws key here
+    aws_password = '..........' # provide your aws password here
+    bucket_server = BucketServer.get_server('botos3', key = aws_key, password = aws_password)
+
+Note that the keyword arguments passed to the *get_server()* method are specific to the particular storage provider. These could include path information, authentication credentials, default policies etc.
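+
+For instance, the Rackspace Cloudfiles provider (the *pybuckets.rscloudfiles* module in this commit) expects ``username`` and ``api_key`` keyword arguments instead. A sketch, with placeholder credentials::
+
+    import pybuckets.rscloudfiles
+
+    rs_username = '..........' # provide your Rackspace username here
+    rs_api_key = '..........'  # provide your Rackspace API key here
+    bucket_server = BucketServer.get_server('cloudfiles', username = rs_username, api_key = rs_api_key)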
+
+
+Getting list of bucket names on the server
+++++++++++++++++++++++++++++++++++++++++++
+
+To get a list of the bucket names stored on the server use the *keys()* method::
+
+    for key in bucket_server.keys() :
+        print key
+
+
+Creating a new bucket on the server
++++++++++++++++++++++++++++++++++++
+
+To create a new bucket, use the *create_bucket* method::
+
+    bucket = bucket_server.create_bucket('new_bucket_name')
+
+.. note:: In many cases, if the bucket already exists and was created earlier using the same credentials, the operation will still succeed.
+
+Getting a bucket given its key
+++++++++++++++++++++++++++++++
+
+To get a bucket from the server, just use the familiar *[]* operator::
+
+    bucket = bucket_server['my_bucket']
+
+
+Removing a bucket
++++++++++++++++++
+
+To remove a bucket just use the *del* operator with the bucket name::
+
+    del bucket_server['my_bucket']
+
+.. note:: In most cases this operation will fail if the bucket is not empty. In such cases you will need to first delete all the objects from the bucket individually, and then delete the bucket.
+
+Iterating over all the buckets on the server
+++++++++++++++++++++++++++++++++++++++++++++
+
+The *__iter__()* method on the server returns an iterator over all the buckets. To iterate over all the buckets on the server, just use that iterator, for example in a for loop::
+
+    for bucket in bucket_server :
+        print bucket.name
+
+.. note:: Note that while *bucket_server.keys()* returns a sequence of keys (strings), iterating over the bucket server yields bucket objects. This is atypical, but it is a conscious decision: it allows you either to traverse the bucket server by its keys and retrieve the buckets, or to traverse the buckets directly.
+
+
+Getting a list of all keys inside a bucket
+++++++++++++++++++++++++++++++++++++++++++
+
+You can get a list of all the keys inside a bucket by again using the familiar *keys()* method, or by iterating over the bucket::
+
+    key_list = bucket.keys()
+
+    # OR #
+
+    for key in bucket :
+        print key
+
+Get the object corresponding to the key in a bucket
++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+To get the object for a given key just use the *[]* operator::
+
+    value = bucket['key']
+
+Set an object corresponding to the key in a bucket
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Again just use the *[]* operator to assign an object to a given key::
+
+    bucket['key'] = value
+    
+
+.. note:: pybuckets does not make any assumptions regarding the compatibility of the provided value with the underlying storage provider. Simple string objects representing the file contents work with the S3, Cloudfiles and Local Filesystem providers.
+
+Create a new key / object pair in a bucket
+++++++++++++++++++++++++++++++++++++++++++
+
+This operation is the same as `Set an object corresponding to the key in a bucket`_ above. If the key does not already exist, a new key/object pair is created.
+
+Removing an object from the bucket
+++++++++++++++++++++++++++++++++++
+
+Just use the *del* operator with the key to remove an object from the bucket::
+
+    del bucket['key']
+
+Write the object corresponding to a key to a file
++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. note:: The function call operator, i.e. *()* or *__call__*, on the bucket has been overloaded to return a file-like wrapper object that can stream data to and from files, allowing convenient file access.
+
+To write the contents of a particular key in a bucket to a file::
+
+    with open('target_file','w') as f :
+        bucket('key') >> f
+
+As a further convenience, you can also write the value to a file simply by providing the filename::
+
+    bucket('key') >> 'target_file.txt'
+
+
+Read object contents from a file
+++++++++++++++++++++++++++++++++
+
+The left-shift operator, i.e. *<<*, similarly supports reading in the value for a given key from a file. To read the contents from an open file::
+
+    with open('myfile.txt','r') as f :
+        bucket('key') << f
+
+
+To read the contents from a specified file name (path), you can simply do::
+
+    bucket('key') << 'myfile.txt'
+
+Supported Storage Providers
+===========================
+
+Currently supported storage providers are Amazon S3, Rackspace Cloudfiles and the local filesystem.
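+
+Each provider is activated by importing its module, which registers itself under a short name that is then passed to *get_server()*. The names below are those used by the provider modules in this commit::
+
+    import pybuckets.botos3        # Amazon S3, registered as 'botos3'
+    import pybuckets.rscloudfiles  # Rackspace Cloudfiles, registered as 'cloudfiles'
+    import pybuckets.local         # local filesystem, registered as 'localfs'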
+
+.. note:: This documentation is still under construction
+

src/pybuckets/__init__.py

+""" A bucket based key value library for various storage systems
+
+.. moduleauthor:: Dhananjay Nene
+"""

src/pybuckets/botos3.py

+from boto.exception import S3CreateError
+from boto.s3.bucket import Bucket as BotoBucket
+from boto.s3.connection import S3Connection
+from boto.s3.key import Key
+from pybuckets import Bucket, BucketServer, DuplicateBucketNameException, \
+    BucketsException
+from pybuckets import server_registry
+import operator
+
+# S3CreateError - body, box_usage, bucket, error_code, error_message,message,reason,request_id,status
+
+class BotoS3BucketIterator(object):
+    "Iterator over the key names of a BotoS3Bucket"
+    def __init__(self,sequence):
+        self.iter = sequence.__iter__()
+    def __iter__(self):
+        return self
+    def next(self):
+        return self.iter.next()
+    
+class BotoValueWrapper(object):
+    def __init__(self,botokey):
+        self.botokey = botokey
+    def __rshift__(self,target):
+        if isinstance(target,basestring) :
+            self.botokey.get_contents_to_filename(target)
+        elif hasattr(target,'write') :
+            self.botokey.get_contents_to_file(target)
+        else :
+            raise Exception('invalid destination')
+    def __lshift__(self,source):
+        if isinstance(source,basestring) :
+            self.botokey.set_contents_from_filename(source)
+        elif hasattr(source,'read') :
+            self.botokey.set_contents_from_file(source)
+        else :
+            raise Exception('invalid source')
+        
+class BotoS3Bucket(Bucket):
+    def __init__(self,name,server):
+        self.name = name
+        self.server = server
+        if self.server :
+            self.s3bucket = BotoBucket(self.server.connection,self.name)
+        else :
+            self.s3bucket = None
+    
+    def __iter__(self):
+        return BotoS3BucketIterator(self.keys())
+    
+    def __str__(self):
+        return 'BotoS3Bucket(%s,%s)' % (self.name, str(self.server))
+    
+    def keys(self):
+        return map(operator.attrgetter('name'),self.s3bucket.get_all_keys())
+        
+    def __call__(self,key):
+        k = Key(self.s3bucket)
+        k.key = key
+        return BotoValueWrapper(k)
+
+    def get_data(self,name):
+        k = Key(self.s3bucket)
+        k.key = name
+        return k.get_contents_as_string()
+    
+    def set_data(self,name,value):
+        k = Key(self.s3bucket)
+        k.key = name
+        return k.set_contents_from_string(value)
+
+class BotoS3Server(BucketServer):
+    def __init__(self,**config):
+        super(BotoS3Server,self).__init__()
+        self.key = config['key']
+        self.password = config['password']
+        self.connection = S3Connection(self.key,self.password)
+        for property, default in (('headers',None),('location',''),('policy',None)) :
+            self.config[property] = config.get(property,default)
+            
+    def keys(self):
+        return map(operator.attrgetter('name'),self.connection.get_all_buckets())
+    
+    def get(self,name):
+        return BotoS3Bucket(name,self)
+    
+    def delete_bucket(self,key):
+        # required so that ``del bucket_server[key]`` works; BucketServer.__delitem__ delegates here
+        self.connection.delete_bucket(key)
+    
+    def create_bucket(self,name,**kwargs):
+        try :
+            bucket = self.connection.create_bucket(name,
+                                kwargs.get('headers',self.config['headers']),
+                                kwargs.get('location',self.config['location']),
+                                kwargs.get('policy',self.config['policy']),
+                            )
+            return self.create_using_boto_bucket(bucket)
+        except S3CreateError as s3e :
+            if s3e.error_code == u'BucketAlreadyExists' :
+                raise DuplicateBucketNameException(s3e,s3e.bucket)
+            else :
+                raise BucketsException(s3e)
+        except Exception as e:
+            raise BucketsException(e)
+    def __str__(self):
+        return 'BotoS3Server(%s)' % self.key
+            
+    def create_using_boto_bucket(self,boto_bucket):
+        bucket = BotoS3Bucket(boto_bucket.name,self)
+        return bucket
+
+server_registry['botos3'] = BotoS3Server
+

src/pybuckets/local.py

+from pybuckets import BucketsException, BucketServer, server_registry, Bucket
+import os
+import shutil
+
+
+class LocalFSException(BucketsException):
+    "Exception raised for local filesystem specific errors"
+    def __init__(self,e,msg = None):
+        super(LocalFSException,self).__init__(e)
+        self.msg = msg
+        
+class LocalFSServer(BucketServer):
+    def __init__(self,**config):
+        super(LocalFSServer,self).__init__()
+        self.path = config['path']
+        self.config['_remove_non_empty_buckets'] = config.get('_remove_non_empty_buckets',False)
+        if not os.path.isdir(self.path) :
+            raise LocalFSException('%s must be a directory' % self.path)
+        
+    # keys() and get() provide the dictionary style interface expected by BucketServer
+    def keys(self):
+        return os.listdir(self.path)
+    
+    def get(self,name):
+        return LocalFSBucket(name,self)
+    
+    def delete_bucket(self,key):
+        if self.config['_remove_non_empty_buckets'] :
+            shutil.rmtree(os.path.join(self.path,key))
+        else :
+            os.rmdir(os.path.join(self.path,key))
+    
+    def create_bucket(self,name,**kwargs):
+        try :
+            path = os.path.join(self.path,name)
+            if os.path.isdir(path) :
+                return LocalFSBucket(name,self)
+            else :
+                os.mkdir(path)
+                return LocalFSBucket(name,self)
+        except Exception as e:
+            raise LocalFSException(e)
+        
+    def __str__(self):
+        return 'LocalFSServer(%s)' % self.path
+            
+
+class LocalFSBucket(Bucket):
+    def __init__(self,key,server):
+        self.key = key
+        self.server = server
+        self.fullpath = os.path.join(self.server.path,self.key)
+    
+    def __iter__(self):
+        return self.keys().__iter__()
+        
+    def __str__(self):
+        return 'LocalFSBucket(%s,%s)' % (self.key, str(self.server))
+    
+    def keys(self):
+        return os.listdir(self.fullpath)
+
+    def get_data(self,key):
+        filepath = os.path.join(self.fullpath,key)
+        with open(filepath,'r') as f:
+            value = f.read()
+        return value
+
+    def set_data(self,key,value):
+        filepath = os.path.join(self.fullpath,key)
+        with open(filepath,'w') as f:
+            f.write(value)
+    
+    def delete_key(self,key):
+        filepath = os.path.join(self.fullpath,key)
+        os.unlink(filepath)
+        
+server_registry['localfs'] = LocalFSServer
+
+    
+

src/pybuckets/pybuckets.py

+"""
+Core pybuckets library
+"""
+
+server_registry = {}
+"""Registry for autoregistration of various component libraries"""
+
+class BucketsException(Exception):
+    """Exception class to wrap various exceptions"""
+    def __init__(self,e):
+        self.e = e
+    def __str__(self):
+        return 'BucketsException(%s)' % str(self.e)
+    def __repr__(self):
+        return 'BucketsException(%s)' % str(self.e)
+    
+class DuplicateBucketNameException(BucketsException):
+    "An exception indicating a duplicate bucket name"
+    def __init__(self,e,name):
+        super(DuplicateBucketNameException,self).__init__(e)
+        self.name = name
+    def __str__(self):
+        return 'DuplicateBucketNameException(%s)' % str(self.name)
+    def __repr__(self):
+        return 'DuplicateBucketNameException(%s)' % str(self.name)
+
+class BucketDataReader(object):
+    "Reader class to allow controlled access to reading data streams"
+    def __init__(self,pointer,reader):
+        self.pointer = pointer
+        self.reader = reader
+    def __enter__(self):
+        return self
+    def read(self):
+        return self.reader.read()
+    def __exit__(self,exc_type, exc_value, traceback):
+        # do not suppress exceptions raised inside the with block;
+        # closing self.pointer is left to the specific provider
+        return False
+        
+class Bucket(object):
+    "A bucket"
+    def __init__(self,name,server = None):
+        self.name = name
+        self.server = server
+    def __getitem__(self,key):
+        return self.get_data(key)
+    def __setitem__(self,key,val):
+        return self.set_data(key,val)
+    def __delitem__(self,key):
+        return self.delete_key(key)
+    def __str__(self):
+        return 'Bucket(%s,%s)' % (self.name, str(self.server))
+        
+class BucketServerIterator(object):
+    "A bucket server iterator"
+    def __init__(self,server):
+        self.server = server
+        self.keys = self.server.keys().__iter__()
+    def __iter__(self):
+        return self
+    def next(self):
+        return self.server.get(self.keys.next())
+        
+class BucketServer(object):
+    "A bucket server"
+    def __init__(self):
+        self.config = {}
+    def __len__(self):
+        return len(self.keys())
+    def __iter__(self):
+        return BucketServerIterator(self)
+    def __getitem__(self,key):
+        return self.get(key)
+    def __delitem__(self,key):
+        return self.delete_bucket(key)
+    @classmethod
+    def get_server(cls,name,**creds):
+        return server_registry[name](**creds)
+
+
+    
+        

src/pybuckets/rscloudfiles.py

+from pybuckets import BucketsException, BucketServer, server_registry, Bucket
+from pybuckets import BucketDataReader
+import cloudfiles
+import operator
+
+class CloudfilesException(BucketsException):
+    "Exception raised for Rackspace Cloudfiles specific errors"
+    def __init__(self,e,msg = None):
+        super(CloudfilesException,self).__init__(e)
+        self.msg = msg
+        
+class CloudfilesServer(BucketServer):
+    def __init__(self,**config):
+        super(CloudfilesServer,self).__init__()
+        self.username = config['username']
+        self.api_key = config['api_key']
+        self.connection = cloudfiles.get_connection(self.username, self.api_key)
+        
+    # keys() and get() provide the dictionary style interface expected by BucketServer
+    def keys(self):
+        return map(operator.attrgetter('name'),self.connection.get_all_containers())
+    
+    def get(self,name):
+        return CloudfilesBucket(self.connection.get_container(name),self)
+    
+    def delete_bucket(self,key):
+        self.connection.delete_container(key)
+    
+    def create_bucket(self,name,**kwargs):
+        try :
+            container = self.connection.create_container(name)
+            return CloudfilesBucket(container,self)
+        except Exception as e:
+            raise CloudfilesException(e)
+        
+    def __str__(self):
+        return 'CloudfilesServer(%s)' % self.username
+            
+class CloudfilesReader(BucketDataReader):
+    def __init__(self,obj):
+        super(CloudfilesReader,self).__init__(obj,obj)
+    
+class CloudfilesBucket(Bucket):
+    def __init__(self,key,server):
+        if isinstance(key,basestring) :
+            self.key = key
+            self.container = None
+        elif isinstance(key,cloudfiles.container.Container) :
+            self.key = key.name
+            self.container = key
+        self.server = server
+    
+    def __iter__(self):
+        return self.keys().__iter__()
+        
+    def __str__(self):
+        return 'CloudfilesBucket(%s,%s)' % (self.key, str(self.server))
+
+    def keys(self):
+        return map(operator.attrgetter('name'),self.container.get_objects())
+
+    def get_data(self,key):
+        obj = self.container.get_object(key)
+        return obj.read()
+    
+    def get_data_reader(self,key):
+        return CloudfilesReader(self.container.get_object(key))
+        
+    def set_data(self,key,value):
+        obj = self.container.create_object(key)
+        obj.write(value)
+
+    def delete_key(self,key):
+        self.container.delete_object(key)
+        
+server_registry['cloudfiles'] = CloudfilesServer
+
+    
+