Anonymous avatar Anonymous committed fff2669

- remove all trailing whitespace

Comments (0)

Files changed (33)

   used as a cache-invalidation mechanism.
 * file-based locking will not unlink lockfiles; this can interfere
   with the flock() mechanism in the event that a concurrent process
-  is accessing the files.  
+  is accessing the files.
 * Sending "type" and other namespace config arguments to cache.get()/
   cache.put()/cache.remove_value() is deprecated.   The namespace
   configuration is now preferred at the Cache level, i.e. when you construct
 * memcache caching has been vastly improved, no longer stores a list of
   all keys, which along the same theme prevented efficient usage for an 
   arbitrarily large number of keys.  The keys() method is now unimplemented, 
-  and cache.remove() clears the entire memcache cache across all namespaces.  
+  and cache.remove() clears the entire memcache cache across all namespaces.
   This is what the memcache API provides so it's the best we can do. 
 * memcache caching passes along "expiretime" to the memcached "time"
   parameter, so that the cache itself can reduce its size for elements which
   manner as cache.set_value().  This way you can send a new createfunc
   to cache.get_value() each time and it will be used.
 
-  
+
 Release 0.9.5 (6/19/2008)
 =========================
 

beaker/crypto/pbkdf2.py

 # 
 # THE AUTHOR PROVIDES THIS SOFTWARE ``AS IS'' AND ANY EXPRESSED OR 
 # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 
-# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  
+# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 
 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 
 
 class PBKDF2(object):
     """PBKDF2.py : PKCS#5 v2.0 Password-Based Key Derivation
-    
+
     This implementation takes a passphrase and a salt (and optionally an
     iteration count, a digest module, and a MAC module) and provides a
     file-like object from which an arbitrarily-sized key can be read.
 
     The idea behind PBKDF2 is to derive a cryptographic key from a
     passphrase and a salt.
-    
+
     PBKDF2 may also be used as a strong salted password hash.  The
     'crypt' function is provided for that purpose.
-    
+
     Remember: Keys generated using PBKDF2 are only as strong as the
     passphrases they are derived from.
     """
         """Pseudorandom function.  e.g. HMAC-SHA1"""
         return self.__macmodule(key=key, msg=msg,
             digestmod=self.__digestmodule).digest()
-    
+
     def read(self, bytes):
         """Read the specified number of key bytes."""
         if self.closed:
         self.__buf = buf[bytes:]
         self.__blockNum = i
         return retval
-    
+
     def __f(self, i):
         # i must fit within 32 bits
         assert (1 <= i and i <= 0xffffffff)
             U = self.__prf(self.__passphrase, U)
             result = strxor(result, U)
         return result
-    
+
     def hexread(self, octets):
         """Read the specified number of octets. Return them as hexadecimal.
 
 
     def _setup(self, passphrase, salt, iterations, prf):
         # Sanity checks:
-        
+
         # passphrase and salt must be str or unicode (in the latter
         # case, we convert to UTF-8)
         if isinstance(passphrase, unicode):
             raise TypeError("iterations must be an integer")
         if iterations < 1:
             raise ValueError("iterations must be at least 1")
-        
+
         # prf must be callable
         if not callable(prf):
             raise TypeError("prf must be callable")
         self.__blockNum = 0
         self.__buf = ""
         self.closed = False
-    
+
     def close(self):
         """Close the stream."""
         if not self.closed:
 
 def crypt(word, salt=None, iterations=None):
     """PBKDF2-based unix crypt(3) replacement.
-    
+
     The number of iterations specified in the salt overrides the 'iterations'
     parameter.
 
     The effective hash length is 192 bits.
     """
-    
+
     # Generate a (pseudo-)random salt if the user hasn't provided one.
     if salt is None:
         salt = _makesalt()
             iterations = converted
             if not (iterations >= 1):
                 raise ValueError("Invalid salt")
-    
+
     # Make sure the salt matches the allowed character set
     allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./"
     for ch in salt:
 
 def _makesalt():
     """Return a 48-bit pseudorandom salt for crypt().
-    
+
     This function is not suitable for generating cryptographic secrets.
     """
     binarysalt = "".join([pack("@H", randint(0, 0xffff)) for i in range(3)])
 def test_pbkdf2():
     """Module self-test"""
     from binascii import a2b_hex
-    
+
     #
     # Test vectors from RFC 3962
     #
                 "c5ec59f1a452f5cc9ad940fea0598ed1")
     if result != expected:
         raise RuntimeError("self-test failed")
-    
+
     # Test 4
     result = PBKDF2("X"*65, "pass phrase exceeds block size", 1200).hexread(32)
     expected = ("9ccad6d468770cd51b10e6a68721be61"
                 "1a8b4d282601db3b36be9246915ec82a")
     if result != expected:
         raise RuntimeError("self-test failed")
-    
+
     #
     # Other test vectors
     #
-    
+
     # Chunked read
     f = PBKDF2("kickstart", "workbench", 256)
     result = f.read(17)
     expected = PBKDF2("kickstart", "workbench", 256).read(40)
     if result != expected:
         raise RuntimeError("self-test failed")
-    
+
     #
     # crypt() test vectors
     #
     expected = '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'
     if result != expected:
         raise RuntimeError("self-test failed")
-    
+
     # crypt 2
     result = crypt("gnu", '$p5k2$c$u9HvcT4d$.....')
     expected = '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'
     expected = "$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL"
     if result != expected:
         raise RuntimeError("self-test failed")
-    
+
     # crypt 4 (unicode)
     result = crypt(u'\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2',
         '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ')

beaker/crypto/pycrypto.py

     def aesEncrypt(data, key):
         cipher = aes.AES(key)
         return cipher.process(data)
-    
+
     # magic.
     aesDecrypt = aesEncrypt
-    
+
 except ImportError:
     from Crypto.Cipher import AES
 
     def aesEncrypt(data, key):
         cipher = AES.new(key)
-        
+
         data = data + (" " * (16 - (len(data) % 16)))
         return cipher.encrypt(data)
 

beaker/crypto/util.py

     # Use PyCrypto (if available)
     from Crypto.Hash import HMAC as hmac, SHA as hmac_sha1
     sha1 = hmac_sha1.new
-    
+
 except ImportError:
-    
+
     # PyCrypto not available.  Use the Python standard library.
     import hmac
 

beaker/docs/_templates/index.html

       for easy drop-in use with <a href="http://www.python.org/dev/peps/pep-0333/">WSGI</a>
       based web applications, and caching decorators for ease of use with
       any Python based application.</p>
-  
+
   <ul>
       <li><b>Lazy-Loading Sessions:</b> No performance hit for having sessions active in
           a request unless they're actually used</li>
           <a href="http://peak.telecommunity.com/DevCenter/setuptools#extensible-applications-and-frameworks">setuptools entrypoints</a>
            to support new back-ends.</li>
   </ul>
-  
+
   <h2>News</h2>
   <ul>
       <li>Beaker 1.5.4 released on 6/16/2010.</li>
          <span class="linkdescr">quick access to all documented modules</span></p>
     </td></tr>
   </table>
-  
+
   <p>Download <a href="http://beaker.groovie.org/beaker.pdf">Beaker PDF documentation</a>.</p>
 
   <h2>Source</h2>
   <p>Beaker's Mercurial repository can be found at
     <a href="http://bitbucket.org/bbangert/beaker/"><tt>http://bitbucket.org/bbangert/beaker/</tt></a>.</p>
-    
+
     <p>To check out:</p>
-    
+
       <pre>hg clone http://www.bitbucket.org/bbangert/beaker</pre>
 
 {% endblock %}

beaker/docs/caching.rst

 examples below assume that it has already been created.
 
 Creating the cache instance::
-    
+
     from beaker.cache import CacheManager
     from beaker.util import parse_cache_config_options
 
 
 To store data for a cache value, first, a NamespaceManager has to be
 retrieved to manage the keys for a ``thing`` to be cached::
-    
+
     # Assuming that cache is an already created CacheManager instance
     tmpl_cache = cache.get_cache('mytemplate.html', type='dbm', expire=3600)
 
 creation function must not accept any arguments as it won't be called with
 any. Options affecting the created value can be passed in by using closure
 scope on the creation function::
-    
+
     search_param = 'gophers'
-    
+
     def get_results():
         # do something to retrieve data
         data = get_data(search_param)
         return data
-    
+
     # Cache this function, based on the search_param, using the tmpl_cache
     # instance from the prior example
     results = tmpl_cache.get(key=search_param, createfunc=get_results)
 
 All of the values for a particular namespace can be removed by calling the
 :meth:`~beaker.cache.Cache.clear` method::
-    
+
     tmpl_cache.clear()
 
 Note that this only clears the keys in the namespace that this particular
 in the same location.
 
 For example::
-    
+
     # Assuming that cache is an already created CacheManager instance
     @cache.cache('my_search_func', expire=3600)
     def get_results(search_param):
         # do something to retrieve data
         data = get_data(search_param)
         return data
-    
+
     results = get_results('gophers')
 
 The non-keyword arguments to the :meth:`~beaker.cache.CacheManager.cache`
 namespace used, manually removing the key requires the use of the
 :meth:`~beaker.cache.CacheManager.invalidate` function. To invalidate
 the 'gophers' result that the prior example referred to::
-    
+
     cache.invalidate(get_results, 'my_search_func', 'gophers')
 
 If however, a type was specified for the cached function, the type must
 function so that it can remove the value from the appropriate back-end.
 
 Example::
-    
+
     # Assuming that cache is an already created CacheManager instance
     @cache.cache('my_search_func', type="file", expire=3600)
     def get_results(search_param):
         # do something to retrieve data
         data = get_data(search_param)
         return data
-    
+
     cache.invalidate(get_results, 'my_search_func', 'gophers', type="file")
 
 .. note::
 
 Assuming a ``long_term`` and ``short_term`` region were setup, the 
 :meth:`~beaker.cache.CacheManager.region` decorator can be used::
-    
+
     @cache.region('short_term', 'my_search_func')
     def get_results(search_param):
         # do something to retrieve data
         data = get_data(search_param)
         return data
-    
+
     results = get_results('gophers')
 
 Or using the :func:`~beaker.cache.cache_region` decorator::
-    
+
     @cache_region('short_term', 'my_search_func')
     def get_results(search_param):
         # do something to retrieve data
         data = get_data(search_param)
         return data
-    
+
     results = get_results('gophers')
 
 The only difference with the :func:`~beaker.cache.cache_region` decorator is
 namespace used, manually removing the key requires the use of the
 :meth:`~beaker.cache.CacheManager.region_invalidate` function. To invalidate
 the 'gophers' result that the prior example referred to::
-    
+
     cache.region_invalidate(get_results, None, 'my_search_func', 'gophers')
 
 Or when using the :func:`~beaker.cache.cache_region` decorator, the
 :func:`beaker.cache.region_invalidate` function should be used::
-    
+
     region_invalidate(get_results, None, 'my_search_func', 'gophers')
 
 .. note::

beaker/docs/configuration.rst

 
     When using the options in a framework like `Pylons`_ or `TurboGears2`_, these
     options must be prefixed by ``beaker.``, for example in a `Pylons`_ INI file::
-    
+
         beaker.session.data_dir = %(here)s/data/sessions/data
         beaker.session.lock_dir = %(here)s/data/sessions/lock
 
 .. code-block:: python
 
     from beaker.middleware import SessionMiddleware
-    
+
     session_opts = {
         'session.cookie_expires': True
     }
-    
+
     app = SomeWSGIAPP()
     app = SessionMiddleware(app, session_opts)
 
     cache = CacheManager(**parse_cache_config_options(cache_opts))
 
 .. note::
-    
+
     When using the CacheManager directly, all dict options must be run through the
     :func:`beaker.util.parse_cache_config_options` function to ensure they're valid
     and of the appropriate type.
 
 type (**required**, string)
     The name of the back-end to use for storing the sessions or cache objects.
-    
+
     Available back-ends supplied with Beaker: ``file``, ``dbm``, ``memory``,
     ``ext:memcached``, ``ext:database``, ``ext:google``
-    
+
     For sessions, the additional type of ``cookie`` is available which
     will store all the session data in the cookie itself. As such, size
     limitations apply (4096 bytes).
-    
+
     Some of these back-ends require the url option as listed below.
 
 webtest_varname (**optional**, string)
 url (**optional**, string)
     URL is specific to use of either ext:memcached or ext:database. When using
     one of those types, this option is **required**.
-    
+
     When used with ext:memcached, this should be either a single, or
     semi-colon separated list of memcached servers::
-        
+
         session_opts = {
             'session.type': 'ext:memcached',
             'session.url': '127.0.0.1:11211',
         }
-    
+
     When used with ext:database, this should be a valid `SQLAlchemy`_ database
     string.
 
     When set to True, the session will save itself anytime it is accessed
     during a request, negating the need to issue the 
     :meth:`~beaker.session.Session.save` method.
-    
+
     Defaults to False.
 
 cookie_expires (**optional**, bool, datetime, timedelta)
     Determines when the cookie used to track the client-side of the session
     will expire. When set to a boolean value, it will either expire at the
     end of the browsers session, or never expire.
-    
+
     Setting to a datetime forces a hard ending time for the session (generally
     used for setting a session to a far off date).
-    
+
     Defaults to never expiring.
 
 
     should be set to the main domain the cookie should be valid for. For
     example, if a cookie should be valid under ``www.nowhere.com`` **and**
     ``files.nowhere.com`` then it should be set to ``.nowhere.com``.
-    
+
     Defaults to the current domain in its entirety.
-    
+
     Alternatively, the domain can be set dynamically on the session by
     calling, see :ref:`cookie_attributes`.
 
 secret (**required**, string)
     Used with the HMAC to ensure session integrity. This value should
     ideally be a randomly generated string.
-    
+
     When using in a cluster environment, the secret must be the same on
     every machine.
 
     Seconds until the session is considered invalid, after which it will
     be ignored and invalidated. This number is based on the time since
     the session was last accessed, not from when the session was created.
-    
+
     Defaults to never expiring.
 
 
 
 enabled (**optional**, bool)
     Quick toggle to disable or enable caching across an entire application.
-    
+
     This should generally be used when testing an application or in
     development when caching should be ignored.
-    
+
     Defaults to True.
 
 expire (**optional**, integer)
 
 regions (**optional**, list, tuple)
     Names of the regions that are to be configured.
-    
+
     For each region, all of the other cache options are valid and will
     be read out of the cache options for that key. Options that are not
     listed under a region will be used globally in the cache unless a
     region specifies a different value.
-    
+
     For example, to specify two batches of options, one called ``long-term``,
     and one called ``short-term``::
-        
+
         cache_opts = {
             'cache.data_dir': '/tmp/cache/data',
             'cache.lock_dir': '/tmp/cache/lock'

beaker/docs/contents.rst

 
 .. toctree::
    :maxdepth: 2
-   
+
    configuration
    sessions
    caching
    :maxdepth: 1
 
    changes
-   
+
 
 Indices and tables
 ==================
     modules/memcached
     modules/sqla
     modules/pbkdf2
-    
-    
+
+

beaker/docs/glossary.rst

 ========
 
 .. glossary::
-    
+
     Cache Regions
         Bundles of configuration options keyed to a user-defined variable
         for use with the :meth:`beaker.cache.CacheManager.region`
         decorator.
-    
+
     Container
         A Beaker container is a storage object for a specific cache value
         and the key under the namespace it has been assigned.
-    
+
     Dog-Pile Effect
         What occurs when a cached object expires, and multiple requests to
         fetch it are made at the same time. In systems that don't lock or
         use a scheme to prevent multiple instances from simultaneously
         creating the same thing, every request will cause the system to
         create a new value to be cached.
-        
+
         Beaker alleviates this with file locking to ensure that only a single
         copy is re-created while other requests for the same object are
         instead given the old value until the new one is ready.
-    
+
     NamespaceManager
         A Beaker namespace manager, is best thought of as a collection of
         containers with various keys. For example, a single template to be
         cached might vary slightly depending on search term, or user login, so
         the template would be keyed based on the variable that changes its
         output.
-        
+
         The namespace would be the template name, while each container would
         correspond to one of the values and the key it responds to.

beaker/docs/modules/container.rst

 .. autoclass:: OpenResourceNamespaceManager
 .. autoclass:: Value
    :members: set_value, has_value, can_have_value, has_current_value, get_value, clear_value
-   

beaker/docs/modules/google.rst

 
 .. autoclass:: GoogleContainer
 .. autoclass:: GoogleNamespaceManager
-   

beaker/docs/modules/memcached.rst

 
 .. autoclass:: MemcachedContainer
 .. autoclass:: MemcachedNamespaceManager
-   

beaker/docs/modules/middleware.rst

 
 .. autoclass:: CacheMiddleware
 .. autoclass:: SessionMiddleware
-   

beaker/docs/modules/pbkdf2.rst

 .. autofunction:: crypt
 .. autoclass:: PBKDF2
    :members: close, hexread, read
-   

beaker/docs/modules/session.rst

 .. autoclass:: SessionObject
    :members: persist, get_by_id, accessed
 .. autoclass:: SignedCookie
-   

beaker/docs/modules/sqla.rst

 .. autofunction:: make_cache_table
 .. autoclass:: SqlaContainer
 .. autoclass:: SqlaNamespaceManager
-   

beaker/docs/sessions.rst

 environ.
 
 Getting data out of the session::
-    
+
     myvar = session['somekey']
 
 Testing for a value::
-    
+
     logged_in = 'user_id' in session
 
 Adding data to the session::
-    
+
     session['name'] = 'Fred Smith'
 
 Complete example using a basic WSGI app with sessions::
     def simple_app(environ, start_response):
         # Get the session object from the environ
         session = environ['beaker.session']
-        
+
         # Check to see if a value is in the session
         if 'logged_in' in session:
             user = True
         else:
             user = False
-        
+
         # Set some other session variable
         session['user_id'] = 10
-        
+
         start_response('200 OK', [('Content-type', 'text/plain')])
         return ['User is logged in: %s' % user]
-    
+
     # Configure the SessionMiddleware
     session_opts = {
         'session.type': 'file',
 
 Sessions can be saved using the :meth:`~beaker.session.Session.save` method
 on the session object::
-    
+
     session.save()
 
 .. warning::
-    
+
     Beaker relies on Python's pickle module to pickle data objects for storage
     in the session. Objects that cannot be pickled should **not** be stored in
     the session.
 
 If it's necessary to immediately save the session to the back-end, the
 :meth:`~beaker.session.SessionObject.persist` method should be used::
-    
+
     session.persist()
 
 This is not usually the case however, as a session generally should not be
 saved should something catastrophic happen during a request.
 
 .. note::
-    
+
     When using the Beaker middleware, you **must call save before the headers
     are sent to the client**. Since Beaker's middleware watches for when the
     ``start_response`` function is called to know that it should add its
 Calling the :meth:`~beaker.session.Session.delete` method deletes the session
 from the back-end storage and sends an expiration on the cookie requesting the
 browser to clear it::
-    
+
     session.delete()
 
 This should be used at the end of a request when the session should be deleted
 If a session should be invalidated, and a new session created and used during
 the request, the :meth:`~beaker.session.Session.invalidate` method should be
 used::
-    
+
     session.invalidate()
 
 Removing Expired/Old Sessions
 haven't been touched in a long time, for example (in the session's data dir):
 
 .. code-block:: bash
-    
+
     find . -mtime +3 -exec rm {} \;
 
 
 These settings will persist as long as the cookie exists, or until changed.
 
 Example::
-    
+
     # Setting the session's cookie domain and path
     session.domain = '.domain.com'
     session.path = '/admin'

beaker/ext/database.py

         except ImportError:
             raise InvalidCacheBackendError("Database cache backend requires "
                                             "the 'sqlalchemy' library")
-        
+
     def __init__(self, namespace, url=None, sa_opts=None, optimistic=False,
                  table_name='beaker_cache', data_dir=None, lock_dir=None,
                  **params):
         """Creates a database namespace manager
-        
+
         ``url``
             SQLAlchemy compliant db url
         ``sa_opts``
             The table name to use in the database for the cache.
         """
         OpenResourceNamespaceManager.__init__(self, namespace)
-        
+
         if sa_opts is None:
             sa_opts = params
 
         elif data_dir:
             self.lock_dir = data_dir + "/container_db_lock"
         if self.lock_dir:
-            verify_directory(self.lock_dir)            
-        
+            verify_directory(self.lock_dir)
+
         # Check to see if the table's been created before
         url = url or sa_opts['sa.url']
         table_key = url + table_name
         self._is_new = False
         self.loaded = False
         self.cache = DatabaseNamespaceManager.tables.get(table_key, make_cache)
-    
+
     def get_access_lock(self):
         return null_synchronizer()
 
         if self.loaded:
             self.flags = flags
             return
-        
+
         cache = self.cache
         result = sa.select([cache.c.data], 
                            cache.c.namespace==self.namespace
                 self._is_new = True
         self.flags = flags
         self.loaded = True
-    
+
     def do_close(self):
         if self.flags is not None and (self.flags == 'c' or self.flags == 'w'):
             cache = self.cache
                 cache.update(cache.c.namespace==self.namespace).execute(
                     data=self.hash, accessed=datetime.now())
         self.flags = None
-    
+
     def do_remove(self):
         cache = self.cache
         cache.delete(cache.c.namespace==self.namespace).execute()
         self.hash = {}
-        
+
         # We can retain the fact that we did a load attempt, but since the
         # file is gone this will be a new namespace should it be saved.
         self._is_new = True
 
     def __contains__(self, key): 
         return self.hash.has_key(key)
-        
+
     def __setitem__(self, key, value):
         self.hash[key] = value
 

beaker/ext/google.py

         except ImportError:
             raise InvalidCacheBackendError("Datastore cache backend requires the "
                                            "'google.appengine.ext' library")
-    
+
     def __init__(self, namespace, table_name='beaker_cache', **params):
         """Creates a datastore namespace manager"""
         OpenResourceNamespaceManager.__init__(self, namespace)
-        
+
         def make_cache():
             table_dict = dict(created=db.DateTimeProperty(),
                               accessed=db.DateTimeProperty(),
         self._is_new = False
         self.loaded = False
         self.log_debug = logging.DEBUG >= log.getEffectiveLevel()
-        
+
         # Google wants namespaces to start with letters, change the namespace
         # to start with a letter
         self.namespace = 'p%s' % self.namespace
-    
+
     def get_access_lock(self):
         return null_synchronizer()
 
         if self.loaded:
             self.flags = flags
             return
-        
+
         item = self.cache.get_by_key_name(self.namespace)
-        
+
         if not item:
             self._is_new = True
             self.hash = {}
                 self._is_new = True
         self.flags = flags
         self.loaded = True
-    
+
     def do_close(self):
         if self.flags is not None and (self.flags == 'c' or self.flags == 'w'):
             if self._is_new:
                 item.accessed = datetime.now()
                 item.put()
         self.flags = None
-    
+
     def do_remove(self):
         item = self.cache.get_by_key_name(self.namespace)
         item.delete()
         self.hash = {}
-        
+
         # We can retain the fact that we did a load attempt, but since the
         # file is gone this will be a new namespace should it be saved.
         self._is_new = True
 
     def __contains__(self, key): 
         return self.hash.has_key(key)
-        
+
     def __setitem__(self, key, value):
         self.hash[key] = value
 
 
     def keys(self):
         return self.hash.keys()
-        
+
 
 class GoogleContainer(Container):
     namespace_class = GoogleNamespaceManager

beaker/ext/sqla.py

         elif data_dir:
             self.lock_dir = data_dir + "/container_db_lock"
         if self.lock_dir:
-            verify_directory(self.lock_dir)            
+            verify_directory(self.lock_dir)
 
         self.bind = self.__class__.binds.get(str(bind.url), lambda: bind)
         self.table = self.__class__.tables.get('%s:%s' % (bind.url, table.name),

beaker/middleware.py

 
 class CacheMiddleware(object):
     cache = beaker_cache
-    
+
     def __init__(self, app, config=None, environ_key='beaker.cache', **kwargs):
         """Initialize the Cache Middleware
-        
+
         The Cache middleware will make a Cache instance available
         every request under the ``environ['beaker.cache']`` key by
         default. The location in environ can be changed by setting
         ``environ_key``.
-        
+
         ``config``
             dict  All settings should be prefixed by 'cache.'. This
             method of passing variables is intended for Paste and other
             single dictionary. If config contains *no cache. prefixed
             args*, then *all* of the config options will be used to
             intialize the Cache objects.
-        
+
         ``environ_key``
             Location where the Cache instance will be keyed in the WSGI
             environ
-        
+
         ``**kwargs``
             All keyword arguments are assumed to be cache settings and
             will override any settings found in ``config``
         """
         self.app = app
         config = config or {}
-        
+
         self.options = {}
-        
+
         # Update the options with the parsed config
         self.options.update(parse_cache_config_options(config))
-        
+
         # Add any options from kwargs, but leave out the defaults this
         # time
         self.options.update(
             parse_cache_config_options(kwargs, include_defaults=False))
-                
+
         # Assume all keys are intended for cache if none are prefixed with
         # 'cache.'
         if not self.options and config:
             self.options = config
-        
+
         self.options.update(kwargs)
         self.cache_manager = CacheManager(**self.options)
         self.environ_key = environ_key
-    
+
     def __call__(self, environ, start_response):
         if environ.get('paste.registry'):
             if environ['paste.registry'].reglist:
 
 class SessionMiddleware(object):
     session = beaker_session
-    
+
     def __init__(self, wrap_app, config=None, environ_key='beaker.session',
                  **kwargs):
         """Initialize the Session Middleware
-        
+
         The Session middleware will make a lazy session instance
         available every request under the ``environ['beaker.session']``
         key by default. The location in environ can be changed by
         setting ``environ_key``.
-        
+
         ``config``
             dict  All settings should be prefixed by 'session.'. This
             method of passing variables is intended for Paste and other
             single dictionary. If config contains *no cache. prefixed
             args*, then *all* of the config options will be used to
             initialize the Cache objects.
-        
+
         ``environ_key``
             Location where the Session instance will be keyed in the WSGI
             environ
-        
+
         ``**kwargs``
             All keyword arguments are assumed to be session settings and
             will override any settings found in ``config``
 
         """
         config = config or {}
-        
+
         # Load up the default params
         self.options = dict(invalidate_corrupt=True, type=None, 
                            data_dir=None, key='beaker.session.id', 
                     warnings.warn('Session options should start with session. '
                                   'instead of session_.', DeprecationWarning, 2)
                     self.options[key[8:]] = val
-        
+
         # Coerce and validate session params
         coerce_session_params(self.options)
-        
+
         # Assume all keys are intended for cache if none are prefixed with
         # 'cache.'
         if not self.options and config:
             self.options = config
-        
+
         self.options.update(kwargs)
         self.wrap_app = wrap_app
         self.environ_key = environ_key
-        
+
     def __call__(self, environ, start_response):
         session = SessionObject(environ, **self.options)
         if environ.get('paste.registry'):
                 environ['paste.registry'].register(self.session, session)
         environ[self.environ_key] = session
         environ['beaker.get_session'] = self._get_session
-        
+
         if 'paste.testing_variables' in environ and 'webtest_varname' in self.options:
             environ['paste.testing_variables'][self.options['webtest_varname']] = session
-        
+
         def session_start_response(status, headers, exc_info = None):
             if session.accessed():
                 session.persist()
                         headers.append(('Set-cookie', cookie))
             return start_response(status, headers, exc_info)
         return self.wrap_app(environ, session_start_response)
-    
+
     def _get_session(self):
         return Session({}, use_cookies=False, **self.options)
 

beaker/session.py

 import random
 import time
 from datetime import datetime, timedelta
-    
+
 from beaker.crypto import hmac as HMAC, hmac_sha1 as SHA1, md5
 from beaker.util import pickle
 
     def __init__(self, secret, input=None):
         self.secret = secret
         Cookie.BaseCookie.__init__(self, input)
-    
+
     def value_decode(self, val):
         val = val.strip('"')
         sig = HMAC.new(self.secret, val[40:], SHA1).hexdigest()
-        
+
         # Avoid timing attacks
         invalid_bits = 0
         input_sig = val[:40]
         if len(sig) != len(input_sig):
             return None, val
-        
+
         for a, b in zip(sig, input_sig):
             invalid_bits += a != b
-        
+
         if invalid_bits:
             return None, val
         else:
             return val[40:], val
-    
+
     def value_encode(self, val):
         sig = HMAC.new(self.secret, val, SHA1).hexdigest()
         return str(val), ("%s%s" % (sig, val))
 
 class Session(dict):
     """Session object that uses container package for storage.
-    
+
     ``key``
         The name the cookie should be set to.
     ``timeout``
         self.namespace_class = namespace_class or clsmap[self.type]
 
         self.namespace_args = namespace_args
-        
+
         self.request = request
         self.data_dir = data_dir
         self.key = key
-        
+
         self.timeout = timeout
         self.use_cookies = use_cookies
         self.cookie_expires = cookie_expires
-        
+
         # Default cookie domain/path
         self._domain = cookie_domain
         self._path = '/'
         self.secure = secure
         self.id = id
         self.accessed_dict = {}
-        
+
         if self.use_cookies:
             cookieheader = request.get('cookie', '')
             if secret:
                     self.cookie = SignedCookie(secret, input=None)
             else:
                 self.cookie = Cookie.SimpleCookie(input=cookieheader)
-            
+
             if not self.id and self.key in self.cookie:
                 self.id = self.cookie[self.key].value
-        
+
         self.is_new = self.id is None
         if self.is_new:
             self._create_id()
                     self.invalidate()
                 else:
                     raise
-        
+
     def _create_id(self):
         self.id = md5(
             md5("%f%s%f%s" % (time.time(), id({}), random.random(),
                     expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" )
             self.request['cookie_out'] = self.cookie[self.key].output(header='')
             self.request['set_cookie'] = False
-    
+
     def created(self):
         return self['_creation_time']
     created = property(created)
-    
+
     def _set_domain(self, domain):
         self['_domain'] = domain
         self.cookie[self.key]['domain'] = domain
         self.request['cookie_out'] = self.cookie[self.key].output(header='')
         self.request['set_cookie'] = True
-    
+
     def _get_domain(self):
         return self._domain
-    
+
     domain = property(_get_domain, _set_domain)
-    
+
     def _set_path(self, path):
         self['_path'] = path
         self.cookie[self.key]['path'] = path
         self.request['cookie_out'] = self.cookie[self.key].output(header='')
         self.request['set_cookie'] = True
-    
+
     def _get_path(self):
         return self._path
-    
+
     path = property(_get_path, _set_path)
 
     def _delete_cookie(self):
         self.was_invalidated = True
         self._create_id()
         self.load()
-    
+
     def load(self):
         "Loads the data from this session from persistent storage"
         self.namespace = self.namespace_class(self.id,
             **self.namespace_args)
         now = time.time()
         self.request['set_cookie'] = True
-        
+
         self.namespace.acquire_read_lock()
         timed_out = False
         try:
                     '_accessed_time':now
                 }
                 self.is_new = True
-            
+
             if self.timeout is not None and \
                now - session_data['_accessed_time'] > self.timeout:
                 timed_out= True
                     self.last_accessed = None
                 else:
                     self.last_accessed = session_data['_accessed_time']
-                
+
                 # Update the current _accessed_time
                 session_data['_accessed_time'] = now
                 self.update(session_data)
-                self.accessed_dict = session_data.copy()                
+                self.accessed_dict = session_data.copy()
         finally:
             self.namespace.release_read_lock()
         if timed_out:
             self.invalidate()
-    
+
     def save(self, accessed_only=False):
         """Saves the data for this session to persistent storage
-        
+
         If accessed_only is True, then only the original data loaded
         at the beginning of the request will be saved, with the updated
         last accessed time.
-        
+
         """
         # Look to see if its a new session that was only accessed
         # Don't save it under that case
         if accessed_only and self.is_new:
             return None
-        
+
         if not hasattr(self, 'namespace'):
             self.namespace = self.namespace_class(
                                     self.id, 
                                     data_dir=self.data_dir,
                                     digest_filenames=False, 
                                     **self.namespace_args)
-        
+
         self.namespace.acquire_write_lock()
         try:
             if accessed_only:
                 data = dict(self.accessed_dict.items())
             else:
                 data = dict(self.items())
-            
+
             # Save the data
             if not data and 'session' in self.namespace:
                 del self.namespace['session']
             self.namespace.release_write_lock()
         if self.is_new:
             self.request['set_cookie'] = True
-    
+
     def revert(self):
         """Revert the session to its original state from its first
         access in the request"""
         self.clear()
         self.update(self.accessed_dict)
-    
+
     # TODO: I think both these methods should be removed.  They're from
     # the original mod_python code i was ripping off but they really
     # have no use here.
     def lock(self):
         """Locks this session against other processes/threads.  This is
         automatic when load/save is called.
-        
+
         ***use with caution*** and always with a corresponding 'unlock'
         inside a "finally:" block, as a stray lock typically cannot be
         unlocked without shutting down the whole application.
 
 class CookieSession(Session):
     """Pure cookie-based session
-    
+
     Options recognized when using cookie-based sessions are slightly
     more restricted than general sessions.
-    
+
     ``key``
         The name the cookie should be set to.
     ``timeout``
         Domain to use for the cookie.
     ``secure``
         Whether or not the cookie should only be sent over SSL.
-    
+
     """
     def __init__(self, request, key='beaker.session.id', timeout=None,
                  cookie_expires=True, cookie_domain=None, encrypt_key=None,
                  validate_key=None, secure=False, **kwargs):
-        
+
         if not crypto.has_aes and encrypt_key:
             raise InvalidCryptoBackendError("No AES library is installed, can't generate "
                                   "encrypted cookie-only Session.")
-        
+
         self.request = request
         self.key = key
         self.timeout = timeout
         self.secure = secure
         self._domain = cookie_domain
         self._path = '/'
-        
+
         try:
             cookieheader = request['cookie']
         except KeyError:
             cookieheader = ''
-        
+
         if validate_key is None:
             raise BeakerException("No validate_key specified for Cookie only "
                                   "Session.")
-        
+
         try:
             self.cookie = SignedCookie(validate_key, input=cookieheader)
         except Cookie.CookieError:
             self.cookie = SignedCookie(validate_key, input=None)
-        
+
         self['_id'] = self._make_id()
         self.is_new = True
-        
+
         # If we have a cookie, load it
         if self.key in self.cookie and self.cookie[self.key].value is not None:
             self.is_new = False
                 self.clear()
             self.accessed_dict = self.copy()
             self._create_cookie()
-    
+
     def created(self):
         return self['_creation_time']
     created = property(created)
-    
+
     def id(self):
         return self['_id']
     id = property(id)
     def _set_domain(self, domain):
         self['_domain'] = domain
         self._domain = domain
-        
+
     def _get_domain(self):
         return self._domain
-    
+
     domain = property(_get_domain, _set_domain)
-    
+
     def _set_path(self, path):
         self['_path'] = path
         self._path = path
-    
+
     def _get_path(self):
         return self._path
-    
+
     path = property(_get_path, _set_path)
 
     def _encrypt_data(self):
         else:
             data = pickle.dumps(self.copy(), 2)
             return b64encode(data)
-    
+
     def _decrypt_data(self):
         """Bas64, decipher, then un-serialize the data for the session
         dict"""
         else:
             data = b64decode(self.cookie[self.key].value)
             return pickle.loads(data)
-    
+
     def _make_id(self):
         return md5(md5(
             "%f%s%f%s" % (time.time(), id({}), random.random(), getpid())
             ).hexdigest()
         ).hexdigest()
-    
+
     def save(self, accessed_only=False):
         """Saves the data for this session to persistent storage"""
         if accessed_only and self.is_new:
             self.clear()
             self.update(self.accessed_dict)
         self._create_cookie()
-    
+
     def expire(self):
         """Delete the 'expires' attribute on this Session, if any."""
-        
+
         self.pop('_expires', None)
-        
+
     def _create_cookie(self):
         if '_creation_time' not in self:
             self['_creation_time'] = time.time()
         if '_id' not in self:
             self['_id'] = self._make_id()
         self['_accessed_time'] = time.time()
-        
+
         if self.cookie_expires is not True:
             if self.cookie_expires is False:
                 expires = datetime.fromtimestamp( 0x7FFFFFFF )
         val = self._encrypt_data()
         if len(val) > 4064:
             raise BeakerException("Cookie value is too long to store")
-        
+
         self.cookie[self.key] = val
         if '_domain' in self:
             self.cookie[self.key]['domain'] = self['_domain']
             self.cookie[self.key]['domain'] = self._domain
         if self.secure:
             self.cookie[self.key]['secure'] = True
-        
+
         self.cookie[self.key]['path'] = self.get('_path', '/')
-        
+
         if expires:
             self.cookie[self.key]['expires'] = \
                 expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" )
         self.request['cookie_out'] = self.cookie[self.key].output(header='')
         self.request['set_cookie'] = True
-    
+
     def delete(self):
         """Delete the cookie, and clear the session"""
         # Send a delete cookie request
         self._delete_cookie()
         self.clear()
-    
+
     def invalidate(self):
         """Clear the contents and start a new session"""
         self.delete()
 
 class SessionObject(object):
     """Session proxy/lazy creator
-    
+
     This object proxies access to the actual session object, so that in
     the case that the session hasn't been used before, it will be
     setup. This avoid creating and loading the session from persistent
     storage unless its actually used during the request.
-    
+
     """
     def __init__(self, environ, **params):
         self.__dict__['_params'] = params
         self.__dict__['_environ'] = environ
         self.__dict__['_sess'] = None
         self.__dict__['_headers'] = []
-    
+
     def _session(self):
         """Lazy initial creation of session object"""
         if self.__dict__['_sess'] is None:
                 self.__dict__['_sess'] = Session(req, use_cookies=True,
                                                  **params)
         return self.__dict__['_sess']
-    
+
     def __getattr__(self, attr):
         return getattr(self._session(), attr)
-    
+
     def __setattr__(self, attr, value):
         setattr(self._session(), attr, value)
-    
+
     def __delattr__(self, name):
         self._session().__delattr__(name)
-    
+
     def __getitem__(self, key):
         return self._session()[key]
-    
+
     def __setitem__(self, key, value):
         self._session()[key] = value
-    
+
     def __delitem__(self, key):
         self._session().__delitem__(key)
-    
+
     def __repr__(self):
         return self._session().__repr__()
-    
+
     def __iter__(self):
         """Only works for proxying to a dict"""
         return iter(self._session().keys())
-    
+
     def __contains__(self, key):
         return self._session().has_key(key)
-    
+
     def get_by_id(self, id):
         """Loads a session given a session ID"""
         params = self.__dict__['_params']
         if session.is_new:
             return None
         return session
-    
+
     def save(self):
         self.__dict__['_dirty'] = True
-    
+
     def delete(self):
         self.__dict__['_dirty'] = True
         self._session().delete()
-    
+
     def persist(self):
         """Persist the session to the storage
-        
+
         If its set to autosave, then the entire session will be saved
         regardless of if save() has been called. Otherwise, just the
         accessed time will be updated if save() was not called, or
         the session will be saved if save() was called.
-        
+
         """
         if self.__dict__['_params'].get('auto'):
             self._session().save()
                 self._session().save()
             else:
                 self._session().save(accessed_only=True)
-    
+
     def dirty(self):
         return self.__dict__.get('_dirty', False)
-    
+
     def accessed(self):
         """Returns whether or not the session has been accessed"""
         return self.__dict__['_sess'] is not None

beaker/synchronization.py

 
 class NameLock(object):
     """a proxy for an RLock object that is stored in a name based
-    registry.  
-    
+    registry.
+
     Multiple threads can get a reference to the same RLock based on the
     name alone, and synchronize operations related to that name.
 
-    """     
+    """
     locks = util.WeakValuedRegistry()
 
     class NLContainer(object):
         else:
             return self._state.get()
     state = property(state)
-    
+
     def release_read_lock(self):
         state = self.state
 
             raise LockError("lock is in writing state")
         if not state.reading: 
             raise LockError("lock is not in reading state")
-        
+
         if state.reentrantcount == 1:
             self.do_release_read_lock()
             state.reading = False
 
         state.reentrantcount -= 1
-        
+
     def acquire_read_lock(self, wait = True):
         state = self.state
 
         if state.writing: 
             raise LockError("lock is in writing state")
-        
+
         if state.reentrantcount == 0:
             x = self.do_acquire_read_lock(wait)
             if (wait or x):
         elif state.reading:
             state.reentrantcount += 1
             return True
-            
+
     def release_write_lock(self):
         state = self.state
 
             state.writing = False
 
         state.reentrantcount -= 1
-    
+
     release = release_write_lock
-    
+
     def acquire_write_lock(self, wait  = True):
         state = self.state
 
         if state.reading: 
             raise LockError("lock is in reading state")
-        
+
         if state.reentrantcount == 0:
             x = self.do_acquire_write_lock(wait)
             if (wait or x): 
 
     def do_release_read_lock(self):
         raise NotImplementedError()
-    
+
     def do_acquire_read_lock(self):
         raise NotImplementedError()
-    
+
     def do_release_write_lock(self):
         raise NotImplementedError()
-    
+
     def do_acquire_write_lock(self):
         raise NotImplementedError()
 
 
     Adapted for Python/multithreads from Apache::Session::Lock::File,
     http://search.cpan.org/src/CWEST/Apache-Session-1.81/Session/Lock/File.pm
-    
+
     This module does not unlink temporary files, 
     because it interferes with proper locking.  This can cause 
     problems on certain systems (Linux) whose file systems (ext2) do not 
     perform well with lots of files in one directory.  To prevent this
     you should use a script to clean out old files from your lock directory.
-    
+
     """
     def __init__(self, identifier, lock_dir):
         super(FileSynchronizer, self).__init__()
         self._filedescriptor = util.ThreadLocal()
-        
+
         if lock_dir is None:
             lock_dir = tempfile.gettempdir()
         else:
     def _filedesc(self):
         return self._filedescriptor.get()
     _filedesc = property(_filedesc)
-        
+
     def _open(self, mode):
         filedescriptor = self._filedesc
         if filedescriptor is None:
             filedescriptor = os.open(self.filename, mode)
             self._filedescriptor.put(filedescriptor)
         return filedescriptor
-            
+
     def do_acquire_read_lock(self, wait):
         filedescriptor = self._open(os.O_CREAT | os.O_RDONLY)
         if not wait:
         else:
             fcntl.flock(filedescriptor, fcntl.LOCK_EX)
             return True
-    
+
     def do_release_read_lock(self):
         self._release_all_locks()
-    
+
     def do_release_write_lock(self):
         self._release_all_locks()
-    
+
     def _release_all_locks(self):
         filedescriptor = self._filedesc
         if filedescriptor is not None:
 
 class ConditionSynchronizer(SynchronizerImpl):
     """a synchronizer using a Condition."""
-    
+
     def __init__(self, identifier):
         super(ConditionSynchronizer, self).__init__()
 
         # condition object to lock on
         self.condition = _threading.Condition(_threading.Lock())
 
-    def do_acquire_read_lock(self, wait = True):    
+    def do_acquire_read_lock(self, wait = True):
         self.condition.acquire()
         try:
             # see if a synchronous operation is waiting to start
 
         if not wait: 
             return True
-        
+
     def do_release_read_lock(self):
         self.condition.acquire()
         try:
             self.async -= 1
-        
+
             # check if we are the last asynchronous reader thread 
             # out the door.
             if self.async == 0:
                                 "release_read_locks called")
         finally:
             self.condition.release()
-    
+
     def do_acquire_write_lock(self, wait = True):
         self.condition.acquire()
         try:
             # here, we are not a synchronous reader, and after returning,
             # assuming waiting or immediate availability, we will be.
-        
+
             if wait:
                 # if another sync is working, wait
                 while self.current_sync_operation is not None:
                 # we dont want to wait, so forget it
                 if self.current_sync_operation is not None:
                     return False
-            
+
             # establish ourselves as the current sync 
             # this indicates to other read/write operations
             # that they should wait until this is None again
                     return False
         finally:
             self.condition.release()
-        
+
         if not wait: 
             return True
 
             if tries > 5:
                 raise
 
-    
+
 def deprecated(message):
     def wrapper(fn):
         def deprecated_method(*args, **kargs):
         deprecated_method.__doc__ = "%s\n\n%s" % (message, fn.__doc__)
         return deprecated_method
     return wrapper
-    
+
 class ThreadLocal(object):
     """stores a value on a per-thread basis"""
 
 
     def __init__(self):
         self._tlocal = _tlocal()
-    
+
     def put(self, value):
         self._tlocal.value = value
-    
+
     def has(self):
         return hasattr(self._tlocal, 'value')
-            
+
     def get(self, default=None):
         return getattr(self._tlocal, 'value', default)
-            
+
     def remove(self):
         del self._tlocal.value
-    
+
 class SyncDict(object):
     """
     An efficient/threadsafe singleton map algorithm, a.k.a.
     "get a value based on this key, and create if not found or not
     valid" paradigm:
-    
+
         exists && isvalid ? get : create
 
     Designed to work with weakref dictionaries to expect items
-    to asynchronously disappear from the dictionary.  
+    to asynchronously disappear from the dictionary.
 
     Use python 2.3.3 or greater !  a major bug was just fixed in Nov.
     2003 that was driving me nuts with garbage collection/weakrefs in
     this section.
 
-    """    
+    """
     def __init__(self):
         self.mutex = _thread.allocate_lock()
         self.dict = {}
-        
+
     def get(self, key, createfunc, *args, **kwargs):
         try:
             if self.has_key(key):
 
     def has_key(self, key):
         return self.dict.has_key(key)
-        
+
     def __contains__(self, key):
         return self.dict.__contains__(key)
     def __getitem__(self, key):
         self.mutex = _threading.RLock()
         self.dict = weakref.WeakValueDictionary()
 
-sha1 = None            
+sha1 = None
 def encoded_path(root, identifiers, extension = ".enc", depth = 3,
                  digest_filenames=True):
-                 
+
     """Generate a unique file-accessible path from the given list of
     identifiers starting at the given root directory."""
     ident = "_".join(identifiers)
-    
+
     global sha1
     if sha1 is None:
         from beaker.crypto import sha1
-        
+
     if digest_filenames:
         if py3k:
             ident = sha1(ident.encode('utf-8')).hexdigest()
         else:
             ident = sha1(ident).hexdigest()
-    
+
     ident = os.path.basename(ident)
 
     tokens = []
     for d in range(1, depth):
         tokens.append(ident[0:d])
-    
+
     dir = os.path.join(root, *tokens)
     verify_directory(dir)
-    
+
     return os.path.join(dir, ident + extension)
 
 
 def parse_cache_config_options(config, include_defaults=True):
     """Parse configuration options and validate for use with the
     CacheManager"""
-    
+
     # Load default cache options
     if include_defaults:
         options= dict(type='memory', data_dir=None, expire=None, 
         if key.startswith('cache.'):
             options[key[6:]] = val
     coerce_cache_params(options)
-    
+
     # Set cache to enabled if not turned off
     if 'enabled' not in options:
         options['enabled'] = True
-    
+
     # Configure region dict if regions are available
     regions = options.pop('regions', None)
     if regions:
     if hasattr(func, 'im_func'):
         kls = func.im_class
         func = func.im_func
-    
+
     if kls:
         return '%s.%s' % (kls.__module__, kls.__name__)
     else:
       entry_points="""
           [paste.filter_factory]
           beaker_session = beaker.middleware:session_filter_factory
-          
+
           [paste.filter_app_factory]
           beaker_session = beaker.middleware:session_filter_app_factory
-          
+
           [beaker.backends]
           database = beaker.ext.database:DatabaseNamespaceManager
           memcached = beaker.ext.memcached:MemcachedNamespaceManager

tests/test_cache_decorator.py

     opts['cache.regions'] = 'short_term, long_term'
     opts['cache.short_term.expire'] = '2'
     cache = make_cache_obj(**opts)
-    
+
     @cache_region('short_term', 'region_loader')
     def load(person):
         now = datetime.now()
     opts['cache.regions'] = 'short_term, long_term'
     opts['cache.short_term.expire'] = '2'
     cache = make_cache_obj(**opts)
-    
+
     @cache_region('short_term')
     def load_person(person):
         now = datetime.now()
     func = make_region_cached_func()
     result = func('Fred')
     assert 'Fred' in result
-    
+
     result2 = func('Fred')
     assert result == result2
-    
+
     result3 = func('George')
     assert 'George' in result3
     result4 = func('George')
     assert result3 == result4
-    
+
     time.sleep(2)
     result2 = func('Fred')
     assert result != result2
     func = make_region_cached_func()
     result = func('Fred')
     assert 'Fred' in result
-    
+
     result2 = func('Fred')
     assert result == result2
     region_invalidate(func, None, 'region_loader', 'Fred')
-    
+
     result3 = func('Fred')
     assert result3 != result2
-    
+
     result2 = func('Fred')
     assert result3 == result2
-    
+
     # Invalidate a non-existent key
     region_invalidate(func, None, 'region_loader', 'Fredd')
     assert result3 == result2
     func = make_region_cached_func_2()
     result = func('Fred')
     assert 'Fred' in result
-    
+
     result2 = func('Fred')
     assert result == result2
     region_invalidate(func, None, 'Fred')
-    
+
     result3 = func('Fred')
     assert result3 != result2
-    
+
     result2 = func('Fred')
     assert result3 == result2
-    
+
     # Invalidate a non-existent key
     region_invalidate(func, None, 'Fredd')
     assert result3 == result2

tests/test_cachemanager.py

     opts['cache.regions'] = 'short_term, long_term'
     opts['cache.short_term.expire'] = '2'
     cache = make_cache_obj(**opts)
-    
+
     @cache.region('short_term', 'region_loader')
     def load(person):
         now = datetime.now()
 def check_decorator(func):
     result = func('Fred')
     assert 'Fred' in result
-