Commits

Andy Mikhailenko committed 9597edb

Continued refactoring: replaced Document.objects(db) with db.find(Document); added unified query tests for all backends (finally!!). At the moment some tests fail; this is to be fixed.

Comments (0)

Files changed (16)

 #    along with Docu.  If not, see <http://gnu.org/licenses/>.
 
 from document_base import Document, Many
-from fields import Field
 from utils import get_db, load_fixture

doqu/backend_base.py

 
     # these must be defined by the backend subclass
     supports_nested_data = False
-    converter_manager = None
-    lookup_manager = None
+    converter_manager = NotImplemented
+    lookup_manager = NotImplemented
+    query_adapter = NotImplemented
 
     #--------------------+
     #  Magic attributes  |
 
     def __init__(self, **kw):
         "Typical kwargs: host, port, name, user, password."
+        # fail fast if the backend subclass left any of the declared
+        # placeholders (converter_manager, lookup_manager, query_adapter,
+        # ...) as NotImplemented
+        # NOTE(review): dir(self) yields all public names incl. methods and
+        # properties; getattr on a property would execute it here -- confirm
+        # this is intended
+        attrs = (x for x in dir(self) if not x.startswith('_'))
+        self._assert_implemented(*attrs)
+
         self._connection_options = kw
         self.connection = None
         self.connect()
     #  Private attributes  |
     #----------------------+
 
+    @classmethod
+    def _assert_implemented(cls, *attrs):
+        for attr in attrs:
+            assert getattr(cls, attr) != NotImplemented, (
+                'Backend {cls.__module__} must define '
+                '{cls.__name__}.{attr}'.format(**locals()))
+
     def _decorate(self, model, key, data):
         """
         Populates a model instance with given data and initializes its state
         Returns the document instance and a boolean value "created".
         """
         assert kwargs
-        query = doc_class.objects(self).where(**kwargs)
+        query = self.find(doc_class).where(**kwargs)
         if query.count():
             return query[0], False
         else:
             obj.save(self)
             return obj, True
 
-    def get_query(self):
-        """
-        Returns a Query object bound to this storage.
-        """
-        raise NotImplementedError # pragma: nocover
+#    def find(self, schema, **kwargs):
+#        """ Returns a Query object bound to this storage.
+#        """
+#        raise NotImplementedError # pragma: nocover
+
+    def find(self, schema, **kwargs):
+        query = self.query_adapter(storage=self, model=schema)
+        if hasattr(schema, 'contribute_to_query'):
+            query = schema.contribute_to_query(query)
+        return query.where(**kwargs)
+
+    def get_query(self, model):
+        import warnings
+        warnings.warn('StorageAdapter.get_query() is deprecated, use '
+                      'StorageAdapter.find() instead.', DeprecationWarning)
+        return self.find(model)
+
 
     def reconnect(self):
         """
         raise NotImplementedError # pragma: nocover
 
     def value_from_db(self, datatype, value):
-        assert self.converter_manager, 'backend must provide converter manager'
         return self.converter_manager.from_db(datatype, value)
 
     def value_to_db(self, value):
-        assert self.converter_manager, 'backend must provide converter manager'
         return self.converter_manager.to_db(value, self)
 
 

doqu/document_base.py

 
     @classmethod
     def objects(cls, storage):
+        import warnings
+        warnings.warn('Document.objects(db) is deprecated, use '
+                      'db.find(Document) instead', DeprecationWarning)
+        return storage.find(cls)
+
+    # XXX validation-related, move to a mixin within doqu.validation?
+    @classmethod
+    def contribute_to_query(cls, query):
         """
-        Returns a query for records stored in given storage and associates with
+        Returns a query for records stored in given storage and associated with
         given document class. Usage::
 
             events = Event.objects(db)
             :doc:`ext`).
 
         """
-        # get query for this storage; tell it to decorate all fetched records
-        # with our current model
-        query = storage.get_query(model=cls)
-
-        # but hey, we don't need *all* records, we only want those that belong
-        # to this model! let's use validators to filter the results:
+        # use validators to filter the results to only yield records that
+        # belong to this schema
         for name, validators in cls.meta.validators.iteritems():
             for validator in validators:
                 if hasattr(validator, 'filter_query'):

doqu/ext/mongodb/__init__.py

 dist.check_dependencies(__name__)
 
 import pymongo
+from pymongo.objectid import ObjectId
 
 from doqu.backend_base import BaseStorageAdapter, BaseQueryAdapter
 from doqu.utils.data_structures import CachedIterator
 from lookups import lookup_manager
 
 
-class StorageAdapter(BaseStorageAdapter):
-    """
-    :param host:
-    :param port:
-    :param database:
-    :param collection:
-    """
-    supports_nested_data = True
-
-    converter_manager = converter_manager
-    lookup_manager = lookup_manager
-
-    #--------------------+
-    #  Magic attributes  |
-    #--------------------+
-
-    def __contains__(self, key):
-        key = self._string_to_object_id(key)
-        return bool(self.connection.find({'_id': key}).count())
-
-    def __iter__(self):
-        """
-        Yields all keys available for this connection.
-        """
-        return iter(self.connection.find(spec={}, fields={'_id': 1}))
-
-    def __len__(self):
-        return self.connection.count()
-
-    #----------------------+
-    #  Private attributes  |
-    #----------------------+
-
-    def _decorate(self, model, primary_key, raw_data):
-        data = dict(raw_data)
-        key = data.pop('_id')
-        # this case is for queries where we don't know the PKs in advance;
-        # however, we do know them when fetching a certain document by PK
-        if primary_key is None:
-            primary_key = self._object_id_to_string(key)
-        return super(StorageAdapter, self)._decorate(model, primary_key, data)
-
-    def _object_id_to_string(self, pk):
-        if isinstance(pk, pymongo.objectid.ObjectId):
-            return u'x-objectid-{0}'.format(pk)
-        return pk
-
-    def _string_to_object_id(self, pk):
-        # XXX check consistency
-        # MongoDB will *not* find items by the str/unicode representation of
-        # ObjectId so we must wrap them; however, it *will* find items if their
-        # ids were explicitly defined as plain strings. These strings will most
-        # likely be not accepted by ObjectId as arguments.
-        # Also check StorageAdapter.__contains__, same try/catch there.
-        #print 'TESTING GET', model.__name__, primary_key
-        assert isinstance(pk, basestring)
-        if pk.startswith('x-objectid-'):
-            return pymongo.objectid.ObjectId(pk.split('x-objectid-')[1])
-        return pk
-
-    #--------------+
-    #  Public API  |
-    #--------------+
-
-    def clear(self):
-        """
-        Clears the whole storage from data.
-        """
-        self.connection.remove()
-
-    def connect(self):
-        host = self._connection_options.get('host', '127.0.0.1')
-        port = self._connection_options.get('port', 27017)
-        database_name = self._connection_options.get('database', 'default')
-        collection_name = self._connection_options.get('collection', 'default')
-
-        self._mongo_connection = pymongo.Connection(host, port)
-        self._mongo_database = self._mongo_connection[database_name]
-        self._mongo_collection = self._mongo_database[collection_name]
-        self.connection = self._mongo_collection
-
-    def delete(self, primary_key):
-        """
-        Permanently deletes the record with given primary key from the database.
-        """
-        primary_key = self._string_to_object_id(primary_key)
-        self.connection.remove({'_id': primary_key})
-
-    def disconnect(self):
-        self._mongo_connection.disconnect()
-        self._mongo_connection = None
-        self._mongo_database = None
-        self._mongo_collection = None
-        self.connection = None
-
-    def get(self, model, primary_key):
-        """
-        Returns model instance for given model and primary key.
-        Raises KeyError if there is no item with given key in the database.
-        """
-        obj_id = self._string_to_object_id(primary_key)
-        data = self.connection.find_one({'_id': obj_id})
-        if data:
-            return self._decorate(model, str(primary_key), data)
-        raise KeyError('collection "{collection}" of database "{database}" '
-                       'does not contain key "{key}"'.format(
-                           database = self._mongo_database.name,
-                           collection = self._mongo_collection.name,
-                           key = str(primary_key)
-                       ))
-
-    def get_many(self, doc_class, primary_keys):
-        """
-        Returns a list of documents with primary keys from given list. More
-        efficient than calling :meth:`~StorageAdapter.get` multiple times.
-        """
-        obj_ids = [self._string_to_object_id(pk) for pk in primary_keys]
-        results = self.connection.find({'_id': {'$in': obj_ids}}) or []
-        assert len(results) <= len(primary_keys), '_id must be unique'
-        _get_obj_pk = lambda obj: str(self._object_id_to_string(data['_id']))
-        if len(data) == len(primary_keys):
-            return [self._decorate(model, _get_obj_pk(obj), data)
-                    for data in results]
-        keys = [_get_obj_pk(obj) for obj in results]
-        missing_keys = [pk for pk in keys if pk not in primary_keys]
-        raise KeyError('collection "{collection}" of database "{database}" '
-                       'does not contain keys "{keys}"'.format(
-                           database = self._mongo_database.name,
-                           collection = self._mongo_collection.name,
-                           keys = ', '.join(missing_keys)))
-
-    def save(self, data, primary_key=None):
-        """
-        Saves given model instance into the storage. Returns primary key.
-
-        :param data:
-            dict containing all properties to be saved
-        :param primary_key:
-            the key for given object; if undefined, will be generated
-
-        Note that you must provide current primary key for a model instance which
-        is already in the database in order to update it instead of copying it.
-        """
-        outgoing = data.copy()
-        if primary_key:
-            outgoing.update({'_id': self._string_to_object_id(primary_key)})
-#        print outgoing
-        obj_id = self.connection.save(outgoing)
-        return self._object_id_to_string(obj_id) or primary_key
-#        return unicode(self.connection.save(outgoing) or primary_key)
-
-    def get_query(self, model):
-        return QueryAdapter(storage=self, model=model)
-
-
 class QueryAdapter(CachedIterator, BaseQueryAdapter):
 
     #--------------------+
 #        Deletes all records that match current query.
 #        """
 #        raise NotImplementedError
+
+
+class StorageAdapter(BaseStorageAdapter):
+    """
+    :param host:
+    :param port:
+    :param database:
+    :param collection:
+    """
+    supports_nested_data = True
+
+    converter_manager = converter_manager
+    lookup_manager = lookup_manager
+    query_adapter = QueryAdapter
+
+    #--------------------+
+    #  Magic attributes  |
+    #--------------------+
+
+    def __contains__(self, key):
+        key = self._string_to_object_id(key)
+        return bool(self.connection.find({'_id': key}).count())
+
+    def __iter__(self):
+        """
+        Yields all keys available for this connection.
+        """
+        return iter(self.connection.find(spec={}, fields={'_id': 1}))
+
+    def __len__(self):
+        return self.connection.count()
+
+    #----------------------+
+    #  Private attributes  |
+    #----------------------+
+
+    def _decorate(self, model, primary_key, raw_data):
+        data = dict(raw_data)
+        key = data.pop('_id')
+        # this case is for queries where we don't know the PKs in advance;
+        # however, we do know them when fetching a certain document by PK
+        if primary_key is None:
+            primary_key = self._object_id_to_string(key)
+        return super(StorageAdapter, self)._decorate(model, primary_key, data)
+
+    def _object_id_to_string(self, pk):
+        if isinstance(pk, ObjectId):
+            return u'x-objectid-{0}'.format(pk)
+        return pk
+
+    def _string_to_object_id(self, pk):
+        # XXX check consistency
+        # MongoDB will *not* find items by the str/unicode representation of
+        # ObjectId so we must wrap them; however, it *will* find items if their
+        # ids were explicitly defined as plain strings. These strings will most
+        # likely be not accepted by ObjectId as arguments.
+        # Also check StorageAdapter.__contains__, same try/catch there.
+        #print 'TESTING GET', model.__name__, primary_key
+        assert isinstance(pk, basestring)
+        if pk.startswith('x-objectid-'):
+            return ObjectId(pk.split('x-objectid-')[1])
+        return pk
+
+    #--------------+
+    #  Public API  |
+    #--------------+
+
+    def clear(self):
+        """
+        Clears the whole storage from data.
+        """
+        self.connection.remove()
+
+    def connect(self):
+        host = self._connection_options.get('host', '127.0.0.1')
+        port = self._connection_options.get('port', 27017)
+        database_name = self._connection_options.get('database', 'default')
+        collection_name = self._connection_options.get('collection', 'default')
+
+        self._mongo_connection = pymongo.Connection(host, port)
+        self._mongo_database = self._mongo_connection[database_name]
+        self._mongo_collection = self._mongo_database[collection_name]
+        self.connection = self._mongo_collection
+
+    def delete(self, primary_key):
+        """
+        Permanently deletes the record with given primary key from the database.
+        """
+        primary_key = self._string_to_object_id(primary_key)
+        self.connection.remove({'_id': primary_key})
+
+    def disconnect(self):
+        self._mongo_connection.disconnect()
+        self._mongo_connection = None
+        self._mongo_database = None
+        self._mongo_collection = None
+        self.connection = None
+
+    def get(self, model, primary_key):
+        """
+        Returns model instance for given model and primary key.
+        Raises KeyError if there is no item with given key in the database.
+        """
+        obj_id = self._string_to_object_id(primary_key)
+        data = self.connection.find_one({'_id': obj_id})
+        if data:
+            return self._decorate(model, str(primary_key), data)
+        raise KeyError('collection "{collection}" of database "{database}" '
+                       'does not contain key "{key}"'.format(
+                           database = self._mongo_database.name,
+                           collection = self._mongo_collection.name,
+                           key = str(primary_key)
+                       ))
+
+    def get_many(self, doc_class, primary_keys):
+        """
+        Returns a list of documents with primary keys from given list. More
+        efficient than calling :meth:`~StorageAdapter.get` multiple times.
+        """
+        obj_ids = [self._string_to_object_id(pk) for pk in primary_keys]
+        results = self.connection.find({'_id': {'$in': obj_ids}}) or []
+        assert len(results) <= len(primary_keys), '_id must be unique'
+        _get_obj_pk = lambda obj: str(self._object_id_to_string(data['_id']))
+        if len(data) == len(primary_keys):
+            return [self._decorate(model, _get_obj_pk(obj), data)
+                    for data in results]
+        keys = [_get_obj_pk(obj) for obj in results]
+        missing_keys = [pk for pk in keys if pk not in primary_keys]
+        raise KeyError('collection "{collection}" of database "{database}" '
+                       'does not contain keys "{keys}"'.format(
+                           database = self._mongo_database.name,
+                           collection = self._mongo_collection.name,
+                           keys = ', '.join(missing_keys)))
+
+    def save(self, data, primary_key=None):
+        """
+        Saves given model instance into the storage. Returns primary key.
+
+        :param data:
+            dict containing all properties to be saved
+        :param primary_key:
+            the key for given object; if undefined, will be generated
+
+        Note that you must provide current primary key for a model instance which
+        is already in the database in order to update it instead of copying it.
+        """
+        outgoing = data.copy()
+        if primary_key:
+            outgoing.update({'_id': self._string_to_object_id(primary_key)})
+#        print outgoing
+        obj_id = self.connection.save(outgoing)
+        return self._object_id_to_string(obj_id) or primary_key
+#        return unicode(self.connection.save(outgoing) or primary_key)

doqu/ext/shelve_db/__init__.py

 __all__ = ['StorageAdapter']
 
 
-class StorageAdapter(BaseStorageAdapter):
-    """
-    :param path:
-        relative or absolute path to the database file (e.g. `test.db`)
-
-    """
-
-    supports_nested_data = True
-    converter_manager = converter_manager
-    lookup_manager = lookup_manager
-
-    #--------------------+
-    #  Magic attributes  |
-    #--------------------+
-
-    def __contains__(self, key):
-        return key in self.connection
-
-    def __iter__(self):
-        return iter(self.connection)
-
-    def __len__(self):
-        return len(self.connection)
-
-    def _generate_uid(self):
-        key = str(uuid.uuid4())
-        assert key not in self
-        return key
-
-    #--------------+
-    #  Public API  |
-    #--------------+
-
-    def clear(self):
-        """
-        Clears the whole storage from data.
-        """
-        self.connection.clear()
-
-    def connect(self):
-        """
-        Connects to the database. Raises RuntimeError if the connection is not
-        closed yet. Use :meth:`StorageAdapter.reconnect` to explicitly close
-        the connection and open it again.
-        """
-        if self.connection is not None:
-            raise RuntimeError('already connected')
-
-        path = self._connection_options['path']
-        self.connection = shelve.open(path)
-
-        # if you delete the following line, here are reasons of the hideous
-        # warnings that you are going to struggle with:
-        #  http://www.mail-archive.com/python-list@python.org/msg248496.html
-        #  http://bugs.python.org/issue6294
-        # so just don't.
-        atexit.register(lambda: self.connection is not None and
-                                self.connection.close())
-
-    def disconnect(self):
-        """
-        Writes the data into the file, closes the file and deletes the
-        connection.
-        """
-        self.connection.close()
-        self.connection = None
-
-    def delete(self, primary_key):
-        """
-        Permanently deletes the record with given primary key from the database.
-        """
-        del self.connection[primary_key]
-
-    def get(self, model, primary_key):
-        """
-        Returns model instance for given model and primary key.
-        """
-        primary_key = str(primary_key)
-        data = self.connection[primary_key]
-        return self._decorate(model, primary_key, data)
-
-    def save(self, data, primary_key=None, sync=False):
-        """
-        Saves given model instance into the storage. Returns primary key.
-
-        :param data:
-            dict containing all properties to be saved
-        :param primary_key:
-            the key for given object; if undefined, will be generated
-        :param sync:
-            if `True`, the storage is synchronized to disk immediately. This
-            slows down bulk operations but ensures that the data is stored no
-            matter what happens. Normally the data is synchronized on exit.
-
-        Note that you must provide current primary key for a model instance which
-        is already in the database in order to update it instead of copying it.
-        """
-        assert isinstance(data, dict)
-
-        primary_key = str(primary_key or self._generate_uid())
-
-        self.connection[primary_key] = data
-
-        if sync:
-            self.connection.sync()
-
-        return primary_key
-
-    def get_query(self, model):
-        return QueryAdapter(storage=self, model=model)
-
-
 class QueryAdapter(CachedIterator, BaseQueryAdapter):
     """
     The Query class.
         #print 'new sort spec:', sort_spec
 
         return self._clone(extra_ordering=sort_spec)
+
+
+class StorageAdapter(BaseStorageAdapter):
+    """
+    :param path:
+        relative or absolute path to the database file (e.g. `test.db`)
+
+    """
+    supports_nested_data = True
+    converter_manager = converter_manager
+    lookup_manager = lookup_manager
+    query_adapter = QueryAdapter
+
+    #--------------------+
+    #  Magic attributes  |
+    #--------------------+
+
+    def __contains__(self, key):
+        """
+        Returns True if a record with given primary key exists.
+        """
+        return key in self.connection
+
+    def __iter__(self):
+        """
+        Yields all keys available for this connection.
+        """
+        return iter(self.connection)
+
+    def __len__(self):
+        """
+        Returns the total number of records in the storage.
+        """
+        return len(self.connection)
+
+    def _generate_uid(self):
+        # random UUID as string; asserts it is not already used as a key
+        key = str(uuid.uuid4())
+        assert key not in self
+        return key
+
+    #--------------+
+    #  Public API  |
+    #--------------+
+
+    def clear(self):
+        """
+        Clears the whole storage from data.
+        """
+        self.connection.clear()
+
+    def connect(self):
+        """
+        Connects to the database. Raises RuntimeError if the connection is not
+        closed yet. Use :meth:`StorageAdapter.reconnect` to explicitly close
+        the connection and open it again.
+        """
+        if self.connection is not None:
+            raise RuntimeError('already connected')
+
+        path = self._connection_options['path']
+        self.connection = shelve.open(path)
+
+        # if you delete the following line, here are reasons of the hideous
+        # warnings that you are going to struggle with:
+        #  http://www.mail-archive.com/python-list@python.org/msg248496.html
+        #  http://bugs.python.org/issue6294
+        # so just don't.
+        # NOTE(review): this registers a new handler on *every* connect();
+        # repeated reconnects accumulate handlers -- confirm acceptable
+        atexit.register(lambda: self.connection is not None and
+                                self.connection.close())
+
+    def disconnect(self):
+        """
+        Writes the data into the file, closes the file and deletes the
+        connection.
+        """
+        self.connection.close()
+        self.connection = None
+
+    def delete(self, primary_key):
+        """
+        Permanently deletes the record with given primary key from the database.
+        """
+        del self.connection[primary_key]
+
+    def get(self, model, primary_key):
+        """
+        Returns model instance for given model and primary key.
+        """
+        primary_key = str(primary_key)
+        data = self.connection[primary_key]
+        return self._decorate(model, primary_key, data)
+
+    def save(self, data, primary_key=None, sync=False):
+        """
+        Saves given model instance into the storage. Returns primary key.
+
+        :param data:
+            dict containing all properties to be saved
+        :param primary_key:
+            the key for given object; if undefined, will be generated
+        :param sync:
+            if `True`, the storage is synchronized to disk immediately. This
+            slows down bulk operations but ensures that the data is stored no
+            matter what happens. Normally the data is synchronized on exit.
+
+        Note that you must provide current primary key for a model instance which
+        is already in the database in order to update it instead of copying it.
+        """
+        assert isinstance(data, dict)
+
+        primary_key = str(primary_key or self._generate_uid())
+
+        self.connection[primary_key] = data
+
+        if sync:
+            self.connection.sync()
+
+        return primary_key

doqu/ext/shelve_db/lookups.py

 # we define operations as functions that expect
 mapping = {
     'between':      lambda a,b: b is not None and a[0] <= b <= a[1],
-    'contains':     lambda a,b: a in b,
+    'contains':     lambda a,b: (a in b if isinstance(a, basestring) else
+                                 all(x in b for x in a)),
     'contains_any': lambda a,b: b is not None and any(x in b for x in a),
     'endswith':     lambda a,b: b is not None and b.endswith(a),
     'equals':       lambda a,b: a.pk == b if hasattr(a, 'pk') else a == b,

doqu/ext/shove_db/__init__.py

     .. _SQLAlchemy's: http://www.sqlalchemy.org/docs/04/dbengine.html#dbengine_establishing
 
     """
-    supports_nested_data = True
-
-    converter_manager = shelve_db.converter_manager
-    lookup_manager = shelve_db.lookup_manager
-
     #--------------------+
     #  Magic attributes  |
     #--------------------+
             raise RuntimeError('already connected')
 
         self.connection = Shove(**self._connection_options)
-
-    #--------------+
-    #  Public API  |
-    #--------------+
-
-    def get_query(self, model):
-        return QueryAdapter(storage=self, model=model)
-
-
-class QueryAdapter(shelve_db.QueryAdapter):
-    """
-    The Query class.
-    """

doqu/ext/tokyo_cabinet/__init__.py

 __all__ = ['StorageAdapter']
 
 
-class StorageAdapter(BaseStorageAdapter):
-    """
-    :param path:
-        relative or absolute path to the database file (e.g. `test.tct`)
-
-    .. note::
-
-        Currently only *table* flavour of Tokyo Cabinet databases is supported.
-        It is uncertain whether it is worth supporting other flavours as they
-        do not provide query mechanisms other than access by primary key.
-
-    """
-
-    supports_nested_data = False
-    converter_manager = converter_manager
-    lookup_manager = lookup_manager
-
-    #--------------------+
-    #  Magic attributes  |
-    #--------------------+
-
-    def __contains__(self, key):
-        return key in self.connection
-
-    def __iter__(self):
-        return iter(self.connection)
-
-    def __len__(self):
-        return len(self.connection)
-
-    #--------------+
-    #  Public API  |
-    #--------------+
-
-    def clear(self):
-        """
-        Clears the whole storage from data, resets autoincrement counters.
-        """
-        self.connection.clear()
-
-    def connect(self):
-        """
-        Connects to the database. Raises RuntimeError if the connection is not
-        closed yet. Use :meth:`StorageAdapter.reconnect` to explicitly close
-        the connection and open it again.
-        """
-        if self.connection is not None:
-            raise RuntimeError('already connected')
-
-        path = self._connection_options['path']
-        self.connection = tc.TDB()
-        self.connection.open(path, tc.TDBOWRITER | tc.TDBOCREAT)
-
-    def delete(self, primary_key):
-        """
-        Permanently deletes the record with given primary key from the database.
-        """
-        del self.connection[primary_key]
-
-    def disconnect(self):
-        """
-        Closes internal store and removes the reference to it.
-        """
-        self.connection.close()
-        self.connection = None
-
-    def get(self, doc_class, primary_key):
-        """
-        Returns document object for given document class and primary key.
-        """
-        data = self.connection[primary_key]
-        return self._decorate(doc_class, primary_key, data)
-
-    def save(self, data, primary_key=None):
-        """
-        Saves given model instance into the storage. Returns primary key.
-
-        :param data:
-            dict containing all properties to be saved
-        :param primary_key:
-            the key for given object; if undefined, will be generated
-
-        Note that you must provide current primary key for a document object
-        which is already in the database in order to update it instead of
-        copying it.
-        """
-        # sanitize data for Tokyo Cabinet:
-        # None-->'None' is wrong, force None-->''
-        for key in data:
-            if data[key] is None:
-                data[key] = ''
-            try:
-                data[key] = str(data[key])
-            except UnicodeEncodeError:
-                data[key] = unicode(data[key]).encode('UTF-8')
-
-        primary_key = primary_key or unicode(self.connection.uid())
-
-        self.connection[primary_key] = data
-
-        return primary_key
-
-    def get_query(self, model):
-        return QueryAdapter(storage=self, model=model)
-
-
 class QueryAdapter(CachedIterator, BaseQueryAdapter):
     """
     The Query class.
         See pyrant.query.Query.filter documentation for details.
         """
         conditions = list(self._get_native_conditions(lookups, negate))
-#        print lookups, '  -->  ', conditions
+
+        # XXX hm, the manager returns a plain list without grouping; I guess
+        # it's broken and needs a redesign
+        conditions = [conditions] if conditions else None
 
         #for x in native_conditions:
         #    q = q.filter(**x)
         Deletes all records that match current query.
         """
         self._query.remove()
+
+
+class StorageAdapter(BaseStorageAdapter):
+    """
+    :param path:
+        relative or absolute path to the database file (e.g. `test.tct`)
+
+    .. note::
+
+        Currently only *table* flavour of Tokyo Cabinet databases is supported.
+        It is uncertain whether it is worth supporting other flavours as they
+        do not provide query mechanisms other than access by primary key.
+
+    """
+
+    supports_nested_data = False
+    converter_manager = converter_manager
+    lookup_manager = lookup_manager
+    query_adapter = QueryAdapter
+
+    #--------------------+
+    #  Magic attributes  |
+    #--------------------+
+
+    def __contains__(self, key):
+        return key in self.connection
+
+    def __iter__(self):
+        return iter(self.connection)
+
+    def __len__(self):
+        return len(self.connection)
+
+    #--------------+
+    #  Public API  |
+    #--------------+
+
+    def clear(self):
+        """
+        Clears the whole storage from data, resets autoincrement counters.
+        """
+        self.connection.clear()
+
+    def connect(self):
+        """
+        Connects to the database. Raises RuntimeError if the connection is not
+        closed yet. Use :meth:`StorageAdapter.reconnect` to explicitly close
+        the connection and open it again.
+        """
+        if self.connection is not None:
+            raise RuntimeError('already connected')
+
+        path = self._connection_options['path']
+        self.connection = tc.TDB()
+        self.connection.open(path, tc.TDBOWRITER | tc.TDBOCREAT)
+
+    def delete(self, primary_key):
+        """
+        Permanently deletes the record with given primary key from the database.
+        """
+        del self.connection[primary_key]
+
+    def disconnect(self):
+        """
+        Closes internal store and removes the reference to it.
+        """
+        self.connection.close()
+        self.connection = None
+
+    def get(self, doc_class, primary_key):
+        """
+        Returns document object for given document class and primary key.
+        """
+        data = self.connection[primary_key]
+        return self._decorate(doc_class, primary_key, data)
+
+    def save(self, data, primary_key=None):
+        """
+        Saves given model instance into the storage. Returns primary key.
+
+        :param data:
+            dict containing all properties to be saved
+        :param primary_key:
+            the key for given object; if undefined, will be generated
+
+        Note that you must provide current primary key for a document object
+        which is already in the database in order to update it instead of
+        copying it.
+        """
+        # sanitize data for Tokyo Cabinet:
+        # None-->'None' is wrong, force None-->''
+        for key in data:
+            if data[key] is None:
+                data[key] = ''
+            try:
+                data[key] = str(data[key])
+            except UnicodeEncodeError:
+                data[key] = unicode(data[key]).encode('UTF-8')
+
+        primary_key = primary_key or unicode(self.connection.uid())
+
+        self.connection[primary_key] = data
+
+        return primary_key

doqu/ext/tokyo_cabinet/converters.py

 
     @classmethod
     def to_db(self, value, storage):
-        return ', '.join(value)
+        # NOTE(review): the first parameter of a classmethod is
+        # conventionally called `cls`, not `self`.
+        # Coerce each item to unicode so that non-string list members
+        # can be joined without raising TypeError.
+        return ', '.join(unicode(x) for x in value)
 
 
 @converter_manager.register(Document)

doqu/ext/tokyo_tyrant/storage.py

     supports_nested_data = False
+    # Class-level hooks required by the backend base class; they must not
+    # be left as NotImplemented (see StorageAdapter._assert_implemented
+    # in doqu/backend_base.py).
     converter_manager = converter_manager
     lookup_manager = lookup_manager
+    query_adapter = QueryAdapter
 
     #--------------------+
     #  Magic attributes  |
     def disconnect(self):
+        # NOTE(review): unlike the Tokyo Cabinet backend, no explicit
+        # close() is performed here -- presumably the Tyrant client
+        # handles it; verify.
         self.connection = None
 
-    def get_query(self, model):
-        return QueryAdapter(storage=self, model=model)
-
     def save(self, data, primary_key=None):
         """
         Saves given model instance into the storage. Returns primary key.

tests/base_query.py

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Base for backend-specific query tests.
+
+Not intended to be run itself.
+"""
+import os
+import unittest2 as unittest
+
+from doqu import Document, get_db
+from doqu.utils import is_doc_valid
+from doqu import validators
+
+
+__test__ = False  # tell nose to skip this module
+
+
+class Person(Document):
+    """Simple two-field document schema shared by all query tests."""
+    structure = dict(
+        name = unicode,
+        age = int,
+    )
+
+
+class BaseQueryTestCase(unittest.TestCase):
+    """
+    Shared test case: saves a small fixture in setUp() and exercises the
+    unified query API (``db.find(Person).where(...)``) against whatever
+    backend the subclass's get_connection() returns.
+    """
+    # Fixture records saved under fixed primary keys before each test.
+    FIXTURE = {
+        'john-id': {'name': u'John', 'age': 30},
+        'mary-id': {'name': u'Mary', 'age': 25}
+    }
+    TMP_FILENAME_EXTENSION = 'tmp'  # matters for Tokyo Cabinet
+
+    #-- These should be overloaded:
+
+    def get_connection(self):
+        # Subclasses must return a connected storage adapter instance.
+        raise NotImplementedError('Backend test case must implement '
+                                  'the method get_connection().')
+
+    #-- All below till EOF should not be touched or overloaded
+
+    @property
+    def _tmp_filename(self):
+        # Absolute path of a temp file unique to the concrete test class;
+        # used by file-backed backends and removed in tearDown().
+        return os.path.abspath('doqu_tests_{modname}.{clsname}.{ext}'.format(
+            modname = __name__,
+            clsname = self.__class__.__name__,
+            ext = self.TMP_FILENAME_EXTENSION
+        ))
+
+    def setUp(self):
+        self.db = self.get_connection()
+        for pk, data in self.FIXTURE.items():
+            self.db.save(data, primary_key=pk)
+
+    def tearDown(self):
+        self.db.disconnect()
+        if os.path.exists(self._tmp_filename):
+            os.unlink(self._tmp_filename)
+
+    def assert_finds(self, *names, **lookups):
+        # asserts that given lookups yield all
+        # people with given names and nobody else
+        query = self.db.find(Person).where(**lookups)
+        self.assertEquals(sorted(names), sorted(x['name'] for x in query))
+        return query
+
+    def assert_finds_nobody(self, **lookups):
+        # syntax sugar: assert_finds() with an empty list of names
+        return self.assert_finds(**lookups)
+
+    #-- Actual tests
+
+    def test_no_conditions(self):
+        "Simple schema-bound query"
+#        q = self.db.find(Person)
+#        self.assert_finds(q, 'John', 'Mary')
+
+        self.assert_finds('John', 'Mary')
+
+    # NOTE(review): the bare string below is a no-op expression, kept only
+    # as a reference list of lookup operators (Tokyo Cabinet flavour).
+    """
+    'between':      lambda k,v,p: (k, tc.TDBQCNUMBT, [int(p(x)) for x in v]),
+    'contains':     str_or_list('contains'),
+    'contains_any': lambda k,v,p: (k, tc.TDBQCSTROR, p(v)),
+    'endswith':     lambda k,v,p: (k, tc.TDBQCSTREW, p(v)),
+    'equals':       str_or_num('equals'),
+    'exists':       lambda k,v,p: (k, tc.TDBQCSTRRX, ''),
+    'gt':           lambda k,v,p: (k, tc.TDBQCNUMGT, p(v)),
+    'gte':          lambda k,v,p: (k, tc.TDBQCNUMGE, p(v)),
+    'in':           str_or_num('in'),
+    'like':         str_or_list('like'),
+    'like_any':     lambda k,v,p: (k, tc.TDBQCFTSOR, p(v)),
+    'lt':           lambda k,v,p: (k, tc.TDBQCNUMLT, p(v)),
+    'lte':          lambda k,v,p: (k, tc.TDBQCNUMLE, p(v)),
+    'matches':      lambda k,v,p: (k, tc.TDBQCSTRRX, p(v)),
+    'search':       lambda k,v,p: (k, tc.TDBQCFTSEX, p(v)),
+    'startswith':   lambda k,v,p: (k, tc.TDBQCSTRBW, p(v)),
+    'year':         lambda k,v,p: (k, tc.TDBQCSTRRX, '^%d....'%v),
+    'month':        lambda k,v,p: (k, tc.TDBQCSTRRX, '^....%0.2d..'%v),
+    'day':          lambda k,v,p: (k, tc.TDBQCSTRRX, '^......%0.2d'%v),
+    """
+
+    def test_op_between(self):
+        self.assert_finds('John', age__between=(26, 55))
+        self.assert_finds('John', 'Mary', age__between=(25, 55))
+
+    def test_op_contains(self):
+        self.assert_finds('John', name__contains='J')
+        self.assert_finds('John', name__contains=['J', 'o'])
+        self.assert_finds_nobody(name__contains=['J', 'M'])  # nobody
+
+    def test_op_contains_any(self):
+        self.assert_finds('John', name__contains_any=['J'])
+        self.assert_finds('John', 'Mary', name__contains_any=['J','M'])
+
+    def test_op_endswith(self):
+        self.assert_finds('John', name__endswith='hn')
+        self.assert_finds('Mary', name__endswith='y')
+
+    def test_op_equals(self):
+        "Items can be found by simple value comparison"
+        self.assert_finds('John', name='John')
+        self.assert_finds('Mary', age=25)
+
+    def test_op_exists(self):
+        self.assert_finds('John', 'Mary', name__exists=True)
+        self.assert_finds_nobody(name__exists=False)
+        self.assert_finds_nobody(whatchamacallit__exists=True)
+        self.assert_finds('John', 'Mary', whatchamacallit__exists=False)
+
+    def test_op_gt(self):
+        self.assert_finds('John', age__gt=25)
+
+    def test_op_gte(self):
+        self.assert_finds('John', 'Mary', age__gte=25)
+
+    def test_op_in(self):
+        self.assert_finds('John', age__in=[29,30])
+        self.assert_finds('Mary', age__in=[15,25])
+        self.assert_finds('John', 'Mary', age__in=[25,29,30])
+
+    def test_op_lt(self):
+        self.assert_finds('Mary', age__lt=30)
+
+    def test_op_lte(self):
+        self.assert_finds('John', 'Mary', age__lte=30)
+
+    def test_op_matches(self):
+        self.assert_finds('Mary', name__matches='M.ry')
+        self.assert_finds('Mary', name__matches='^M')
+        self.assert_finds_nobody(name__matches='^m')
+
+    def test_op_startswith(self):
+        self.assert_finds('John', name__startswith='J')
+        self.assert_finds('Mary', name__startswith='M')
+
+    # The date-part lookups are not implemented yet; mark as expected
+    # failures so the suites stay green until tests are written.
+    @unittest.expectedFailure
+    def test_op_year(self):
+        raise NotImplementedError
+
+    @unittest.expectedFailure
+    def test_op_month(self):
+        raise NotImplementedError
+
+    @unittest.expectedFailure
+    def test_op_day(self):
+        raise NotImplementedError
+
+
+if __name__ == '__main__':
+    # Standard entry point for direct execution.
+    unittest.main()

tests/test_ext_mongodb.py

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"MongoDB backend tests."
+from doqu.ext.mongodb import StorageAdapter
+import base_query
+
+
+class MongoQueryTestCase(base_query.BaseQueryTestCase):
+    """Runs the unified query tests against the MongoDB backend."""
+    def get_connection(self):
+        # Requires a reachable MongoDB server; uses a dedicated test
+        # database and collection.
+        return StorageAdapter(database='doqu_tests', collection='doqu_tests')
+
+    def test_op_matches_caseless(self):
+        # Backend-specific lookup: case-insensitive regex match.
+        self.assert_finds('Mary', name__matches_caseless='^M')
+        self.assert_finds('Mary', name__matches_caseless='^m')

tests/test_ext_shelve.py

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"Shelve (BDB) backend tests."
+from doqu.ext.shelve_db import StorageAdapter
+import base_query
+
+
+class ShelveQueryTestCase(base_query.BaseQueryTestCase):
+    """Runs the unified query tests against the shelve backend."""
+    def get_connection(self):
+        # Backed by a temp file; removed by the base class's tearDown().
+        return StorageAdapter(path=self._tmp_filename)

tests/test_ext_shove.py

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"Shove backend tests."
+from doqu.ext.shove_db import StorageAdapter
+import base_query
+
+
+class ShoveQueryTestCase(base_query.BaseQueryTestCase):
+    """Runs the unified query tests against the Shove backend."""
+    def get_connection(self):
+        # Constructed with default options (no path/host required).
+        return StorageAdapter()

tests/test_ext_tokyo_cabinet.py

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"Tokyo Cabinet backend tests."
+import os
+from doqu.ext.tokyo_cabinet import StorageAdapter
+import base_query
+
+
+class TokyoCabinetQueryTestCase(base_query.BaseQueryTestCase):
+    """Runs the unified query tests against the Tokyo Cabinet backend."""
+    def get_connection(self):
+        # File-backed store at a temp path; removed in tearDown().
+        return StorageAdapter(path=self._tmp_filename)
+
+    # TODO(review): these three placeholders error out when run. The base
+    # class marks such stubs with @unittest.expectedFailure; do the same
+    # here (requires importing unittest2 into this module).
+    def test_op_like(self):
+        raise NotImplementedError
+
+    def test_op_like_any(self):
+        raise NotImplementedError
+
+    def test_op_search(self):
+        raise NotImplementedError

tests/test_ext_tokyo_tyrant.py

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"TokyoTyrant backend tests."
+import os
+import time
+
+from doqu.ext.tokyo_tyrant import StorageAdapter
+import test_ext_tokyo_cabinet as tc
+
+
+class TokyoTyrantQueryTestCase(tc.TokyoCabinetQueryTestCase):
+    TMP_FILENAME_EXTENSION = 'tct'
+    # A sandbox Tyrant instance parametres:
+    TYRANT_HOST = '127.0.0.1'
+    TYRANT_PORT = 1983    # default is 1978 so we avoid clashes
+
+    @property
+    def _pid_filename(self):
+        return os.path.abspath('{0}.pid'.format(self._tmp_filename))
+
+    def _start_tyrant(self):
+        tmpl = 'ttserver -dmn -host {host} -port {port} -pid {pid} {file}'
+        cmd = tmpl.format(host=self.TYRANT_HOST, port=self.TYRANT_PORT,
+                          pid=self._pid_filename, file=self._tmp_filename)
+        if os.path.exists(self._pid_filename):
+            return
+        # we need to wait a bit before and after restarting the server or we'll
+        # get a nasty "connection refused" error
+        time.sleep(0.1)
+        os.popen(cmd).read()
+        time.sleep(0.1)
+
+    def _stop_tyrant(self):
+        if os.path.exists(self._pid_filename):
+            pid = open(self._pid_filename).read()
+        else:
+            # try by port
+            cmd = ('ps -e -o pid,command | grep "ttserver" | '
+                   'grep "\-port {0}"').format(self.TYRANT_PORT)
+            line = os.popen(cmd).read()
+            try:
+                pid = int(line.strip().split(' ')[0])
+            except ValueError:
+                'Expected "pid command" format, got {0}'.format(line)
+
+        os.popen('kill {pid}'.format(**locals()))
+        while 'pid' in os.popen('ps x -o pid|grep {0}'.format(pid)):
+            time.sleep(0.1)
+        if os.path.exists(self._pid_filename):
+            os.unlink(self._pid_filename)
+
+    def get_connection(self):
+        self._start_tyrant()
+        return StorageAdapter(host=self.TYRANT_HOST, port=self.TYRANT_PORT)
+
+    def tearDown(self):
+        self.db.disconnect()
+        self._stop_tyrant()
+        super(TokyoTyrantQueryTestCase, self).tearDown()