Jean-Tiare Le Bigot avatar Jean-Tiare Le Bigot committed 16aa54c

rename 'routes' to 'operations' for consistency with DDB names (for real this time) + add big picture in 'extending' section of documentation

Comments (0)

Files changed (32)

 - support for ``DeleteTable`` method
 - support for ``UpdateTable`` method
 - support for ``DescribeTable`` method
-- support for ``GetItem method
+- support for ``GetItem`` method
 - support for ``PutItem`` method
 - support for ``DeleteItem`` method
 - support for ``UpdateItem`` method (small approximations)
 ------------------------
 
 - no support for ``BatchWriteItem``
-- no support for "Excusive Start Key", "Reverse" and "Limit" in
-``Query`` and ``Scan``
+- no support for "Exclusive Start Key", "Reverse" and "Limit" in ``Query`` and ``Scan``
 - no support for "UnprocessedKeys" in ``BatchGetItem``
 - Web entry-point is untested, file bugs if necessary :)

ddbmock/operations/__init__.py

+# -*- coding: utf-8 -*-
+
+from ddbmock.database import dynamodb
+
+def load_table(func):
+    def loader(post, *args):
+        name = post[u'TableName']
+        table = dynamodb.get_table(name)
+
+        return func(post, table, *args)
+    return loader
+

ddbmock/operations/batch_get_item.py

+# -*- coding: utf-8 -*-
+
+from ddbmock.database import dynamodb
+
+def batch_get_item(post):
+    #TODO: limit to 100/batch
+    #TODO: unprocessed keys
+
+    return {
+        "Responses": dynamodb.get_batch(post[u'RequestItems']),
+    }
+

ddbmock/operations/batch_write_item.py

+# -*- coding: utf-8 -*-
+
+from ddbmock.database import dynamodb
+
+def batch_write_item(post):
+    #TODO: limit to 25/batch
+    #TODO: unprocessed keys
+
+    return {
+        "Responses": dynamodb.write_batch(post[u'RequestItems']),
+    }

ddbmock/operations/create_table.py

+# -*- coding: utf-8 -*-
+
+from ddbmock.database import dynamodb
+
+def create_table(post):
+    table = dynamodb.create_table(post[u'TableName'], post)
+
+    return {
+        "TableDescription": table.to_dict(verbose=False),
+    }

ddbmock/operations/delete_item.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+from ddbmock.utils import push_write_throughput
+
+@load_table
+def delete_item(post, table):
+    item = table.delete_item(post[u'Key'], post[u'Expected'])
+
+    capacity = item.get_size().as_units()
+    push_write_throughput(table.name, capacity)
+
+    if post[u'ReturnValues'] == "ALL_OLD":
+        return {
+            "ConsumedCapacityUnits": capacity,
+            "Attributes": item,
+        }
+    else:
+        return {
+            "ConsumedCapacityUnits": capacity,
+        }

ddbmock/operations/delete_table.py

+# -*- coding: utf-8 -*-
+
+from ddbmock.database import dynamodb
+
+def delete_table(post):
+    name = post[u'TableName']
+    table = dynamodb.delete_table(name)
+
+    return {
+        'TableDescription': table.to_dict(verbose=False)
+    }

ddbmock/operations/describe_table.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+
+@load_table
+def describe_table(post, table):
+    return {
+        "Table": table.to_dict()
+    }

ddbmock/operations/get_item.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+from ddbmock.utils import push_read_throughput
+
+@load_table
+def get_item(post, table):
+    base_capacity = 1 if post[u'ConsistentRead'] else 0.5
+    item = table.get(post[u'Key'], post[u'AttributesToGet'])
+
+    if item is not None:
+        capacity = base_capacity*item.get_size().as_units()
+        push_read_throughput(table.name, capacity)
+        return {
+            "ConsumedCapacityUnits": capacity,
+            "Item": item,
+        }
+    else:
+        push_read_throughput(table.name, base_capacity)
+        return {
+            "ConsumedCapacityUnits": base_capacity,
+        }

ddbmock/operations/list_tables.py

+# -*- coding: utf-8 -*-
+
+from ddbmock.database import dynamodb
+
+def list_tables(post):
+    return {
+        'TableNames': dynamodb.list_tables()
+    }
+

ddbmock/operations/put_item.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+from ddbmock.utils import push_write_throughput
+
+@load_table
+def put_item(post, table):
+    old, new = table.put(post[u'Item'], post[u'Expected'])
+    capacity = max(old.get_size().as_units(), new.get_size().as_units())
+
+    push_write_throughput(table.name, capacity)
+
+    ret = {
+        "ConsumedCapacityUnits": capacity,
+    }
+
+    if post[u'ReturnValues'] == "ALL_OLD":
+        ret["Attributes"] = old
+
+    return ret

ddbmock/operations/query.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+from ddbmock.utils import push_write_throughput
+from ddbmock.errors import ValidationException
+
+@load_table
+def query(post, table):
+    if post[u'AttributesToGet'] and post[u'Count']:
+        raise ValidationException("Can not filter fields when only count is requested")
+
+    base_capacity = 1 if post[u'ConsistentRead'] else 0.5
+
+    results = table.query(
+        post[u'HashKeyValue'],
+        post[u'RangeKeyCondition'],
+        post[u'AttributesToGet'],
+        post[u'ExclusiveStartKey'],
+        not post[u'ScanIndexForward'],
+        post[u'Limit'],
+    )
+
+    capacity = base_capacity*results.size.as_units()
+    push_write_throughput(table.name, capacity)
+
+    ret = {
+        "Count": len(results.items),
+        "ConsumedCapacityUnits": capacity,
+    }
+
+    if results.last_key is not None:
+        ret['LastEvaluatedKey'] = results.last_key
+
+    if not post[u'Count']:
+        ret[u'Items'] = results.items
+
+    return ret

ddbmock/operations/scan.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+from ddbmock.utils import push_write_throughput
+from ddbmock.errors import ValidationException
+
+@load_table
+def scan(post, table):
+    if post[u'AttributesToGet'] and post[u'Count']:
+        raise ValidationException("Can not filter fields when only count is requested")
+
+    results = table.scan(
+        post[u'ScanFilter'],
+        post[u'AttributesToGet'],
+        post[u'ExclusiveStartKey'],
+        post[u'Limit'],
+    )
+
+    capacity = 0.5*results.size.as_units()
+    push_write_throughput(table.name, capacity)
+
+    ret = {
+        "Count": len(results.items),
+        "ScannedCount": results.scanned,
+        "ConsumedCapacityUnits": capacity,
+        #TODO: last evaluated key where applicable
+    }
+
+    if not post[u'Count']:
+        ret[u'Items'] = results.items
+
+    return ret

ddbmock/operations/update_item.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+from ddbmock.utils import push_write_throughput
+
+@load_table
+def update_item(post, table):
+    old, new = table.update_item(
+            post[u'Key'],
+            post[u'AttributeUpdates'],
+            post[u'Expected'],
+    )
+
+    capacity = max(old.get_size().as_units(), new.get_size().as_units())
+    push_write_throughput(table.name, capacity)
+    ret = {"ConsumedCapacityUnits": capacity}
+
+    if post[u'ReturnValues'] == "ALL_OLD":
+        ret["Attributes"] = old
+    elif post[u'ReturnValues'] == "ALL_NEW":
+        ret["Attributes"] = new
+    elif post[u'ReturnValues'] == "UPDATED_OLD":
+        ret["Attributes"] = old - new
+    elif post[u'ReturnValues'] == "UPDATED_NEW":
+        ret["Attributes"] = new - old
+
+    return ret

ddbmock/operations/update_table.py

+# -*- coding: utf-8 -*-
+
+from . import load_table
+
+@load_table
+def update_table(post, table):
+    table.update_throughput(post[u'ProvisionedThroughput'][u'ReadCapacityUnits'],
+                            post[u'ProvisionedThroughput'][u'WriteCapacityUnits'],
+                           )
+
+    desc = table.to_dict()
+
+    return {
+        "TableDescription": desc,
+    }

ddbmock/routes/__init__.py

-# -*- coding: utf-8 -*-
-
-from ddbmock.database import dynamodb
-
-def load_table(func):
-    def loader(post, *args):
-        name = post[u'TableName']
-        table = dynamodb.get_table(name)
-
-        return func(post, table, *args)
-    return loader
-

ddbmock/routes/batch_get_item.py

-# -*- coding: utf-8 -*-
-
-from ddbmock.database import dynamodb
-
-def batch_get_item(post):
-    #TODO: limit to 100/batch
-    #TODO: unprocessed keys
-
-    return {
-        "Responses": dynamodb.get_batch(post[u'RequestItems']),
-    }
-

ddbmock/routes/batch_write_item.py

-# -*- coding: utf-8 -*-
-
-from ddbmock.database import dynamodb
-
-def batch_write_item(post):
-    #TODO: limit to 25/batch
-    #TODO: unprocessed keys
-
-    return {
-        "Responses": dynamodb.write_batch(post[u'RequestItems']),
-    }

ddbmock/routes/create_table.py

-# -*- coding: utf-8 -*-
-
-from ddbmock.database import dynamodb
-
-def create_table(post):
-    table = dynamodb.create_table(post[u'TableName'], post)
-
-    return {
-        "TableDescription": table.to_dict(verbose=False),
-    }

ddbmock/routes/delete_item.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-from ddbmock.utils import push_write_throughput
-
-@load_table
-def delete_item(post, table):
-    item = table.delete_item(post[u'Key'], post[u'Expected'])
-
-    capacity = item.get_size().as_units()
-    push_write_throughput(table.name, capacity)
-
-    if post[u'ReturnValues'] == "ALL_OLD":
-        return {
-            "ConsumedCapacityUnits": capacity,
-            "Attributes": item,
-        }
-    else:
-        return {
-            "ConsumedCapacityUnits": capacity,
-        }

ddbmock/routes/delete_table.py

-# -*- coding: utf-8 -*-
-
-from ddbmock.database import dynamodb
-
-def delete_table(post):
-    name = post[u'TableName']
-    table = dynamodb.delete_table(name)
-
-    return {
-        'TableDescription': table.to_dict(verbose=False)
-    }

ddbmock/routes/describe_table.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-
-@load_table
-def describe_table(post, table):
-    return {
-        "Table": table.to_dict()
-    }

ddbmock/routes/get_item.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-from ddbmock.utils import push_read_throughput
-
-@load_table
-def get_item(post, table):
-    base_capacity = 1 if post[u'ConsistentRead'] else 0.5
-    item = table.get(post[u'Key'], post[u'AttributesToGet'])
-
-    if item is not None:
-        capacity = base_capacity*item.get_size().as_units()
-        push_read_throughput(table.name, capacity)
-        return {
-            "ConsumedCapacityUnits": capacity,
-            "Item": item,
-        }
-    else:
-        push_read_throughput(table.name, base_capacity)
-        return {
-            "ConsumedCapacityUnits": base_capacity,
-        }

ddbmock/routes/list_tables.py

-# -*- coding: utf-8 -*-
-
-from ddbmock.database import dynamodb
-
-def list_tables(post):
-    return {
-        'TableNames': dynamodb.list_tables()
-    }
-

ddbmock/routes/put_item.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-from ddbmock.utils import push_write_throughput
-
-@load_table
-def put_item(post, table):
-    old, new = table.put(post[u'Item'], post[u'Expected'])
-    capacity = max(old.get_size().as_units(), new.get_size().as_units())
-
-    push_write_throughput(table.name, capacity)
-
-    ret = {
-        "ConsumedCapacityUnits": capacity,
-    }
-
-    if post[u'ReturnValues'] == "ALL_OLD":
-        ret["Attributes"] = old
-
-    return ret

ddbmock/routes/query.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-from ddbmock.utils import push_write_throughput
-from ddbmock.errors import ValidationException
-
-@load_table
-def query(post, table):
-    if post[u'AttributesToGet'] and post[u'Count']:
-        raise ValidationException("Can filter fields when only count is requested")
-
-    base_capacity = 1 if post[u'ConsistentRead'] else 0.5
-
-    results = table.query(
-        post[u'HashKeyValue'],
-        post[u'RangeKeyCondition'],
-        post[u'AttributesToGet'],
-        post[u'ExclusiveStartKey'],
-        not post[u'ScanIndexForward'],
-        post[u'Limit'],
-    )
-
-    capacity = base_capacity*results.size.as_units()
-    push_write_throughput(table.name, capacity)
-
-    ret = {
-        "Count": len(results.items),
-        "ConsumedCapacityUnits": capacity,
-    }
-
-    if results.last_key is not None:
-        ret['LastEvaluatedKey'] = results.last_key
-
-    if not post[u'Count']:
-        ret[u'Items'] = results.items
-
-    return ret

ddbmock/routes/scan.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-from ddbmock.utils import push_write_throughput
-from ddbmock.errors import ValidationException
-
-@load_table
-def scan(post, table):
-    if post[u'AttributesToGet'] and post[u'Count']:
-        raise ValidationException("Can not filter fields when only count is requested")
-
-    results = table.scan(
-        post[u'ScanFilter'],
-        post[u'AttributesToGet'],
-        post[u'ExclusiveStartKey'],
-        post[u'Limit'],
-    )
-
-    capacity = 0.5*results.size.as_units()
-    push_write_throughput(table.name, capacity)
-
-    ret = {
-        "Count": len(results.items),
-        "ScannedCount": results.scanned,
-        "ConsumedCapacityUnits": capacity,
-        #TODO: last evaluated key where applicable
-    }
-
-    if not post[u'Count']:
-        ret[u'Items'] = results.items
-
-    return ret

ddbmock/routes/update_item.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-from ddbmock.utils import push_write_throughput
-
-@load_table
-def update_item(post, table):
-    old, new = table.update_item(
-            post[u'Key'],
-            post[u'AttributeUpdates'],
-            post[u'Expected'],
-    )
-
-    capacity = max(old.get_size().as_units(), new.get_size().as_units())
-    push_write_throughput(table.name, capacity)
-    ret = {"ConsumedCapacityUnits": capacity}
-
-    if post[u'ReturnValues'] == "ALL_OLD":
-        ret["Attributes"] = old
-    elif post[u'ReturnValues'] == "ALL_NEW":
-        ret["Attributes"] = new
-    elif post[u'ReturnValues'] == "UPDATED_OLD":
-        ret["Attributes"] = old - new
-    elif post[u'ReturnValues'] == "UPDATED_NEW":
-        ret["Attributes"] = new - old
-
-    return ret

ddbmock/routes/update_table.py

-# -*- coding: utf-8 -*-
-
-from . import load_table
-
-@load_table
-def update_table(post, table):
-    table.update_throughput(post[u'ProvisionedThroughput'][u'ReadCapacityUnits'],
-                            post[u'ProvisionedThroughput'][u'WriteCapacityUnits'],
-                           )
-
-    desc = table.to_dict()
-
-    return {
-        "TableDescription": desc,
-    }
Added
New image

docs/pages/extending.rst

 Extending DynamoDB-mock
 #######################
 
+
+Get the source Luke
+===================
+
+::
+
+    $ hg clone ssh://hg@bitbucket.org/Ludia/dynamodb-mock
+    $ pip install nose nosexcover coverage mock webtest boto
+    $ python setup.py develop
+    $ nosetests # --no-skip to run boto integration tests too
+
 Folder structure
 ================
 
             +-- boto    => main/extensive tests
             `-- pyramid => just make sure that all methods are supported
 
+Request flow: the big picture
+=============================
 
-Get the source Luke
-===================
+.. figure::  ../_static/archi.png
+   :align:   center
 
-::
+   Global request flow
 
-    $ hg clone ssh://hg@bitbucket.org/Ludia/dynamodb-mock
-    $ pip install nose nosexcover coverage mock webtests boto
-    $ python setup.py develop
-    $ nosetests # --no-skip to run boto integration tests too
+Just a couple of comments here:
+
+ - The ``router`` relies on introspection to find the validators (if any)
+ - The ``router`` relies on introspection to find the operations
+ - The ``database engine`` relies on introspection to find the configured storage backend
+ - There is a "catch all" in the router that maps to DynamoDB internal server error
 
 
 Adding a method

docs/pages/status.rst

 - No more than 10 ``UPDATING`` tables. WONTFIX
 
 - No more than 1 Throughput decrease/calendar day. DONE
-- No more than *2 Throughput increase/update. DONE
+- No more than \*2 Throughput increase/update. DONE
 
 Types and items Limitations
 ===========================
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.