Commits

Max Noel committed fbaac24

* Added autoincrement_int schema checks and better error reporting.
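The diff below moves the autoincrement helpers from module level onto ConnectionBorg and DynamoDBModel: create_table now seeds a "magic" item (hash_key 0) holding __max_hash_key__, and save() reserves the next key with a conditional-write retry loop. As a rough usage sketch (the DynamoDBModel base class and create_table signature are assumed from the surrounding library; the LogEntry model and its fields are illustrative, not part of this commit):

    # Illustrative only: LogEntry, its fields, and the create_table signature
    # are assumptions about the surrounding library, not shown in this diff.
    from dynamodb_mapper.model import ConnectionBorg, DynamoDBModel, autoincrement_int

    class LogEntry(DynamoDBModel):
        __table__ = u"log_entries"
        __hash_key__ = u"id"
        __schema__ = {u"id": autoincrement_int, u"text": unicode}

    conn = ConnectionBorg()
    conn.set_credentials("my-access-key-id", "my-secret-key")
    # For autoincrement_int hash keys, create_table also writes the magic
    # item (hash_key=0) that tracks __max_hash_key__.
    conn.create_table(LogEntry, 10, 10, wait_for_active=True)

    entry = LogEntry()
    entry.id = 0                  # 0 is the placeholder meaning "assign the next id"
    entry.text = u"first entry"
    entry.save()                  # reserves the next id via the conditional-write loop
    print entry.id                # now holds the id that was actually stored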

Files changed (1)

dynamodb_mapper/model.py

     pass
 
 
-def _create_autoincrement_magic_item(table):
-        item = table.new_item(hash_key=0, attrs={
-            "__max_hash_key__": 0
-        })
-        # Conditional write: don't risk overwriting the DB.
-        item.put({item.hash_key_name: False})
-
-
-def _save_autoincrement_hash_key(item):
-    while True:
-        max_hash_item = item.table.get_item(0, consistent_read=True)
-        max_hash_key = max_hash_item["__max_hash_key__"]
-        max_hash_item["__max_hash_key__"] += 1
-        try:
-            # Conditional write: we're overwriting iff the value hasn't changed
-            max_hash_item.put({"__max_hash_key__": max_hash_key})
-            break
-        except DynamoDBResponseError as e:
-            if e.error_code != "ConditionalCheckFailedException":
-                # Unhandled exception
-                raise
-            # The max key has changed (concurrent write): retry.
-
-    # We just reserved that value for the hash key
-    item[item.hash_key_name] = max_hash_key + 1
-    item.put()
-
-
 class ConnectionBorg(object):
     """Borg that handles access to DynamoDB.
 
             )
             return self._connections[thread_id]
 
+    def _create_autoincrement_magic_item(self, table):
+        item = table.new_item(hash_key=0, attrs={
+            "__max_hash_key__": 0
+        })
+        # Conditional write: don't risk overwriting the DB.
+        item.put({item.hash_key_name: False})
+
     def set_credentials(self, aws_access_key_id, aws_secret_access_key):
         """Set the DynamoDB credentials."""
         self._aws_access_key_id = aws_access_key_id
         table.refresh(wait_for_active=wait_for_active)
 
         if hash_key_type == autoincrement_int:
-            _create_autoincrement_magic_item(table)
+            self._create_autoincrement_magic_item(table)
 
         return table
 
         """
         return {name: getattr(self, name) for name in self.__schema__}
 
+    def _save_autoincrement_hash_key(self, item):
+        """Compute an autoincremented hash_key for an item and save it to the DB.
+
+        TODO Add schema checks.
+        """
+        while True:
+            max_hash_item = item.table.get_item(0, consistent_read=True)
+            max_hash_key = max_hash_item["__max_hash_key__"]
+            max_hash_item["__max_hash_key__"] += 1
+            try:
+                # Conditional write: we're overwriting iff the value hasn't changed
+                max_hash_item.put({"__max_hash_key__": max_hash_key})
+                break
+            except DynamoDBResponseError as e:
+                if e.error_code != "ConditionalCheckFailedException":
+                    # Unhandled exception
+                    raise
+                # The max key has changed (concurrent write): retry.
+
+        # We just reserved that value for the hash key
+        item[item.hash_key_name] = max_hash_key + 1
+        item.put()
+
     def save(self):
         """Save the object to the database.
 
         if (self.__schema__[self.__hash_key__] == autoincrement_int and
                 item_data[self.__hash_key__] == 0):
             # We're inserting a new item in an autoincrementing table.
-            _save_autoincrement_hash_key(item)
+            self._save_autoincrement_hash_key(item)
+            # Update the primary key so that it reflects what it was saved as.
+            setattr(self, self.__hash_key__, item[self.__hash_key__])
         else:
             item.put()
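The last hunk also copies the reserved key back onto the in-memory object, so callers can read the generated id immediately after save(). Continuing the illustrative LogEntry sketch from above:

    entry = LogEntry()
    entry.id = 0               # placeholder value for an autoincrement hash key
    entry.text = u"second entry"
    entry.save()               # _save_autoincrement_hash_key reserves the next id
    assert entry.id != 0       # the setattr in save() synced the instance with the DB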