Commits

Simon Cross committed 4930fbe

Fix all the tests.

  • Parent commits 2a6eb8a

Files changed (1)

File txriak/tests.py
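
For context, here is a minimal standalone sketch of the client API these tests exercise. It assumes a Riak node reachable on the client's default host and port; the bucket name and the demo/main helpers are purely illustrative, not part of the commit:

    import sys
    from twisted.internet import defer, reactor
    from twisted.python import log
    from txriak import riak

    log.startLogging(sys.stderr)

    @defer.inlineCallbacks
    def demo():
        # store a JSON object, read it back, then purge the scratch bucket,
        # mirroring what setUp/tearDown do in the tests below
        client = riak.RiakClient(client_id='TEST')
        bucket = client.bucket('txriak.tests.demo')
        yield bucket.new('foo', {'field1': 'val1'}).store()
        obj = yield bucket.get('foo')
        log.msg('fetched: %r' % (obj.get_data(),))
        yield bucket.purge_keys()

    def main():
        demo().addErrback(log.err).addBoth(lambda _: reactor.stop())

    reactor.callWhenRunning(main)
    reactor.run()

The suite itself is normally run against a live node with Twisted's trial runner (e.g. "trial txriak.tests").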

 See txriak.LICENSE for details.
 """
 
-import os
-import sys
 import json
 import random
 from twisted.trial import unittest
 
 VERBOSE = False
 
-# Since dev directory is not on path, force our parent
-# directory onto python path.
-PATH = os.path.abspath(os.path.dirname(__file__))
-sys.path.insert(0, PATH)
-import riak
+from txriak import riak
 
-log.startLogging(sys.stderr)
-
+# uncomment to activate logging
+# import sys
+# log.startLogging(sys.stderr)
 
 RIAK_CLIENT_ID = 'TEST'
-BUCKET = 'bucket'
+BUCKET_PREFIX = 'txriak.tests.'
+
+JAVASCRIPT_SUM = """
+function(v) {
+  x = v.reduce(function(a,b){ return a + b }, 0);
+  return [x];
+}
+"""
 
 
 def randint():
     return random.randint(1, 999999)
 
 
-@defer.inlineCallbacks
-def cleanup_bucket(keys):
-    """
-    Delete objects defined by passed-in key.
-    Bucket we're working with is global.
-    Objects may not exist, and this is ok.
-    """
-    client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-    bucket = client.bucket(BUCKET)
-
-    for key in keys:
-        if VERBOSE:
-            log.msg('deleting: %s' % key)
-        obj = yield bucket.get(key)
-        yield obj.delete()
-
-    yield bucket.set_allow_multiples(False)
-
-
 class RiakTestCase1(unittest.TestCase):
     """
     trial unit tests.
     """
 
+    test_keys = ['foo', 'foo1', 'foo2', 'foo3', 'bar', 'baz', 'ba_foo1',
+                 'blue_foo1']
+
+    @defer.inlineCallbacks
+    def setUp(self):
+        self.client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
+        self.bucket_name = BUCKET_PREFIX + self.id().rsplit('.', 1)[-1]
+        self.bucket = self.client.bucket(self.bucket_name)
+        yield self.bucket.purge_keys()
+
     @defer.inlineCallbacks
     def tearDown(self):
-        """delete all the bucket objects we might be using"""
-        keys = ['foo', 'foo1', 'foo2', 'bar', 'baz', 'ba_foo1'
-                'foo1', 'foo2', 'foo3', 'blue_foo1']
-        yield cleanup_bucket(keys)
+        yield self.bucket.purge_keys()
 
     @defer.inlineCallbacks
     def test_secondary_index(self):
         log.msg("*** secondary_index")
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        yield bucket.enable_search()
+        yield self.bucket.enable_search()
 
-
-        obj = bucket.new('foo1', {'field1': 'val1', 'field2': 1001})
+        obj = self.bucket.new('foo1', {'field1': 'val1', 'field2': 1001})
         obj.add_index('field1_bin', 'val1')
         obj.add_index('field2_int', 1001)
         yield obj.store()
 
-        obj = bucket.new('foo2', {'field1': 'val2', 'field2': 1003})
+        obj = self.bucket.new('foo2', {'field1': 'val2', 'field2': 1003})
         obj.add_index('field1_bin', 'val2')
         obj.add_index('field2_int', 1003)
         yield obj.store()
 
-        results = yield client.index('bucket', 'field1_bin', 'val2').run()
-        self.assertEqual(results[0], [u'bucket', u'foo2'])
+        results = yield self.client.index(self.bucket_name,
+                                          'field1_bin', 'val2').run()
+        self.assertEqual(results[0], [self.bucket_name, u'foo2'])
 
-        results = yield client.index('bucket', 'field2_int', 1, 2000).run()
+        results = yield self.client.index(self.bucket_name, 'field2_int', 1,
+                                          2000).run()
 
-        self.assertEqual(results, [[u'bucket', u'foo2'], [u'bucket', u'foo1']])
+        self.assertEqual(sorted(results),
+                         [[self.bucket_name, u'foo1'],
+                          [self.bucket_name, u'foo2']])
 
         log.msg("done secondary_index")
 
         """Basic adds and deletes"""
         log.msg("*** add_and_delete")
 
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo1", "test1")
+        obj = self.bucket.new("foo1", "test1")
         yield obj.store()
 
         self.assertEqual(obj.exists(), True)
         obj.set_data('bar1')
         yield obj.store()
 
-        obj = yield bucket.get("foo1")
+        obj = yield self.bucket.get("foo1")
         self.assertEqual(obj.exists(), True)
         self.assertEqual(obj.get_data(), "bar1")
 
         yield obj.delete()
 
-        obj = yield bucket.get("foo1")
+        obj = yield self.bucket.get("foo1")
         self.assertEqual(obj.exists(), False)
         log.msg("done add_and_delete")
 
         """Test searching buckets"""
         log.msg("*** riak_search")
 
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket1 = client.bucket("bucket1")
+        yield self.bucket.enable_search()
 
-        yield bucket1.enable_search()
-
-        se = yield bucket1.search_enabled()
+        se = yield self.bucket.search_enabled()
         self.assertEqual(se, True)
 
-        obj1 = bucket1.new("foo1", {"foo":"test1"})
+        obj1 = self.bucket.new("foo1", {"foo": "test1"})
         yield obj1.store()
 
-        s = client.search('bucket1', 'foo:test1')
+        s = self.client.search(self.bucket_name, 'foo:test1')
         keys = yield s.run()
 
         self.assertTrue(keys[0][1] == u'foo1')
 
         yield obj1.delete()
+        yield self.bucket.disable_search()
 
     @defer.inlineCallbacks
     def test_list_keys(self):
         """Test listing all keys in bucket."""
         log.msg("*** list_keys")
 
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo1", "test1")
+        obj = self.bucket.new("foo1", "test1")
         yield obj.store()
-        obj1 = bucket.new("foo2", "test2")
+        obj1 = self.bucket.new("foo2", "test2")
         yield obj1.store()
 
-        keys = yield bucket.list_keys()
-        self.assertEqual(sorted([u"foo1", u"foo2"]), sorted(keys))
+        keys = yield self.bucket.list_keys()
+        self.assertEqual([u"foo1", u"foo2"], sorted(keys))
 
     @defer.inlineCallbacks
     def test_purge_keys(self):
         """Test purging all keys in a bucket."""
         log.msg("*** purge_keys")
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo1", "test1")
+
+        obj = self.bucket.new("foo1", "test1")
         yield obj.store()
-        obj1 = bucket.new("foo2", "test2")
+        obj1 = self.bucket.new("foo2", "test2")
         yield obj1.store()
 
-        yield bucket.purge_keys()
-        keys = yield bucket.list_keys()
+        yield self.bucket.purge_keys()
+
+        # FIXME: nasty hack to work around purge_keys returning
+        # too soon (or maybe list_keys being weird).
+        import time
+        start = time.time()
+        while True:
+            keys = yield self.bucket.list_keys()
+            if not keys or time.time() - start > 10:
+                break
+            time.sleep(0.2)
         self.assertEqual([], keys)
 
     @defer.inlineCallbacks
         """Test listing all buckets."""
         log.msg("*** list_buckets")
 
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket1 = client.bucket("bucket1")
-        obj1 = bucket1.new("foo1", "test1")
+        obj1 = self.bucket.new("foo1", "test1")
         yield obj1.store()
 
-        bucket2 = client.bucket("bucket2")
+        bucket2_name = "%s2" % self.bucket_name
+        bucket2 = self.client.bucket(bucket2_name)
         obj2 = bucket2.new("foo2", "test2")
         yield obj2.store()
 
-        buckets = yield client.list_buckets()
-        self.assertTrue(u"bucket1" in buckets)
-        self.assertTrue(u"bucket2" in buckets)
+        buckets = yield self.client.list_buckets()
+        # just check that these two buckets exist in case
+        # there are buckets not related to these tests
+        self.assertTrue(self.bucket_name in buckets)
+        self.assertTrue(bucket2_name in buckets)
 
         # Cleanup after ourselves
         yield obj1.delete()
     def test_is_alive(self):
         """Can we ping the riak server."""
         log.msg('*** is_alive')
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        client_id = client.get_client_id()
+        client_id = self.client.get_client_id()
         self.assertEqual(client_id, RIAK_CLIENT_ID)
-        alive = yield client.is_alive()
+        alive = yield self.client.is_alive()
         self.assertEqual(alive, True)
         log.msg('done is_alive')
 
     def test_store_and_get(self):
         """Store and get text data."""
         log.msg('*** store_and_get')
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket(BUCKET)
         data = 'blueprint'
-        obj = bucket.new('blue_foo1', data)
+        obj = self.bucket.new('blue_foo1', data)
         yield obj.store()
-        del(obj)
+        del obj
 
-        obj1 = yield bucket.get('blue_foo1')
+        obj1 = yield self.bucket.get('blue_foo1')
         self.assertEqual(obj1.exists(), True)
-        self.assertEqual(obj1.get_bucket().get_name(), BUCKET)
+        self.assertEqual(obj1.get_bucket().get_name(), self.bucket_name)
         self.assertEqual(obj1.get_key(), 'blue_foo1')
         self.assertEqual(obj1.get_data(), data)
         log.msg('done store_and_get')
         """store and get binary data."""
 
         log.msg('*** binary_store_and_get')
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket(BUCKET)
 
         # Store as binary, retrieve as binary, then compare...
         rand = str(randint())
-        obj = bucket.new_binary('foo1', rand)
+        obj = self.bucket.new_binary('foo1', rand)
         yield obj.store()
-        del(obj)
+        del obj
 
-        obj = yield bucket.get_binary('foo1')
+        obj = yield self.bucket.get_binary('foo1')
         self.assertEqual(obj.exists(), True)
         self.assertEqual(obj.get_data(), rand)
-        del(obj)
+        del obj
 
         # Store as JSON, retrieve as binary, JSON-decode, then compare...
         data = [randint(), randint(), randint()]
-        obj = bucket.new('foo2', data)
+        obj = self.bucket.new('foo2', data)
         yield obj.store()
-        del(obj)
+        del obj
 
-        obj = yield bucket.get_binary('foo2')
+        obj = yield self.bucket.get_binary('foo2')
         self.assertEqual(data, json.loads(obj.get_data()))
         log.msg('done binary_store_and_get')
 
     def test_missing_object(self):
         """handle missing objects."""
         log.msg('*** missing_object')
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket(BUCKET)
-        obj = yield bucket.get("missing")
+        obj = yield self.bucket.get("missing")
         self.assertEqual(not obj.exists(), True)
         self.assertEqual(obj.get_data(), None)
         log.msg('done missing_object')
     def test_delete(self):
         """delete objects"""
         log.msg('*** delete')
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket(BUCKET)
         rand = randint()
-        obj = bucket.new('foo', rand)
+        obj = self.bucket.new('foo', rand)
         yield obj.store()
-        obj = yield bucket.get('foo')
+        obj = yield self.bucket.get('foo')
         self.assertEqual(obj.exists(), True)
         yield obj.delete()
         yield obj.reload()
     def test_set_bucket_properties(self):
         """manipulate bucket properties"""
         log.msg('*** set_bucket_properties')
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket(BUCKET)
         # Test setting allow mult...
-        yield bucket.set_allow_multiples(True)
-        is_multiples = yield bucket.get_allow_multiples()
+        yield self.bucket.set_allow_multiples(True)
+        is_multiples = yield self.bucket.get_allow_multiples()
         self.assertEqual(is_multiples, True)
         # Test setting nval...
-        yield bucket.set_n_val(3)
-        n_val = yield bucket.get_n_val()
+        yield self.bucket.set_n_val(3)
+        n_val = yield self.bucket.get_n_val()
         self.assertEqual(n_val, 3)
         # Test setting multiple properties...
-        yield bucket.set_properties({"allow_mult": False, "n_val": 2})
-        is_multiples = yield bucket.get_allow_multiples()
-        n_val = yield bucket.get_n_val()
+        yield self.bucket.set_properties({"allow_mult": False, "n_val": 2})
+        is_multiples = yield self.bucket.get_allow_multiples()
+        n_val = yield self.bucket.get_n_val()
         self.assertEqual(is_multiples, False)
         self.assertEqual(n_val, 2)
         log.msg('done set_bucket_properties')
         log.msg('*** siblings')
 
         # Set up the bucket, clear any existing object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket('multiBucket')
-        yield bucket.set_allow_multiples(True)
-        obj = yield bucket.get('foo')
+        yield self.bucket.set_allow_multiples(True)
+        obj = yield self.bucket.get('foo')
         yield obj.delete()
 
-        obj = yield bucket.get('foo')
+        obj = yield self.bucket.get('foo')
         self.assertEqual(obj.exists(), False)
 
         # Store the same object multiple times...
             # on each pass and it must have a different client_id.
             # calling RiakClient without params uses a randomly-generated id.
             client = riak.RiakClient()
-            bucket = client.bucket('multiBucket')
-            obj = bucket.new('foo', randint())
-            yield obj.store()
+            bucket = client.bucket(self.bucket_name)
+            yield bucket.new('foo', randint()).store()
 
         # Make sure the object has 5 siblings...
+        yield obj.reload()
         self.assertEqual(obj.has_siblings(), True)
         self.assertEqual(obj.get_sibling_count(), 5)
 
         """javascript mapping"""
         log.msg('*** javascript_source_map')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo", 2)
+        obj = self.bucket.new("foo", 2)
         yield obj.store()
         # Run the map...
-        job = client \
-                .add("bucket", "foo") \
+        job = self.client \
+                .add(self.bucket_name, "foo") \
                 .map("function (v) { return [JSON.parse(v.values[0].data)]; }")
         result = yield job.run()
         self.assertEqual(result, [2])
         """javascript mapping with named map"""
         log.msg('*** javascript_named_map')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo", 2)
+        obj = self.bucket.new("foo", 2)
         yield obj.store()
         # Run the map...
-        job = client \
-                .add("bucket", "foo") \
+        job = self.client \
+                .add(self.bucket_name, "foo") \
                 .map("Riak.mapValuesJson")
         result = yield job.run()
         self.assertEqual(result, [2])
         """javascript map reduce"""
         log.msg('*** javascript_source_map_reduce')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        yield bucket.new("foo", 2).store()
-        yield bucket.new("bar", 3).store()
-        yield bucket.new("baz", 4).store()
+        yield self.bucket.new("foo", 2).store()
+        yield self.bucket.new("bar", 3).store()
+        yield self.bucket.new("baz", 4).store()
         # Run the map...
-        job = client \
-                .add("bucket", "foo") \
-                .add("bucket", "bar") \
-                .add("bucket", "baz") \
+        job = self.client \
+                .add(self.bucket_name, "foo") \
+                .add(self.bucket_name, "bar") \
+                .add(self.bucket_name, "baz") \
                 .map("function (v) { return [1]; }") \
-                .reduce("function(v) { return [v.length]; } ")
+                .reduce(JAVASCRIPT_SUM)
         result = yield job.run()
         self.assertEqual(result, [3])
         log.msg('done javascript_source_map_reduce')
         """javascript map reduce by name"""
         log.msg('*** javascript_named_map_reduce')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        yield bucket.new("foo", 2).store()
-        yield bucket.new("bar", 3).store()
-        yield bucket.new("baz", 4).store()
+        yield self.bucket.new("foo", 2).store()
+        yield self.bucket.new("bar", 3).store()
+        yield self.bucket.new("baz", 4).store()
         # Run the map...
-        job = client \
-                .add("bucket", "foo") \
-                .add("bucket", "bar") \
-                .add("bucket", "baz") \
+        job = self.client \
+                .add(self.bucket_name, "foo") \
+                .add(self.bucket_name, "bar") \
+                .add(self.bucket_name, "baz") \
                 .map("Riak.mapValuesJson") \
                 .reduce("Riak.reduceSum")
         result = yield job.run()
     def test_javascript_key_filter_map_reduce(self):
         """javascript map/reduce using key filters"""
         log.msg("javascript map reduce with key filter")
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        yield bucket.new("foo", 2).store()
-        yield bucket.new("bar", 3).store()
-        yield bucket.new("baz", 4).store()
+        yield self.bucket.new("foo", 2).store()
+        yield self.bucket.new("bar", 3).store()
+        yield self.bucket.new("baz", 4).store()
         # Run the map...
-        job = client \
-                .add({"bucket": "bucket",
+        job = self.client \
+                .add({"bucket": self.bucket_name,
                       "key_filters": [["starts_with", "ba"]]}) \
                 .map("function (v) { return [1]; }") \
-                .reduce(
-            "function(v) { if(v.length) return [v.length]; else return []} ")
+                .reduce(JAVASCRIPT_SUM)
         result = yield job.run()
         self.assertEqual(result, [2])
         log.msg('done javascript_key_filter_map_reduce')
         """javascript bucket map reduce"""
         log.msg('*** javascript_bucket_map_reduce')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket_name = "bucket_%s" % randint()
-        bucket = client.bucket(bucket_name)
-        bucket.new("foo", 2).store()
-        bucket.new("bar", 3).store()
-        bucket.new("baz", 4).store()
+        yield self.bucket.new("foo", 2).store()
+        yield self.bucket.new("bar", 3).store()
+        yield self.bucket.new("baz", 4).store()
         # Run the map...
-        job = client \
-                .add(bucket.get_name()) \
+        job = self.client \
+                .add(self.bucket_name) \
                 .map("Riak.mapValuesJson") \
                 .reduce("Riak.reduceSum")
         result = yield job.run()
         """javascript arguments map reduce"""
         log.msg('*** javascript_arg_map_reduce')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo", 2)
-        obj.store()
+        obj = self.bucket.new("foo", 2)
+        yield obj.store()
         # Run the map...
-        job = client \
-                .add("bucket", "foo", 5) \
-                .add("bucket", "foo", 10) \
-                .add("bucket", "foo", 15) \
-                .add("bucket", "foo", -15) \
-                .add("bucket", "foo", -5) \
+        job = self.client \
+                .add(self.bucket_name, "foo", 5) \
+                .add(self.bucket_name, "foo", 10) \
+                .add(self.bucket_name, "foo", 15) \
+                .add(self.bucket_name, "foo", -15) \
+                .add(self.bucket_name, "foo", -5) \
                 .map("function(v, arg) { return [arg]; }") \
                 .reduce("Riak.reduceSum")
         result = yield job.run()
         log.msg('done javascript_arg_map_reduce')
 
     @defer.inlineCallbacks
-    def test_javascript_source_map(self):
+    def test_javascript_manual_map(self):
         """manual javascript mapping"""
         log.msg('*** manual javascript_source_map')
 
         # create something to find
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket(BUCKET)
-        data = 'blueprint'
-        obj = bucket.new('blue_foo1', data)
+        obj = self.bucket.new('blue_foo1', 'blueprint')
         yield obj.store()
 
-        job = client.set_mapreduce(
+        job = self.client.set_mapreduce(
 """
 {"inputs": "%(bucket)s",
  "query":[{"map":{"language": "javascript",
                  }"},
            },
           ]}
-""" % dict(bucket=BUCKET
-          ))
+""" % dict(bucket=self.bucket_name))
 
         result = yield job.run()
         self.assertEqual([u'blue_foo1', ], result)
         """erlang map reduce"""
         log.msg('*** erlang_map_reduce')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo", 2)
+        obj = self.bucket.new("foo", 2)
         yield obj.store()
 
-        obj = bucket.new("bar", 2)
+        obj = self.bucket.new("bar", 2)
         yield obj.store()
 
-        obj = bucket.new("baz", 4)
+        obj = self.bucket.new("baz", 4)
         yield obj.store()
 
         # Run the map...
-        job = client \
-                .add("bucket", "foo") \
-                .add("bucket", "bar") \
-                .add("bucket", "baz") \
+        job = self.client \
+                .add(self.bucket_name, "foo") \
+                .add(self.bucket_name, "bar") \
+                .add(self.bucket_name, "baz") \
                 .map(["riak_kv_mapreduce", "map_object_value"]) \
                 .reduce(["riak_kv_mapreduce", "reduce_set_union"])
         result = yield job.run()
         """map reduce from an object"""
         log.msg('*** map_reduce_from_object')
         # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        bucket.new("foo", 2).store()
-        obj = yield bucket.get("foo")
+        yield self.bucket.new("foo", 2).store()
+        obj = yield self.bucket.get("foo")
         job = obj.map("Riak.mapValuesJson")
         result = yield job.run()
         self.assertEqual(result, [2])
         # so there's now something wrong with link storage.
         log.msg('*** store_and_get_links')
 
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj = bucket.new("foo", 2) \
-                .add_link(bucket.new("foo1")) \
-                .add_link(bucket.new("foo2"), "tag") \
-                .add_link(bucket.new("foo3"), "tag2!@#%^&*)")
+        obj = self.bucket.new("foo", 2) \
+                .add_link(self.bucket.new("foo1")) \
+                .add_link(self.bucket.new("foo2"), "tag") \
+                .add_link(self.bucket.new("foo3"), "tag2!@#%^&*)")
         yield obj.store()
-        del(obj)
+        del obj
 
         log.msg("Get the Links")
-        obj = yield bucket.get("foo")
+        obj = yield self.bucket.get("foo")
         links = obj.get_links()
         self.assertEqual(len(links), 3)
         log.msg('done store_and_get_links')
     def test_link_walking(self):
         """walk links"""
         log.msg('*** link_walking')
-        # Create the object...
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-        obj_1 = bucket.new("foo1", "test1")
+        obj_1 = self.bucket.new("foo1", "test1")
         yield obj_1.store()
 
-        obj_2 = bucket.new("foo2", "test2")
+        obj_2 = self.bucket.new("foo2", "test2")
         yield obj_2.store()
 
-        obj_3 = bucket.new("foo3", "test3")
+        obj_3 = self.bucket.new("foo3", "test3")
         yield obj_3.store()
 
-        obj = bucket.new("foo", 2) \
+        obj = self.bucket.new("foo", 2) \
                 .add_link(obj_1) \
                 .add_link(obj_2, "tag") \
                 .add_link(obj_3, "tag2!@#%^&*)")
         yield obj.store()
-        obj = yield bucket.get("foo")
-        job = obj.link("bucket")
+        obj = yield self.bucket.get("foo")
+        job = obj.link(self.bucket_name)
         results = yield job.run()
         self.assertEqual(len(results), 3)
-        results = yield obj.link("bucket", "tag").run()
+        results = yield obj.link(self.bucket_name, "tag").run()
         self.assertEqual(len(results), 1)
         log.msg('done link_walking')
 
         key = "foo1"
         key_data = "test1"
 
-        # set up the bucket
-        client = riak.RiakClient(client_id=RIAK_CLIENT_ID)
-        bucket = client.bucket("bucket")
-
         # be sure object is deleted before we start
-        obj = yield bucket.get(key)
+        obj = yield self.bucket.get(key)
         yield obj.delete()
 
         # now get a fresh new one
-        obj = bucket.new(key, key_data)
+        obj = self.bucket.new(key, key_data)
 
         # see we can store and get back a header
         meta_key = 'this-is-a-test'