Commits

Gregory Petukhov committed ad0fdbb

Add postgres test

  • Parent commit ab9682f

Files changed (1)

 #!/usr/bin/env python
-import MySQLdb
 from random import randint, shuffle
 import time
 from hashlib import sha1
-import pymongo
 import os
-import tc
 import marshal
 import zlib
 from bson import Binary
 import sys
 
-# Mysql connection
-conn = MySQLdb.connect(user='web', passwd='web-**')
-
-# Mongodb connection
-mongo = pymongo.Connection()['dbtest']
-
 # Number of read/write operations
-NUMBER = 100
+NUMBER = 2000
 
 # Size of data of one record in database
 #DATA_SIZE = 1000
     {'key': 'mysql', 'enable': 1, 'name': 'MySQL'},
     {'key': 'mongo', 'enable': 1, 'name': 'MongoDB'},
     {'key': 'tc', 'enable': 1, 'name': 'Tokyo Cabinet'},
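+    # Postgres driver ships disabled ('enable': 0); set it to 1 to include it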
+    {'key': 'postgres', 'enable': 0, 'name': 'Postgres'},
 ]
 
 
-def mysql_setup_database():
-    cur = conn.cursor()
-    cur.execute('drop database dbtest')
-    cur.execute('create database dbtest charset utf8')
-    cur.execute('use dbtest')
-    cur.execute('''
-        create table cache (
-            id int auto_increment primary key,
-            hash varchar(40),
-            data blob,
-            unique (hash)
-        ) engine = myisam
-    ''')
-
-
 def random_data(number, only_hash=False):#, data_size=DATA_SIZE):
     _hash = sha1(str(number)).hexdigest()
     if only_hash:
     return time.time() - ts
 
 
+def mysql_setup_database():
+    import MySQLdb
+
+    conn = MySQLdb.connect(user='web', passwd='web-**')
+    cur = conn.cursor()
+    cur.execute('drop database dbtest')
+    cur.execute('create database dbtest charset utf8')
+    cur.execute('use dbtest')
+    # columns dropped from the old schema:
+    #   id int auto_increment primary key,
+    #   unique (hash)
+    cur.execute('''
+        create table cache (
+            id varchar(40) primary key,
+            data blob not null
+        ) engine = myisam
+    ''')
+
+
 def mysql_write():
+    import MySQLdb
+
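+    # import and connect lazily, inside the function, so only enabled
+    # drivers need their client libraries installed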
+    conn = MySQLdb.connect(user='web', passwd='web-**')
     cur = conn.cursor()
+    cur.execute('use dbtest')
     for x in xrange(NUMBER):
         _hash, data = random_data(x)
         cur.execute('''
-            insert into cache (hash, data) values(%s, %s)
+            insert into cache (id, data) values(%s, %s)
         ''', (_hash, data))
         yield 1
 
 
 def mysql_read(hash_list):
+    import MySQLdb
+
+    conn = MySQLdb.connect(user='web', passwd='web-**')
     cur = conn.cursor()
+    cur.execute('use dbtest')
     #cur.execute('load index into cache `cache`')
     for _hash in hash_list:
         cur.execute('''
-            select id, hash, data from cache
-            where hash = %s
+            select id, data from cache
+            where id = %s
         ''', (_hash,))
-        _id, _hash, data = cur.fetchone()
+        _hash, data = cur.fetchone()
         page = parse_page(data)
         assert len(_hash) == 40
         assert len(page['body']) == len(DATA)
         yield 1
 
 
+def postgres_setup_database():
+    import psycopg2
+
+    conn = psycopg2.connect(user='lorien', database='dbtest')
+    cur = conn.cursor()
+    cur.execute('drop table if exists cache')
+    cur.execute('''
+        create table cache (
+            id bytea not null,
+            data bytea not null,
+            primary key (id)
+        );
+    ''')
+    conn.commit()
+
+
+def postgres_write():
+    import psycopg2
+
+    conn = psycopg2.connect(user='lorien', database='dbtest')
+    cur = conn.cursor()
+    for x in xrange(NUMBER):
+        _hash, data = random_data(x)
+        cur.execute('''
+            insert into cache (id, data) values(%s, %s)
+        ''', (_hash, psycopg2.Binary(data)))
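+        # commit after every insert: each write is timed as its own transaction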
+        conn.commit()
+        yield 1
+
+
+def postgres_read(hash_list):
+    import psycopg2
+
+    conn = psycopg2.connect(user='lorien', database='dbtest')
+    cur = conn.cursor()
+    for _hash in hash_list:
+        cur.execute('''
+            select id, data from cache
+            where id = %s
+        ''', (_hash,))
+        _hash, data = cur.fetchone()
+        page = parse_page(data)
+        assert len(_hash) == 40
+        assert len(page['body']) == len(DATA)
+        assert page['body'].startswith(DATA[:100])
+        yield 1
+
+
 def mongo_setup_database():
-    mongo.cache.drop()
+    import pymongo
+
+    db = pymongo.Connection()['dbtest']
+    db.cache.drop()
 
 
 def mongo_write():
+    import pymongo
+
+    db = pymongo.Connection()['dbtest']
     for x in xrange(NUMBER):
         _hash, data = random_data(x)
-        mongo.cache.save({'_id': _hash, 'data': Binary(data)}, safe=True)
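+        # safe=True blocks until the server acknowledges the write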
+        db.cache.save({'_id': _hash, 'data': Binary(data)}, safe=True)
         yield 1
 
 
 def mongo_read(hash_list):
+    import pymongo
+
+    db = pymongo.Connection()['dbtest']
     for _hash in hash_list:
-        item = mongo.cache.find_one({'_id': _hash})
+        item = db.cache.find_one({'_id': _hash})
         page = parse_page(item['data'])
         assert len(_hash) == 40
         assert len(page['body']) == len(DATA)
 
 
 def tc_setup_database():
+    import tc
+
     if os.path.exists('var/tc.db'):
         os.unlink('var/tc.db')
 
 
 def tc_write():
+    import tc
+
     db = tc.HDB()
     #db.tune(-1, -1, -1, tc.HDBTDEFLATE)
     db.open('var/tc.db', tc.HDBOWRITER | tc.HDBOCREAT)
 
 
 def tc_read(hash_list):
+    import tc
+
     db = tc.HDB()
     #db.tune(-1, -1, -1, tc.HDBTDEFLATE)
     db.open('var/tc.db', tc.HDBOWRITER | tc.HDBOCREAT)
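
The benchmark runner itself sits outside the hunks shown above. Judging from the `return time.time() - ts` fragment, the `yield 1` convention, and the DRIVERS registry, each benchmark function is presumably timed by exhausting its generator, with dispatch on the driver's `key`. A minimal sketch of such a runner under exactly those assumptions (the names `bench` and `run_all` are illustrative, not from the commit; `random_data` with only_hash=True is assumed to return just the hash):

import time
from random import shuffle

def bench(func, *args):
    # Exhaust a benchmark generator and return the elapsed wall-clock time.
    ts = time.time()
    for _ in func(*args):
        pass
    return time.time() - ts

def run_all():
    # Pre-generate the keys to read back, in random order.
    hash_list = [random_data(x, only_hash=True) for x in xrange(NUMBER)]
    shuffle(hash_list)
    for driver in DRIVERS:
        if not driver['enable']:
            continue
        key = driver['key']
        # Dispatch on the <key>_setup_database/<key>_write/<key>_read convention.
        globals()['%s_setup_database' % key]()
        write_time = bench(globals()['%s_write' % key])
        read_time = bench(globals()['%s_read' % key], hash_list)
        print '%s: write %.2fs, read %.2fs' % (driver['name'], write_time, read_time)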