# AdaCode / irelogs.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
This script collects orglogs from achaea so we get to keep a copy before they're purged
Ref: https://bitbucket.org/adayoung/adacode/src/master/irelogs.py
"""

import sys
import json
import urllib2
import sqlite3
import optparse
from urllib import urlencode
from datetime import datetime

# Shared urllib2 opener with identifying headers so the API operators can
# see who is scraping and where the script lives.
browser = urllib2.build_opener()
browser.addheaders = [
		('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
		('Accept-Language', 'en-US,en;q=0.5'),
		('Connection', 'keep-alive'),
		('User-agent', '~*Ada\'s pretty script for getting orglogs*~ (Please see https://bitbucket.org/adayoung/adacode/src/master/irelogs.py)'),
	]

# Base URL of the Achaea API and the orglogs summary resource on it.
api_server = "http://api.achaea.com"
orglogs_endpoint = "/orglogs.json"

# Placeholder globals; main() rebinds these to a real sqlite3 connection
# and cursor before any function below is called.
db = "A global object which we'll instantiate in main()"
cursor = "A global object which we'll instantiate in main()"

def createdb():
	"""
	createdb() -> None
	Create the orgs / org_logs tables and the lookup index if they are
	missing, then commit. Relies on the module-level db/cursor globals.
	"""
	# All schema statements, executed in order; foreign keys must be
	# switched on per-connection for the ON DELETE CASCADE to work.
	ddl = (
		"PRAGMA foreign_keys=ON",
		"""
	CREATE TABLE IF NOT EXISTS orgs (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		orgname TEXT UNIQUE
	)""",
		"""
	CREATE TABLE IF NOT EXISTS org_logs (
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		timestamp TEXT,
		message TEXT,
		log_type TEXT,
		orgid INTEGER REFERENCES orgs(id) ON DELETE CASCADE
	)""",
		"""
	CREATE INDEX IF NOT EXISTS logs_index ON org_logs (orgid, timestamp, message)
	""",
	)
	for statement in ddl:
		cursor.execute(statement)
	db.commit()

def ProcessOrg(orgname):
	"""
	ProcessOrg(orgname) -> orgid
	Return the id for orgname, inserting a new orgs row first if the
	name is not in the database yet. Does not commit; ProcessLogs()
	commits the surrounding batch.
	"""
	cursor.execute("SELECT id FROM orgs WHERE orgname=?", (orgname, ))
	row = cursor.fetchone()  # orgname is UNIQUE, so one row at most
	if row is not None:
		return row[0]
	cursor.execute("INSERT INTO orgs (orgname) VALUES (?)", (orgname, ))
	# sqlite3 exposes the new rowid directly; no need for a second
	# SELECT last_insert_rowid() round-trip.
	return cursor.lastrowid

def ProcessLogs(lognotes, orgid, log_type="General"):
	"""
	ProcessLogs(lognotes, orgid, log_type) -> None
	Insert each "timestamp - message" lognote for orgid, skipping notes
	already in the database; the log_type is "General" for now.

	Each lognote is formatted by process_orglogs() as
	"YYYY-MM-DD HH:MM:SS - message".
	"""
	import_map = []
	for lognote in lognotes:
		# BUGFIX: the old split('-') / ' '.join() round-trip replaced
		# every hyphen inside the message with a space and left a
		# trailing space on the timestamp; partition on the first
		# " - " separator keeps both parts intact.
		timestamp, _, message = lognote.partition(" - ")
		cursor.execute("SELECT * FROM org_logs l join orgs o on l.orgid=o.id WHERE o.id=? AND l.timestamp=datetime(?) AND l.message=? AND l.log_type=?", (orgid, timestamp, message, log_type))
		if cursor.fetchone() is None:
			import_map.append((orgid, timestamp, message, log_type))
	cursor.executemany("INSERT INTO org_logs (orgid, timestamp, message, log_type) VALUES (?, datetime(?), ?, ?)", import_map)
	db.commit()

def http_open(link):
	"""
	http_open(link) -> JSON object
	Fetch link with the module-level opener and return the decoded JSON.
	An HTTPError or undecodable payload yields an empty list; any other
	fetch failure aborts the script.
	"""
	print("Fetching: %s" % link)
	try:
		response = browser.open(link)
		data = response.read()
	except urllib2.HTTPError:
		print("Encountered HTTPError for %s" % link, sys.exc_info())
		print("Setting empty json object -> []")
		return []
	except Exception:
		# Narrowed from a bare except so Ctrl-C (KeyboardInterrupt)
		# and SystemExit still propagate normally.
		print("Error while opening %s" % link, sys.exc_info())
		sys.exit()

	try:
		return json.loads(data)
	except ValueError:  # json raises ValueError on malformed input
		print("Error decoding JSON object from %s" % link, sys.exc_info())
		print("Setting empty JSON object -> []")
		return []

def orglogs_summary(character, password, days=0):
	"""
	orglogs_summary(character, password, days) -> dict
	Query the orglogs summary endpoint and map each org name to the
	full (credentialed) uri for that org's log listing.
	"""
	credentials = urlencode({
			"character" : character,
			"password" : password,
			"day" : days
		})
	summary_link = "%s?%s" % ("%s%s" % (api_server, orglogs_endpoint), credentials)
	dataset = http_open(summary_link)

	# Collect every uri regardless of the reported 'count' -- the api
	# server's count field is broken, so just fetch them all.
	return dict(
		(entry.get('name', 'NONAME'), "%s?%s" % (entry.get('uri', ''), credentials))
		for entry in dataset
	)

def process_orglogs(uri_set):
	"""
	process_orglogs(uri_set) -> None
	For each {orgname: uri} pair, fetch the org's log entries and store
	them via ProcessLogs(). Timestamps arrive as unix epochs and are
	rendered as "YYYY-MM-DD HH:MM:SS".
	"""
	# The original code reused `i` for both the org loop and the entry
	# loop; iterate items() with distinct names instead.
	for orgname, uri in uri_set.items():
		orgid = ProcessOrg(orgname)

		dataset = http_open(uri)

		lognotes = []
		for entry in dataset:
			epoch = entry.get('date', 0)
			timestamp = datetime.fromtimestamp(int(epoch)).strftime("%Y-%m-%d %H:%M:%S")

			lognote = entry.get('event', '')
			lognotes.append("%s - %s" % (timestamp, lognote))

		ProcessLogs(lognotes, orgid)

def main():
	"""
	Command-line entry point: validate options, open the database,
	create the schema if needed, then fetch and store the orglogs.
	"""
	parser = optparse.OptionParser()
	parser.add_option('-c', '--charname', help='Your Achaea character name')
	parser.add_option('-p', '--password', help='Your Achaea password')
	parser.add_option('-d', '--days', help='Number of days to go back (max: 6)')
	parser.add_option('-D', '--database', help="The database file")
	opts, args = parser.parse_args()

	# Bail out (with usage) on the first missing required option.
	required = (
		(opts.database, "You need to specify a database file\n"),
		(opts.charname, "You need to specify a charname\n"),
		(opts.password, "You need to specify a password\n"),
	)
	for value, complaint in required:
		if value is None:
			print(complaint)
			parser.print_help()
			exit(-1)

	# Bind the module-level connection/cursor used by every helper.
	global cursor
	global db

	db = sqlite3.connect(opts.database)
	cursor = db.cursor()
	createdb()

	summary = orglogs_summary(opts.charname, opts.password, opts.days or 0)
	process_orglogs(summary)

	db.close()

# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
	main()