# wdmmg/controllers/api.py
# (Header reconstructed: the original lines here were code-hosting page
# chrome — "wdmmg / wdmmg / controllers / ... Full commit" — not code.)
import logging

from pylons import config
from pylons import request, response, session, tmpl_context as c, url, app_globals
from pylons.controllers.util import abort, redirect
from pylons.decorators.cache import beaker_cache

from wdmmg.lib.base import BaseController, render
# NOTE(review): the original line was corrupted to "from import jsonpify" —
# the module path was lost. wdmmg.lib.jsonp is the conventional location;
# TODO confirm against the repository.
from wdmmg.lib.jsonp import jsonpify
import wdmmg.model as model
import wdmmg.lib.aggregator as aggregator
import wdmmg.lib.calculator as calculator

log = logging.getLogger(__name__)
# Truthiness of the Pylons 'debug' config flag; used to invalidate caches
# on startup during development.
DEBUG = bool(config.get('debug', False))

# json is in the standard library from Python 2.6; fall back to the
# third-party simplejson package on older interpreters.
try:
    import json
except ImportError:
    import simplejson as json

class ApiController(BaseController):
    """JSON/JSONP API endpoints: documentation page, Solr search,
    spending aggregation and the 2010 tax calculator.

    NOTE(review): the source this was recovered from was corrupted (many
    statements truncated mid-expression).  Reconstructed spans are marked
    with NOTE(review)/TODO comments below — confirm each against version
    control before relying on the exact values.
    """

    def index(self):
        """Render the API documentation page, with example URLs in ``c``."""
        c.rest_url = url(controller='rest', action='index')
        # Construct query strings by hand to keep the parameters in an
        # instructive order.
        # NOTE(review): the example query parameters below were lost in the
        # corrupted source; the continuation structure is preserved but the
        # literal values are placeholders. TODO restore from version control.
        c.aggregate_url = url(controller='api', action='aggregate') + \
            '?dataset=%s' % app_globals.default_dataset + \
            '&breakdown-region=yes'
        c.mytax_url = url(controller='api', action='mytax') + \
            '?income=20000&spending=10000'
        c.jsonp_url = '&'.join(c.mytax_url.split('&')[:-2] +
            ['callback=myCallback'])
        return render('home/api.html')

    # Consider moving _jsonify and _jsonpify to superclass?
    def _jsonify(self, result):
        """Serialize ``result`` to JSON and set the response content type."""
        response.content_type = 'application/json'
        out = json.dumps(result)
        # Note: pylons will automatically convert to relevant charset.
        return unicode(out)

    def _jsonpify(self, result):
        """Serialize ``result`` as JSONP, wrapped in the caller-supplied
        ``callback`` request parameter."""
        response.content_type = 'text/javascript'
        # NOTE(review): second format argument was truncated in the source;
        # json.dumps(result) is the only value that fits the contract.
        return u'%s(%s);' % (request.params['callback'],
            json.dumps(result))

    def search(self):
        """Proxy a search query to Solr, returning its raw JSON response.

        Caps ``rows`` at 1000 and defaults the query to match-all.
        """
        solrargs = dict(request.params)
        # BUG FIX: request parameters are strings, and in Python 2
        # min(1000, '10') always returns 1000 (ints sort before strings),
        # so the cap never worked.  Coerce to int first.
        rows = min(1000, int(request.params.get('rows', 10)))
        q = request.params.get('q', '*:*')
        solrargs['q'] = q
        solrargs['rows'] = rows
        solrargs['wt'] = 'json'
        solrargs['sort'] = 'score desc, amount desc'
        # Let Solr wrap the response for JSONP callers.
        if 'callback' in solrargs and not 'json.wrf' in solrargs:
            solrargs['json.wrf'] = solrargs['callback']
        if not 'sort' in solrargs:
            solrargs['sort'] = 'score desc,amount desc'
        query = app_globals.solr.raw_query(**solrargs)
        response.content_type = 'application/json'
        return query

    def aggregate(self):
        """Public entry point for aggregation queries.

        Strips the JSONP ``callback`` parameter (so it does not pollute the
        cache key) and forwards the remaining parameters, flattened into
        parallel key/value lists, to the cached ``_aggregate``.
        """
        # delete the callback param if it exists so that we don't cache on it
        params = dict(request.params)
        if 'callback' in params:
            del params['callback']
        # this is weird but we cannot do **kwargs type stuff because we have
        # keys of form 'breakdown-from'
        # and caching based on a single dictionary arg will not work well
        keys = sorted(params.keys())  # sort keys for a stable cache key
        values = [params[k] for k in keys]
        return self._aggregate(keys, values)

    # NOTE(review): this decorator was reconstructed from two stranded
    # keyword-argument lines in the corrupted source; further kwargs
    # (e.g. cache_response/query_args) may have been lost. TODO confirm.
    @beaker_cache(
        invalidate_on_startup=DEBUG,  # So we can still develop.
        expire=864000,  # 10 days.
    )
    def _aggregate(self, keys, values):
        """Run an aggregation over a dataset and return a JSON-able dict.

        ``keys``/``values`` are parallel lists of request parameters of the
        form "verb-key=value" (include-, exclude-, breakdown-, per-).
        """
        aggregator_params = dict(zip(keys, values))
        # NOTE(review): the else-branch structure here was lost in the
        # corrupted source; reconstructed so the bare raise only fires when
        # neither 'slice' nor 'dataset' is supplied.
        if aggregator_params.get('slice'):
            id = aggregator_params.get('slice')
        elif aggregator_params.get('dataset'):
            id = aggregator_params.get('dataset')
        else:
            raise Exception("Dataset not defined")
        dataset = model.Dataset.by_id(id)
        assert dataset, "Dataset %s not found" % id
        # Retrieve request parameters of the form "verb-key=value"
        include, axes, per, per_time = [], [], [], []
        # Resort the dictionary (since we sorted the keys)
        # FIXME: key ordering should *not* have to be alphabetical
        for param, value in sorted(aggregator_params.iteritems()):
            if param.startswith('exclude-'):
                # NOTE(review): this branch's body was lost in the corrupted
                # source — presumably it collected (param[8:], value) pairs
                # for exclusion. TODO restore from version control.
                pass
            elif param.startswith('include-'):
                include.append((param[8:], value))
            elif param.startswith('breakdown-'):
                axes.append(param[10:])  # Value ignored (e.g. "yes").
                # keys for breakdown get added to 'axes'
            elif param.startswith('per-'):
                if value and value != 'time':
                    per.append((value, param[4:]))
                else:
                    name = param[4:]
                    assert name in aggregator.time_series, value  # FIXME: Nicer error message needed.
                    per_time.append(name)
            # TODO: Other verbs?
            elif param not in ('dataset', 'slice', 'start_date', 'end_date', 'callback'):
                abort(status_code=400,
                      detail='Unknown request parameter: %s' % param)
        # NOTE(review): the argument list of aggregator.aggregate() was
        # truncated in the corrupted source. TODO confirm full signature
        # (start/end dates were probably passed too).
        results = aggregator.aggregate(dataset, include=include, axes=axes)
        for axis, statistic in per:
            results.divide_by_statistic(axis, statistic)
        for statistic_name in per_time:
            # NOTE(review): loop body lost in the corrupted source;
            # presumably a per-time division analogous to the loop above.
            # TODO confirm method name.
            results.divide_by_time_statistic(statistic_name)
        ans = {
            'metadata': {
                'include': include,
                'dates': [unicode(d) for d in results.dates],
                'axes': results.axes,
                'per': per,
                'per_time': per_time,
            },
            'results': results.matrix.items(),
        }
        return ans

    def mytax(self):
        """Compute 2010 UK tax for the request parameters; return JSON,
        or JSONP when a ``callback`` parameter is supplied."""
        def float_param(name, required=False):
            # Parse a float request parameter; 400 if missing-and-required
            # or not a number; None if absent and optional.
            if name not in request.params:
                if required:
                    abort(status_code=400, detail='parameter %s is missing'%name)
                return None
            ans = request.params[name]
            try:
                return float(ans)
            except ValueError:
                abort(status_code=400, detail='%r is not a number'%ans)
        def bool_param(name, required=False):
            # Parse a yes/no request parameter; 400 if missing-and-required
            # or not yes/no; None if absent and optional.
            if name not in request.params:
                if required:
                    abort(status_code=400, detail='parameter %s is missing'%name)
                return None
            ans = request.params[name].lower()
            if ans=='yes': return True
            elif ans=='no': return False
            else: abort(status_code=400, detail='%r is not %r or %r'%(ans, 'yes', 'no'))
        # NOTE(review): further arguments to total_tax() were truncated in
        # the corrupted source (likely spending/smoker/driver style params,
        # given the unused bool_param helper). TODO confirm full call.
        tax, explanation = calculator.TaxCalculator2010().total_tax(
            float_param('income', required=True))
        result = {'tax': tax, 'explanation': explanation}
        if 'callback' in request.params:
            return self._jsonpify(result)
        else:
            return self._jsonify(result)