Commits

Chris Mutel committed eb263f7

9.3: Add weighting and normalization

  • Parent commits b949747

Files changed (7)

File bw2data/__init__.py

 from serialization import JsonWrapper
 from database import Database
 from method import Method
+from weighting_normalization import Weighting, Normalization
 from query import Query, Filter, Result
 from utils import set_data_dir
 # Don't confuse nose tests
 import validate
 import io
 
-__version__ = (0, 9, 2)
+__version__ = (0, 9, 3)
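
The new classes are exported from the package root, so downstream code can
import them alongside the existing Method class. A minimal sketch, assuming
bw2data 0.9.3 is installed and configured:

    from bw2data import Weighting, Normalization, Method

    import bw2data
    assert bw2data.__version__ == (0, 9, 3)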

File bw2data/database.py

 # -*- coding: utf-8 -*-
 from . import databases, config, mapping, geomapping
-from errors import MissingIntermediateData, UnknownObject
-from query import Query
+from .errors import MissingIntermediateData, UnknownObject
+from .query import Query
+from .units import normalize_units
+from .utils import natural_sort, MAX_INT_32, TYPE_DICTIONARY
+from .validate import db_validator
 from time import time
-from units import normalize_units
-from utils import natural_sort, MAX_INT_32, TYPE_DICTIONARY
-from validate import db_validator
 import datetime
 import numpy as np
 import os

File bw2data/meta.py

             self.data)
 
 
+class WeightingMeta(Methods):
+    """A dictionary for weighting metadata. File data is saved in ``methods.json``."""
+    _filename = "weightings.json"
+
+
+class NormalizationMeta(Methods):
+    """A dictionary for normalization metadata. File data is saved in ``methods.json``."""
+    _filename = "normalizations.json"
+
+
 mapping = Mapping()
 databases = Databases()
 methods = Methods()
 geomapping = GeoMapping()
+weightings = WeightingMeta()
+normalizations = NormalizationMeta()
 
 
 def reset_meta():
     databases.__init__()
     methods.__init__()
     geomapping.__init__()
+    weightings.__init__()
+    normalizations.__init__()
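
The two new stores follow the pattern of the existing methods dictionary:
each persists to its own JSON file, and reset_meta() now reloads them as
well. A hedged sketch of inspecting them, assuming the dictionary-style
interface of the other metadata stores:

    from bw2data.meta import weightings, normalizations, reset_meta

    # Both stores are dictionary-like; keys are registered names.
    print(list(weightings))
    print(("example method", "total") in normalizations)

    # Reinitializes all metadata stores, including the two new ones.
    reset_meta()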

File bw2data/validate.py

 # encoding: utf-8
 try:
-    from voluptuous import Schema, required, Invalid
+    from voluptuous import Schema, Required, Invalid, Any
 except:
     raise ImportError("The voluptuous package is required for validation")
 
 
 def valid_tuple(o):
     "code": object,
     "categories": list or tuple,
     "location": object,
-    required("name"): basestring,
-    required("type"): basestring,
-    required("unit"): basestring,
-    required("exchanges"): [{
-        required("amount"): float,
-        required("input"): valid_tuple,
+    Required("name"): basestring,
+    Required("type"): basestring,
+    Required("unit"): basestring,
+    Required("exchanges"): [{
+        Required("amount"): float,
+        Required("input"): valid_tuple,
         "comment": basestring,
         "code": object,
         "sigma": float,
-        required("uncertainty type"): int,
-        required("type"): basestring,
+        Required("uncertainty type"): int,
+        Required("type"): basestring,
         }]
     }},
     extra=True)
 
 ia_validator = Schema([[valid_tuple, float, object]])
+
+weighting_validator = Schema({
+    Required("amount"): Any(float, int),
+    "uncertainty_type": int,
+    "loc": Any(float, int),
+    "scale": Any(float, int),
+    "shape": Any(float, int),
+    "minimum": Any(float, int),
+    "maximum": Any(float, int)
+})
+
+normalization_validator = Schema([{
+    Required("amount"): Any(float, int),
+    Required("flow"): valid_tuple,
+    "uncertainty_type": int,
+    "loc": Any(float, int),
+    "scale": Any(float, int),
+    "shape": Any(float, int),
+    "minimum": Any(float, int),
+    "maximum": Any(float, int)
+}])
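
A quick sketch of what the new validators accept: a weighting is a single
uncertainty dictionary, while a normalization is a list of rows, each tied
to a flow. The flow key below is invented for illustration:

    import voluptuous
    from bw2data.validate import weighting_validator, normalization_validator

    # Passes: "amount" is required; the uncertainty fields are optional.
    weighting_validator({"amount": 42.0})

    # Passes: each row needs an "amount" and a flow tuple.
    normalization_validator([
        {"amount": 7.3e-3, "flow": ("biosphere", "some-flow-code")},
    ])

    # Rejected: "amount" is missing.
    try:
        weighting_validator({"loc": 1.0})
    except voluptuous.Invalid:
        print("rejected: 'amount' is required")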

File bw2data/weighting_normalization.py

+from .ia_data_store import ImpactAssessmentDataStore
+from .meta import weightings, mapping, normalizations
+from .utils import MAX_INT_32
+from .validate import weighting_validator, normalization_validator
+import numpy as np
+
+
+class Weighting(ImpactAssessmentDataStore):
+    metadata = weightings
+    label = "weighting"
+
+    def process(self):
+        data = self.load()
+        assert isinstance(data, dict)
+        dtype = [
+            ('uncertainty_type', np.uint8),
+            ('amount', np.float32),
+            ('loc', np.float32),
+            ('scale', np.float32),
+            ('shape', np.float32),
+            ('minimum', np.float32),
+            ('maximum', np.float32),
+            ('negative', np.bool)
+        ]
+        corrected_data = (
+            data.get("uncertainty_type", 0),
+            data["amount"],
+            data.get("loc", np.NaN),
+            data.get("scale", np.NaN),
+            data.get("shape", np.NaN),
+            data.get("minimum", np.NaN),
+            data.get("maximum", np.NaN),
+            data["amount"] < 0
+        )
+        self.write_processed_array(np.array(corrected_data, dtype=dtype))
+
+    def validate(self, data):
+        weighting_validator(data)
+        return True
+
+
+class Normalization(ImpactAssessmentDataStore):
+    metadata = normalizations
+    label = "normalization"
+
+    def process(self):
+        data = self.load()
+        assert data
+        dtype = [
+            ('uncertainty_type', np.uint8),
+            ('flow', np.uint32),
+            ('index', np.uint32),
+            ('amount', np.float32),
+            ('loc', np.float32),
+            ('scale', np.float32),
+            ('shape', np.float32),
+            ('minimum', np.float32),
+            ('maximum', np.float32),
+            ('negative', np.bool)
+        ]
+        array = np.zeros((len(data),), dtype=dtype)
+        for index, row in enumerate(data):
+            array[index] = (
+                row.get("uncertainty_type", 0),
+                mapping[row['flow']],
+                MAX_INT_32,
+                row["amount"],
+                row.get("loc", np.NaN),
+                row.get("scale", np.NaN),
+                row.get("shape", np.NaN),
+                row.get("minimum", np.NaN),
+                row.get("maximum", np.NaN),
+                row["amount"] < 0
+            )
+        self.write_processed_array(array)
+
+    def validate(self, data):
+        normalization_validator(data)
+        return True
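
An end-to-end sketch of the two new objects. The names and data are invented
for illustration, and this assumes the register/write/process interface of
ImpactAssessmentDataStore:

    from bw2data import Weighting, Normalization

    # A weighting stores a single uncertainty dict; process() turns it
    # into a one-row structured array.
    w = Weighting(("example method", "total"))
    w.register()
    w.write({"amount": 42.0})
    w.process()

    # A normalization stores one row per flow; process() looks up each
    # flow in mapping, so the flows must already be registered there.
    n = Normalization(("example method", "person equivalents"))
    n.register()
    n.write([{"amount": 7.3e-3, "flow": ("biosphere", "some-flow-code")}])
    n.process()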

File docs/conf.py

 # The short X.Y version.
 version = '0.9'
 # The full version, including alpha/beta/rc tags.
-release = '0.9.2'
+release = '0.9.3'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.

File setup.py
 
 setup(
     name='bw2data',
-    version="0.9.2",
+    version="0.9.3",
     packages=packages,
     author="Chris Mutel",
     author_email="cmutel@gmail.com",
     license=open('LICENSE.txt').read(),
-    install_requires=["voluptuous", "progressbar", "numpy", "lxml", "scipy", "requests>=1.1.0", "brightway2", "stats_arrays"],
+    install_requires=[
+        "brightway2",
+        "lxml",
+        "numpy",
+        "progressbar",
+        "requests>=1.1.0",
+        "scipy",
+        "stats_arrays"
+        "voluptuous",
+    ],
     url="https://bitbucket.org/cmutel/brightway2-data",
     long_description=open('README.rst').read(),
     classifiers=[