def test_get_activity_peewee(self):
    """Activities stored in a sqlite-backed database resolve to ``PWActivity``."""
    key = ("a database", "foo")
    db = Database("a database", "sqlite")
    db.write(
        {
            key: {
                "exchanges": [
                    {
                        "input": key,
                        "amount": 1,
                        "type": "production",
                    }
                ],
                "location": "bar",
                "name": "baz",
            },
        }
    )
    self.assertIsInstance(get_activity(key), PWActivity)
def create_new_biosphere(self, biosphere_name, relink=True):
    """Create new biosphere database from biosphere flows in ``self.data``.

    Links all biosphere flows to new bio database if ``relink``."""
    assert biosphere_name not in databases, u"{} database already exists".format(biosphere_name)
    print(u"Creating new biosphere database: {}".format(biosphere_name))

    # Registration can emit warnings we don't care about here.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        new_bio = Database(biosphere_name, backend='singlefile')
        new_bio.register(
            format=self.format,
            comment="New biosphere created by LCI import"
        )

    wanted = {'name', 'unit', 'categories'}

    def as_flow(exc):
        # Keep only identifying fields; the code hash is computed over
        # those fields alone (kwargs are evaluated before update applies).
        flow = {key: value for key, value in exc.items() if key in wanted}
        flow.update(
            type='emission',
            exchanges=[],
            database=biosphere_name,
            code=activity_hash(flow),
        )
        return flow

    # Keyed by (database, code) so duplicate flows collapse to one entry.
    flows = {}
    for ds in self.data:
        for exc in ds.get('exchanges', []):
            if exc['type'] == 'biosphere':
                flow = as_flow(exc)
                flows[(flow['database'], flow['code'])] = flow

    new_bio.write(flows)

    if relink:
        self.apply_strategies([
            functools.partial(
                link_iterable_by_fields,
                other=list(flows.values()),
                relink=True,
            ),
        ])
def add_missing_cfs(self):
    """Assign a fresh code to every CF missing an ``input`` link and add the
    corresponding flows to the biosphere database."""
    unlinked = []
    for method in self.data:
        for cf in method["exchanges"]:
            if "input" in cf:
                continue
            cf["code"] = str(uuid.uuid4())
            unlinked.append(cf)

    new_flows = recursive_str_to_unicode(
        dict(self._format_flow(cf) for cf in unlinked)
    )

    if new_flows:
        biosphere = Database(self.biosphere_name)
        biosphere_data = biosphere.load()
        biosphere_data.update(new_flows)
        biosphere.write(biosphere_data)
        print(u"Added {} new biosphere flows".format(len(new_flows)))
def test_with_ecoinvent_specific_shortcut():
    """Rower's bundled ecoinvent 3.4 consequential map relabels RoW locations."""
    assert not len(Database('animals'))
    key = ('animals', "6ccf7e69afcf1b74de5b52ae28bbc1c2")
    db = Database('animals')
    db.write({
        key: {
            'name': 'dogs',
            'reference product': 'dog',
            'exchanges': [],
            'unit': 'kilogram',
            'location': 'RoW',
        },
    })
    rwr = rower.Rower('animals')
    rwr.apply_existing_activity_map(rwr.EI_3_4_CONSEQUENTIAL)
    assert get_activity(key)['location'] == "RoW_64"
def add_unlinked_flows_to_biosphere_database(self, biosphere_name=None):
    """Create biosphere flows for unlinked biosphere exchanges, write them to
    the (existing) biosphere database, and relink emissions against it."""
    biosphere_name = biosphere_name or config.biosphere
    assert biosphere_name in databases, u"{} biosphere database not found".format(
        biosphere_name
    )

    bio = Database(biosphere_name)
    wanted = {"name", "unit", "categories"}

    def as_flow(exc):
        # Hash is computed over the identifying fields only, since all
        # update() kwargs are evaluated before the update is applied.
        flow = {key: value for key, value in exc.items() if key in wanted}
        flow.update(
            type="emission",
            exchanges=[],
            code=activity_hash(flow),
            database=biosphere_name,
        )
        return flow

    unlinked = []
    for ds in self.data:
        for exc in ds.get("exchanges", []):
            if exc["type"] == "biosphere" and not exc.get("input"):
                unlinked.append(as_flow(exc))

    data = bio.load()
    # Dict keys eliminate duplicate flows
    data.update({(biosphere_name, activity_hash(flow)): flow for flow in unlinked})
    bio.write(data)

    self.apply_strategy(
        functools.partial(
            link_iterable_by_fields,
            other=(
                obj
                for obj in Database(biosphere_name)
                if obj.get("type") == "emission"
            ),
            kind="biosphere",
        ),
    )
def add_unlinked_flows_to_new_biosphere_database(self, biosphere_name=None):
    """Write the new EXIOBASE biosphere flows to their own database and
    return that database's name."""
    biosphere_name = biosphere_name or self.db_name + " biosphere"
    db = Database(biosphere_name)

    data = {}
    for row in self.biosphere_correspondence:
        if not row["new flow"]:
            continue
        data[(biosphere_name, row["exiobase name"])] = {
            "name": row["exiobase name"],
            "unit": row["exiobase unit"],
            "categories": (row["ecoinvent category"],),
            "comment": row["comment"],
            "exchanges": [],
        }

    if biosphere_name not in databases:
        db.register(format="EXIOBASE 3 New Biosphere", filepath=str(self.dirpath))
    db.write(data)
    return biosphere_name
def create_biosphere(self):
    """Write a three-flow biosphere database; nitrogen deliberately has no type."""
    flows = [
        ("oxygen", "emission"),
        ("argon", "emission"),
        ("nitrogen", None),  # no "type" key on purpose
    ]
    data = {}
    for flow_name, flow_type in flows:
        ds = {"name": flow_name, "unit": "kilogram"}
        if flow_type is not None:
            ds["type"] = flow_type
        data[("biosphere", flow_name)] = ds
    db = Database("biosphere")
    db.register()
    db.write(data)
def create_biosphere(self):
    """Register and write a minimal biosphere database (nitrogen lacks a type)."""
    def flow(name, **extra):
        # Common fields plus any per-flow extras (e.g. type='emission').
        return dict({'name': name, 'unit': 'kilogram'}, **extra)

    db = Database('biosphere')
    db.register()
    db.write({
        ('biosphere', 'oxygen'): flow('oxygen', type='emission'),
        ('biosphere', 'argon'): flow('argon', type='emission'),
        ('biosphere', 'nitrogen'): flow('nitrogen'),
    })
def test_get_activity_singlefile(self):
    """Both ``get_activity`` and ``Database.get`` yield ``SFActivity`` for the
    singlefile backend."""
    key = ("a database", "foo")
    db = Database("a database", "singlefile")
    db.write({
        key: {
            'exchanges': [{
                'input': key,
                'amount': 1,
                'type': 'production',
            }],
            'location': 'bar',
            'name': 'baz',
        },
    })
    self.assertIsInstance(get_activity(key), SFActivity)
    self.assertIsInstance(Database.get(key), SFActivity)
def test_combine_methods(self):
    """``combine_methods`` sums CFs shared between methods and copies metadata."""
    biosphere_db = Database("biosphere")
    biosphere_db.register(depends=[])
    biosphere_db.write(biosphere)

    first = Method(("test method 1",))
    first.register(unit="p")
    first.write([
        (("biosphere", 1), 1, "GLO"),
        (("biosphere", 2), 2, "GLO"),
    ])

    second = Method(("test method 2",))
    second.register(unit="p")
    second.write([(("biosphere", 2), 10, "GLO")])

    combine_methods(("test method 3",), ("test method 1",), ("test method 2",))

    combined = Method(("test method 3",))
    self.assertEqual(
        sorted(combined.load()),
        [
            (("biosphere", 1), 1, "GLO"),
            (("biosphere", 2), 12, "GLO"),
        ],
    )
    # List keys are coerced to tuples by the metadata store.
    self.assertEqual(methods[["test method 3"]]["unit"], "p")
def test_merge_databases_wrong_backend():
    """Merging is refused in both directions when one database is not sqlite-backed."""
    iotable_db = Database("a database", "iotable")
    iotable_db.write(
        {
            ("a database", "foo"): {
                "exchanges": [
                    {
                        "input": ("a database", "foo"),
                        "amount": 1,
                        "type": "production",
                    }
                ],
                "location": "bar",
                "name": "baz",
            },
        },
        [],
        [],
        [],
    )
    sqlite_db = Database("another database")
    sqlite_db.write(
        {
            ("another database", "bar"): {
                "exchanges": [
                    {
                        "input": ("another database", "bar"),
                        "amount": 1,
                        "type": "production",
                    }
                ],
                "location": "bar",
                "name": "baz",
            },
        }
    )
    for left, right in (
        ("a database", "another database"),
        ("another database", "a database"),
    ):
        with pytest.raises(ValidityError):
            merge_databases(left, right)
def basic():
    """Build and return the 'animals' fixture database: one shared food
    process plus dogs/puppies/cats/hamster consumers in various locations."""
    assert not len(Database('animals'))

    def consumer(code, name, product, location):
        # Every non-food activity eats 1 kg of food and produces itself.
        return {
            'name': name,
            'reference product': product,
            'exchanges': [
                {
                    'amount': 1.0,
                    'input': ('animals', 'food'),
                    'type': 'technosphere'
                },
                {
                    'amount': 1.0,
                    'input': ('animals', code),
                    'type': 'production'
                },
            ],
            'unit': 'kilogram',
            'location': location,
        }

    animal_data = {
        ('animals', 'food'): {
            'name': 'food',
            'exchanges': [{
                'amount': 1.0,
                'input': ('animals', 'food'),
                'type': 'production'
            }],
            'unit': 'kilogram',
            'location': 'GLO',
            'reference product': 'food',
        },
    }
    for code, name, product, location in [
        ('german_shepherd', 'dogs', 'dog', 'DE'),
        ('pug', 'dogs', 'dog', 'CN'),
        ('mutt', 'dogs', 'dog', 'RoW'),
        ('german_shepherd pup', 'dogs', 'puppy', 'DE'),
        ('pug pup', 'dogs', 'puppy', 'CN'),
        ('mutt pup', 'dogs', 'puppy', 'RoW'),
        ('persian', 'cats', 'cat', 'IR'),
        ('moggy', 'cats', 'cat', 'RoW'),
        ('hamster', 'hamster', 'hamster', 'GLO'),
    ]:
        animal_data[('animals', code)] = consumer(code, name, product, location)

    db = Database('animals')
    db.write(animal_data)
    return db
def test_match_subcategories(self):
    """Root-category CFs are expanded to every matching subcategory flow;
    CFs that already name a subcategory are left untouched."""
    self.maxDiff = None

    def bio_flow(code, categories, flow_type="emission"):
        # Background flows differ only in code, categories, and type.
        return {
            "categories": categories,
            "code": code,
            "database": "b",
            "exchanges": [],
            "name": "Boron trifluoride",
            "type": flow_type,
            "unit": "kilogram",
        }

    background = [
        bio_flow("first", ("air", "non-urban air or from high stacks")),
        bio_flow("second", ("air", "low population density, long-term")),
        bio_flow("third", ("air", "lower stratosphere + upper troposphere")),
        # Skip - root category
        bio_flow("fourth", ("air",)),
        # Should be skipped - wrong type
        bio_flow(
            "Bill. My friends just call me Bill.",
            ("air", "skip me"),
            "something else",
        ),
    ]
    db = Database("b")
    db.register()
    db.write({(flow["database"], flow["code"]): flow for flow in background})

    data = [
        {
            "name": "Some LCIA method",
            "exchanges": [
                {
                    "name": "Boron trifluoride",
                    "categories": ("air",),
                    "unit": "kilogram",
                    # Only for CFs - no need for biosphere filter
                    # 'type': 'biosphere',
                    "amount": 1,
                },
                {
                    "name": "Boron trifluoride",
                    "categories": ("air", "lower stratosphere + upper troposphere"),
                    "unit": "kilogram",
                    "amount": 0,
                },
            ],
        }
    ]
    expected = [
        {
            "name": "Some LCIA method",
            "exchanges": [
                {
                    "name": "Boron trifluoride",
                    "categories": ("air",),
                    "unit": "kilogram",
                    "amount": 1,
                },
                {
                    # Not linked - already has subcategories
                    "categories": ("air", "lower stratosphere + upper troposphere"),
                    "name": "Boron trifluoride",
                    "unit": "kilogram",
                    "amount": 0,
                },
                {
                    "categories": ("air", "low population density, long-term"),
                    "database": "b",
                    "name": "Boron trifluoride",
                    "unit": "kilogram",
                    "input": ("b", "second"),
                    "amount": 1,
                },
                {
                    "amount": 1,
                    "categories": ("air", "non-urban air or from high stacks"),
                    "database": "b",
                    "input": ("b", "first"),
                    "name": "Boron trifluoride",
                    "unit": "kilogram",
                },
            ],
        }
    ]
    answer = match_subcategories(data, "b", remove=False)
    self.assertEqual(expected, answer)
def get_lca(self):
    """Write the fixture database and return a solved inventory LCA on it."""
    Database("a").write(lci_fixture)
    lca = LCA({("a", "2"): 1})
    lca.lci()
    return lca
def write_database(self, data=None, delete_existing=True, backend=None,
        activate_parameters=False, **kwargs):
    """
    Write data to a ``Database``.

    All arguments are optional, and are normally not specified.

    ``delete_existing`` effects both the existing database (it will be
    emptied prior to writing if True, which is the default), and, if
    ``activate_parameters`` is True, existing database and activity
    parameters. Database parameters will only be deleted if the import data
    specifies a new set of database parameters (i.e.
    ``database_parameters`` is not ``None``) - the same is true for
    activity parameters. If you need finer-grained control, please use the
    ``DatabaseParameter``, etc. objects directly.

    Args:
        * *data* (dict, optional): The data to write to the ``Database``.
          Default is ``self.data``.
        * *delete_existing* (bool, default ``True``): See above.
        * *activate_parameters* (bool, default ``False``). Instead of
          storing parameters in ``Activity`` and other proxy objects,
          create ``ActivityParameter`` and other parameter objects, and
          evaluate all variables and formulas.
        * *backend* (string, optional): Storage backend to use when
          creating ``Database``. Default is the default backend.

    Returns:
        ``Database`` instance.
    """
    data = self.data if data is None else data
    # Extra keyword arguments become database metadata.
    self.metadata.update(kwargs)

    if activate_parameters:
        # Comes before .write_database because we
        # need to remove `parameters` key
        activity_parameters = self._prepare_activity_parameters(
            data, delete_existing
        )

    # Every dataset must belong to this importer's database.
    if {o['database'] for o in data} != {self.db_name}:
        error = "Activity database must be {}, but {} was also found".format(
            self.db_name,
            {o['database'] for o in data}.difference({self.db_name})
        )
        raise WrongDatabase(error)
    # Activity codes must be unique within the database.
    if len({o['code'] for o in data}) < len(data):
        seen, duplicates = set(), []
        for o in data:
            if o['code'] in seen:
                duplicates.append(o['name'])
            else:
                seen.add(o['code'])
        error = "The following activities have non-unique codes: {}"
        raise NonuniqueCode(error.format(duplicates))

    # Re-key datasets by (database, code) as the backend expects.
    data = {(ds['database'], ds['code']): ds for ds in data}

    if self.db_name in databases:
        # TODO: Raise error if unlinked exchanges?
        db = Database(self.db_name)
        if delete_existing:
            existing = {}
        else:
            # Keep current contents and merge the new data over them.
            existing = db.load(as_dict=True)
    else:
        existing = {}
        if 'format' not in self.metadata:
            self.metadata['format'] = self.format
        # Suppress registration warnings from the backend.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            db = Database(self.db_name, backend=backend)
            db.register(**self.metadata)

    self.write_database_parameters(activate_parameters, delete_existing)

    existing.update(data)
    db.write(existing)

    if activate_parameters:
        self._write_activity_parameters(activity_parameters)

    print(u"Created database: {}".format(self.db_name))
    return db
def test_database_write_adds_to_geomapping(add_biosphere):
    """Writing a database registers its locations in the global geomapping."""
    Database("food").write(food, process=False)
    for location in ("CA", "CH"):
        assert location in geomapping
def test_bw2_database():
    """Smoke test: the biosphere and food fixtures can both be written."""
    for name, fixture in (("biosphere", biosphere), ("food", food)):
        Database(name).write(fixture)
def test_waste_sign_changed():
    """``flip_sign_on_waste`` negates the amount (and uncertainty parameters)
    of technosphere inputs coming from waste-treatment activities, for every
    uncertainty type, while leaving all other data untouched."""
    assert not len(Database("other"))

    def waste_treatment(code):
        # The six waste-treatment fixtures are identical apart from their code.
        return {
            "name": "waste treatment activity",
            "unit": "kilogram",
            "location": "GLO",
            "reference product": "waste product",
            "production amount": -1,  # negative, waste treatment
            "activity type": "ordinary transforming activity",
            "exchanges": [
                {
                    "name": "waste treatment",
                    "unit": "kilogram",
                    "amount": -1.0,
                    "input": ("other", code),
                    "type": "production",
                    "uncertainty type": 0,
                },
            ],
        }

    other_data = {
        ("other", "non-waste"): {
            "name": "production activity",
            "unit": "kilogram",
            "location": "GLO",
            "reference product": "non-waste product",
            "production amount": 1,  # Positive, not a waste treatment
            "activity type": "ordinary transforming activity",
            "exchanges": [
                {
                    "name": "non-waste product",
                    "unit": "kilogram",
                    "amount": 1.0,
                    "input": ("other", "non-waste"),
                    "type": "production",
                    "uncertainty type": 0,
                },
            ],
        },
    }
    for i in range(6):
        code = "waste-{}".format(i)
        other_data[("other", code)] = waste_treatment(code)

    other = Database("other")
    other.register()
    other.write(other_data)
    assert "other" in databases

    def waste_exchange(code, uncertainty):
        # Technosphere input of a waste product, plus uncertainty fields.
        exc = {
            "name": "waste product",
            "unit": "kilogram",
            "amount": 1.0,
            "input": ("other", code),
            "type": "technosphere",
        }
        exc.update(uncertainty)
        return exc

    def sp_process(code, product, second_exchange, activity_type=None,
                   production_uncertainty=True):
        # One SimaPro process: its own production exchange plus one input.
        production = {
            "name": code,
            "unit": "kilogram",
            "amount": 1.0,
            "input": ("sp", code),
            "type": "production",
        }
        if production_uncertainty:
            production["uncertainty type"] = 0
        ds = {
            "simapro metadata": dict(),
            "code": code,
            "database": "sp",
            "type": "process",
            "name": code,
            "unit": "kilogram",
            "location": "GLO",
            "reference product": product,
            "production amount": 1,
        }
        if activity_type is not None:
            ds["activity type"] = activity_type
        ds["exchanges"] = [production, second_exchange]
        return ds

    ORDINARY = "ordinary transforming activity"
    db = [
        # Input from a non-waste activity: must not be flipped.
        sp_process("test_non_waste", "anything", {
            "name": "some product",
            "unit": "kilogram",
            "amount": 1.0,
            "input": ("other", "non-waste"),
            "type": "technosphere",
            "uncertainty type": 0,
        }),
        sp_process("test_waste_0", "anything else",
                   waste_exchange("waste-0", {"uncertainty type": 0})),
        sp_process("test_waste_1", "anything else",
                   waste_exchange("waste-1", {"uncertainty type": 1, "loc": 1.0})),
        # NOTE: deliberately reuses waste-0 as the input, and its production
        # exchange deliberately lacks an "uncertainty type" key.
        sp_process("test_waste_2", "anything else",
                   waste_exchange("waste-0",
                                  {"uncertainty type": 2, "loc": 0, "scale": 0.1}),
                   activity_type=ORDINARY, production_uncertainty=False),
        sp_process("test_waste_3", "anything else",
                   waste_exchange("waste-3",
                                  {"uncertainty type": 3, "loc": 1.0, "scale": 0.1}),
                   activity_type=ORDINARY),
        sp_process("test_waste_4", "anything else",
                   waste_exchange("waste-4",
                                  {"uncertainty type": 4,
                                   "minimum": 0.5, "maximum": 1.5}),
                   activity_type=ORDINARY),
        sp_process("test_waste_5", "anything else",
                   waste_exchange("waste-5",
                                  {"uncertainty type": 5, "minimum": 0.5,
                                   "maximum": 1.5, "loc": 1.0}),
                   activity_type=ORDINARY),
    ]

    db_before = copy.deepcopy(db)
    db = flip_sign_on_waste(db, "other")

    # Check that things did not unexpectedly change
    unchanged_activity_keys = [
        "simapro metadata",
        "code",
        "database",
        "name",
        "unit",
        "location",
        "reference product",
        "type",
        "production amount",
    ]
    unchanged_exchange_keys = ["name", "unit", "input", "type", "uncertainty type"]
    for old_act, new_act in zip(db_before, db):
        for act_key in unchanged_activity_keys:
            assert old_act[act_key] == new_act[act_key]
        for old_exc, new_exc in zip(old_act["exchanges"], new_act["exchanges"]):
            for exc_key in unchanged_exchange_keys:
                assert old_exc.get(exc_key, "key not found") == new_exc.get(
                    exc_key, "key not found")

    # Check that inputs of regular products have not changed
    assert db[0] == db_before[0]

    def check_flipped(index, **expected):
        # Production exchange untouched; waste input amount flipped, with
        # the given uncertainty fields ("unchanged" = same as before).
        for new_exc, old_exc in zip(db[index]["exchanges"],
                                    db_before[index]["exchanges"]):
            if new_exc["type"] == "production":
                assert new_exc == old_exc
            else:
                assert new_exc["amount"] == -1
                for key, value in expected.items():
                    if value == "unchanged":
                        assert new_exc[key] == old_exc[key]
                    else:
                        assert new_exc[key] == value

    check_flipped(1)  # uncertainty type 0 (undefined)
    check_flipped(2)  # uncertainty type 1 (no uncertainty)
    # Lognormal: loc becomes ln(1) = 0, scale unchanged, negative flag set.
    check_flipped(3, loc=0, scale="unchanged", negative=True)
    check_flipped(4, loc=-1, scale="unchanged")  # normal
    check_flipped(5, minimum=-1.5, maximum=-0.5)  # uniform
    check_flipped(6, loc=-1, minimum=-1.5, maximum=-0.5)  # triangular
def import_data(self):
    """Write the biosphere, inventory, intersection, loading, and method
    fixtures used by the regionalized ('regions') LCA tests."""
    biosphere = Database("biosphere")
    biosphere.register(depends=[], geocollections=[])
    biosphere.write({
        ('biosphere', 'F'): {'type': 'emission', 'exchanges': []},
        ('biosphere', 'G'): {'type': 'emission', 'exchanges': []},
    })

    inventory_data = {
        ('inventory', 'U'): {
            'type': 'process',
            'location': "L",
            'exchanges': [
                {'input': ('biosphere', 'F'), 'type': 'biosphere', 'amount': 1},
                {'input': ('biosphere', 'G'), 'type': 'biosphere', 'amount': 1},
            ],
        },
    }
    # The remaining processes have no exchanges, only locations.
    for code, location in (('V', "M"), ('X', "N"), ('Y', "O"), ('Z', "O")):
        inventory_data[('inventory', code)] = {
            'type': 'process',
            'location': location,
            'exchanges': [],
        }
    inventory = Database("inventory")
    inventory.register(depends=["biosphere"], geocollections=["places"])
    inventory.write(inventory_data)

    inter = Intersection(("places", "regions"))
    inter.register()
    inter.write([
        ["L", "A", 1],
        ["M", "A", 2],
        ["M", "B", 3],
        ["N", "B", 5],
        ["N", "C", 8],
        ["O", "C", 13],
    ])

    loading = Loading("loading")
    loading.register()
    loading.write([
        [2, "A"],
        [4, "B"],
        [8, "C"],
    ])

    method = Method(("a", "method"))
    method.register(geocollections=['regions'])
    method.write([
        [('biosphere', 'F'), 1, "A"],
        [('biosphere', 'G'), 2, "A"],
        [('biosphere', 'F'), 3, "B"],
        [('biosphere', 'G'), 4, "B"],
        [('biosphere', 'F'), 5, "C"],
        [('biosphere', 'G'), 6, "C"],
    ])
def import_data(self):
    """Write the biosphere, inventory, and method fixtures; the method CFs
    here are specific to the 'places' geocollection."""
    biosphere = Database("biosphere")
    biosphere.register(depends=[], geocollections=[])
    biosphere.write({
        ('biosphere', 'F'): {'type': 'emission', 'exchanges': []},
        ('biosphere', 'G'): {'type': 'emission', 'exchanges': []},
    })

    inventory_data = {
        ('inventory', 'U'): {
            'type': 'process',
            'location': "L",
            'exchanges': [
                {'input': ('biosphere', 'F'), 'type': 'biosphere', 'amount': 1},
                {'input': ('biosphere', 'G'), 'type': 'biosphere', 'amount': 1},
            ],
        },
    }
    # The remaining processes carry only a location.
    for code, location in (('V', "M"), ('X', "N"), ('Y', "O"), ('Z', "O")):
        inventory_data[('inventory', code)] = {
            'type': 'process',
            'location': location,
            'exchanges': [],
        }
    inventory = Database("inventory")
    inventory.register(depends=["biosphere"], geocollections=["places"])
    inventory.write(inventory_data)

    method = Method(("a", "method"))
    method.register(geocollections=['places'])
    method.write([
        [('biosphere', 'F'), 1, "L"],
        [('biosphere', 'G'), 2, "L"],
    ])
def test_match_subcategories(self):
    """Root-category CFs are expanded to matching subcategory flows; CFs that
    already carry a subcategory are not re-linked."""
    self.maxDiff = None

    def make_flow(code, categories, flow_type='emission'):
        # Background flows share everything except code/categories/type.
        return {
            'categories': categories,
            'code': code,
            'database': 'b',
            'exchanges': [],
            'name': 'Boron trifluoride',
            'type': flow_type,
            'unit': 'kilogram',
        }

    background = [
        make_flow('first', ('air', 'non-urban air or from high stacks')),
        make_flow('second', ('air', 'low population density, long-term')),
        make_flow('third', ('air', 'lower stratosphere + upper troposphere')),
        # Skip - root category
        make_flow('fourth', ('air',)),
        # Should be skipped - wrong type
        make_flow(
            'Bill. My friends just call me Bill.',
            ('air', 'skip me'),
            'something else',
        ),
    ]
    db = Database('b')
    db.register()
    db.write({(flow['database'], flow['code']): flow for flow in background})

    data = [{
        'name': 'Some LCIA method',
        'exchanges': [
            {
                'name': 'Boron trifluoride',
                'categories': ('air',),
                'unit': 'kilogram',
                # Only for CFs - no need for biosphere filter
                # 'type': 'biosphere',
                'amount': 1,
            },
            {
                'name': 'Boron trifluoride',
                'categories': ('air', 'lower stratosphere + upper troposphere'),
                'unit': 'kilogram',
                'amount': 0,
            },
        ],
    }]
    expected = [{
        'name': 'Some LCIA method',
        'exchanges': [
            {
                'name': 'Boron trifluoride',
                'categories': ('air',),
                'unit': 'kilogram',
                'amount': 1,
            },
            {
                # Not linked - already has subcategories
                'categories': ('air', 'lower stratosphere + upper troposphere'),
                'name': 'Boron trifluoride',
                'unit': 'kilogram',
                'amount': 0,
            },
            {
                'categories': ('air', 'low population density, long-term'),
                'database': 'b',
                'name': 'Boron trifluoride',
                'unit': 'kilogram',
                'input': ('b', 'second'),
                'amount': 1,
            },
            {
                'amount': 1,
                'categories': ('air', 'non-urban air or from high stacks'),
                'database': 'b',
                'input': ('b', 'first'),
                'name': 'Boron trifluoride',
                'unit': 'kilogram',
            },
        ],
    }]
    answer = match_subcategories(data, 'b', remove=False)
    self.assertEqual(expected, answer)
def create_database(self, name, data):
    """Register, write, and process a database in one call."""
    database = Database(name)
    database.register()
    database.write(data)
    database.process()
def create_db(self):
    """Write the 'clima' test database: one single-exchange process per
    greenhouse-gas biosphere flow of interest."""
    bw2setup()
    create_climate_methods()

    # (process code, biosphere flow UUID)
    flows = [
        # fossil fuel carbon dioxide thus normal CF
        ('co2', '349b29d1-3e58-4c66-98b9-9d1a076efd2e'),
        # Methane non fossil thus normal CF
        ('ch4', 'da1157e2-7593-4dfd-80dd-a3449b37a4d8'),
        # Methane fossil thus consider conversion to co2
        ('ch4_fossil', '0795345f-c7ae-410c-ad25-1845784c75f5'),
        # biogenic carbon dioxide
        ('co2bio_test', 'cc6a1abb-b123-4ca6-8f16-38209df609be'),
    ]
    data = {}
    for code, flow_id in flows:
        data[('clima', code)] = {
            'name': code,
            'exchanges': [
                {
                    'amount': 1,
                    'input': (db_bio.name, flow_id),
                    'type': 'biosphere',
                },
            ],
            'type': 'process',
        }
    Database('clima').write(data)