def load_activities(self):
    """Populate the archive from the 'activity overview' sheet.

    Each row yields (or updates) one LcProcess keyed by the activity's UUID,
    then attaches the row's product flow as an Output exchange (and as a
    reference exchange when the row marks it 'ReferenceProduct').  Column
    names differ between the internal and public spreadsheet layouts, so
    every lookup is gated on self.internal.
    """
    print('Handling activities...')
    sheet = self._little_read('activity overview')
    for _, row in sheet.iterrows():
        raw_key = row['Activity UUID'] if self.internal else row['activity uuid']
        key = str(uuid.UUID(raw_key))  # canonical string form of the UUID
        proc = self[key]
        if proc is None:
            # First sighting of this activity: build the process from metadata
            name = row['activityName']
            if self.internal:
                geog = row['Geography']
                span = 'interval(%s, %s)' % (row['Start'], row['End'])
                note = row['Tags']
            else:
                geog = row['geography']
                span = {'begin': row['start date'], 'end': row['end date']}
                note = row['tags']
            proc = LcProcess(key, Name=name, Comment=note, SpatialScope=geog,
                             TemporalScope=span)
            # Optional columns: quietly skip any the sheet does not carry
            try:
                tech_col = 'Technology Level' if self.internal else 'technologyLevel'
                proc['TechnologyLevel'] = row[tech_col]
            except KeyError:
                pass
            try:
                proc['Classifications'] = [row['ISIC class']]
                proc['IsicNumber'] = row['ISIC number']
            except KeyError:
                pass
            self.add(proc)
        # Attach the row's product flow to the (new or existing) process
        if self.internal:
            product = row['Product']
            ref_col = 'Product Type'
        else:
            product = row['product name']
            ref_col = 'group'
        flow = self[self._key_to_nsuuid(product)]
        proc.add_exchange(flow, 'Output')
        if row[ref_col] == 'ReferenceProduct':
            proc.add_reference(flow, 'Output')
def load_activities(self):
    """Populate the archive from the 'activity overview' sheet.

    For each row: look up (or create) an LcProcess keyed by the activity
    UUID, then attach the row's product flow as an Output exchange.
    Column names differ between the internal and public spreadsheet
    layouts, hence the repeated self.internal branches.
    """
    print('Handling activities...')
    _activity = self._little_read('activity overview')
    for index, row in _activity.iterrows():
        if self.internal:
            u = row['Activity UUID']
        else:
            u = row['activity uuid']
        # NOTE(review): keys on the uuid.UUID object itself, not str(uuid.UUID(u))
        # as the sibling variant does -- confirm the archive accepts UUID keys
        u = uuid.UUID(u)
        if self[u] is None:
            """ create the process """
            n = row['activityName']
            if self.internal:
                g = row['Geography']
                st = 'interval(%s, %s)' % (row['Start'], row['End'])
                c = row['Tags']
            else:
                g = row['geography']
                st = {'begin': row['start date'], 'end': row['end date']}
                c = row['tags']
            p = LcProcess(u, Name=n, Comment=c, SpatialScope=g, TemporalScope=st)
            # Optional columns: skip silently when the sheet lacks them
            try:
                if self.internal:
                    p['TechnologyLevel'] = row['Technology Level']
                else:
                    p['TechnologyLevel'] = row['technologyLevel']
            except KeyError:
                pass
            try:
                p['Classifications'] = [row['ISIC class']]
                p['IsicNumber'] = row['ISIC number']
            except KeyError:
                pass
            self.add(p)
        else:
            p = self[u]
        """ Now, handle the flows """
        if self.internal:
            exch_name = row['Product']
            ref_check = 'Product Type'
        else:
            exch_name = row['product name']
            ref_check = 'group'
        exch_flow = self[self._key_to_id(exch_name)]
        # NOTE(review): here add_reference precedes add_exchange; the sibling
        # variant does the reverse -- presumably LcProcess tolerates either
        # order, but verify before unifying the two versions
        if row[ref_check] == 'ReferenceProduct':
            p.add_reference(exch_flow, 'Output')
        p.add_exchange(exch_flow, 'Output')
def _create_process(self, filename):
    """Objectify one XML dataset file and return its LcProcess.

    Reuses an existing process when the name-derived key is already in the
    archive; otherwise builds one from the referenceFunction metadata.  All
    exchanges reported by _extract_exchanges are then added, with Output
    references for the reference flows.

    :param filename: path of the XML dataset to load
    :return: the LcProcess (new or pre-existing)
    """
    obj = self._get_objectified_entity(filename)
    info = obj.dataset.metaInformation.processInformation
    name = info.referenceFunction.get('name')
    key = self._key_to_nsuuid(name)
    proc = self[key]
    if proc is None:
        # Unseen: construct a fresh process from the dataset metadata
        location = info.geography.get('location')
        interval = {'begin': str(find_tag(info, 'startDate')),
                    'end': str(find_tag(info, 'endDate'))}
        remark = info.referenceFunction.get('generalComment')
        classes = [info.referenceFunction.get('category'),
                   info.referenceFunction.get('subCategory')]
        proc = LcProcess(key, Name=name, Comment=remark, SpatialScope=location,
                         TemporalScope=interval, Classifications=classes)
        proc.set_external_ref(name)
    else:
        assert proc.entity_type == 'process', "Expected process, found %s" % proc.entity_type
    refs, exchanges = self._extract_exchanges(obj)
    for flow, direction, amount, note in exchanges:
        self._print('Exch %s [%s] (%g)' % (flow, direction, amount))
        exch = proc.add_exchange(flow, direction, reference=None, value=amount, add_dups=True)
        if note is not None:
            exch.comment = note
    for ref in refs:
        proc.add_reference(ref, 'Output')
    self.add(proc)
    return proc
def _extract_and_reduce_lci(node, sample_size=100):
    """
    This function performs the (surpassingly slow) task of extracting reduced
    aggregated LCI results from the large bundled ecoinvent 7z LCI archives.
    (The operation is slow on RAM-limited machines because the 7z algorithm
    requires tremendous memory)

    The routine requires the process inventory test case to be selected manually.
    The selected inventory is loaded, and then up to `sample_size` exchanges are
    selected at random and the rest are removed.  This reduces the file size
    (and load time) of the generated archives without sacrificing the
    representativeness of the computation.

    :param node: carries the (version, model, node) triple identifying the
        process inventory to extract
    :param sample_size: maximum number of exchanges to retain (default 100);
        inventories with fewer exchanges are kept in full
    :return: None; writes the reduced archive to the LCI cache file
    """
    filename = lci_cache_file(node.version, node.model)
    ref = test_ref(node.version, node.model)
    # Reopen the cache archive if it already exists; otherwise start fresh
    if os.path.exists(filename):
        a = archive_from_json(filename, ref=ref)
    else:
        a = LcArchive(filename, ref=ref)
    if ref not in cat.references:
        cat.add_existing_archive(a, interfaces='inventory', static=True)
    # Already extracted? then there is nothing to do
    try:
        cat.query(ref).get(node.node)
        return
    except EntityNotFound:
        pass
    cat.get_resource(ref, 'inventory').remove_archive()
    lci_ref = find_lci_ref(node.version, node.model)
    if lci_ref is None:
        print('No LCI resource for (%s, %s)' % (node.version, node.model))
        return
    print(
        'WARNING: extracting and reducing LCI data can be very slow >60s per file'
    )
    p_ref = cat.query(lci_ref).get(node.node)
    if p_ref is None:
        print('No process found with reference %s' % node.node)
        return
    p_rx = next(p_ref.references())
    inventory = list(p_ref.inventory(ref_flow=p_rx))
    # min() guards against ValueError when the inventory holds fewer than
    # sample_size exchanges (random.sample rejects k > population size)
    exchs = random.sample(inventory, min(sample_size, len(inventory)))
    # Rebuild a slim process carrying only the reference plus the sample
    p_slim = LcProcess(p_ref.uuid, Name=p_ref['Name'])
    p_slim.add_exchange(p_rx.flow, p_rx.direction, value=p_ref.reference_value(p_rx.flow))
    p_slim.add_reference(p_rx.flow, p_rx.direction)
    for x in exchs:
        p_slim.add_exchange(x.flow, x.direction, value=x.value, termination=x.termination)
    a.add_entity_and_children(p_slim)
    a.write_to_file(filename, complete=True, gzip=True)
def _create_process(self, filename):
    """Extract a dataset object from an XML file and return its LcProcess.

    First pass walks the dataset's flowData children, creating each flow,
    classifying its direction, and converting its meanValue into the flow's
    reference quantity; the exchange tagged outputGroup 0 is the (single)
    reference flow.  The process itself is then looked up by name-derived
    key, created if absent, and all collected exchanges are attached.

    :param filename: path of the XML dataset to load
    :return: the LcProcess (new or pre-existing)
    """
    o = self._get_objectified_entity(filename)
    rf = None  # reference flow (exchange with outputGroup == 0), if any
    flowlist = []
    for exch in o.dataset.flowData.getchildren():
        f = self._create_flow(exch)
        if hasattr(exch, 'outputGroup'):
            d = 'Output'
            if exch.outputGroup == 0:
                assert rf is None, "Multiple reference flows found!"
                rf = f
        elif hasattr(exch, 'inputGroup'):
            d = 'Input'
        else:
            raise DirectionlessExchangeError
        local_q = self._create_quantity(exch.get("unit"))
        # NOTE: raises TypeError if 'meanValue' is absent (float(None));
        # the prior comment claiming it "returns none" was wrong
        v = float(exch.get('meanValue'))
        if local_q is not f.reference_entity:
            # convert the value into the flow's reference quantity
            v = v / f.cf(local_q)
        flowlist.append((f, d, v))
    p_meta = o.dataset.metaInformation.processInformation
    n = p_meta.referenceFunction.get('name')
    u = self._key_to_id(n)
    try_p = self[u]
    if try_p is not None:
        p = try_p
        assert p.entity_type == 'process', "Expected process, found %s" % p.entity_type
    else:
        # create new process from the referenceFunction metadata
        g = p_meta.geography.get('location')
        stt = {'begin': str(find_tag(p_meta, 'startDate')[0]),
               'end': str(find_tag(p_meta, 'endDate')[0])}
        c = p_meta.referenceFunction.get('generalComment')
        cls = [p_meta.referenceFunction.get('category'),
               p_meta.referenceFunction.get('subCategory')]
        p = LcProcess(u, Name=n, Comment=c, SpatialScope=g, TemporalScope=stt,
                      Classifications=cls)
        p.set_external_ref(n)
    # Register the reference exchange first, then the full exchange list.
    # (The dead local 'rx' that previously captured add_reference's return
    # value has been removed -- only the call's side effect is needed.)
    if rf is not None:
        p.add_reference(rf, 'Output')
    for flow, f_dir, val in flowlist:
        self._print('Exch %s [%s] (%g)' % (flow, f_dir, val))
        p.add_exchange(flow, f_dir, reference=None, value=val, add_dups=True)
    self.add(p)
    return p