def foreground(self, process, ref_flow=None):
    """
    Returns a list of terminated exchanges beginning with the named process and reference flow, and
    containing the entire foreground (product system model).  Dependencies and emissions are not
    included.

    Fixes: removed a stray, unterminated ``'''`` that trailed the original body (syntax residue),
    and replaced index-based sparse iteration with zip over the parallel arrays.

    :param process:
    :param ref_flow:
    :return: generator of ExchangeValues; the first is the unit reference exchange
    """
    product_flow = self._get_product_flow(process, ref_flow)
    if self._tstack.is_background(product_flow):
        fg = []
    else:
        fg = self._tstack.foreground(product_flow)
    _af, _ad, _bf = self._be.make_foreground(product_flow)
    # first, reference flow
    yield ExchangeValue(product_flow.process, product_flow.flow, product_flow.direction, value=1.0)
    # then, child fragments: each nonzero A_f entry links a node (column) to its term (row)
    rows, cols = _af.nonzero()
    for row, col, val in zip(rows, cols, _af.data):
        node = fg[col]
        term = fg[row]
        yield ExchangeValue(node.process, term.flow, comp_dir(term.direction), value=val,
                            termination=term.process.external_ref)
def _add_alloc_refs(self, arg, flow=None):
    """
    Register each reference exchange of *arg* as a normalized row item, then append *arg*
    as a new column of the table.
    """
    col_idx = len(self._columns)
    for rf in arg.references(flow=flow):
        # we do this to avoid calling RxRef.value
        # (and because table exchanges are normalized to this value, so 1.0 is the only correct value to report)
        rx = ExchangeValue(rf.process, rf.flow, rf.direction, value=1.0)
        rx.set_ref(rf.process)
        row_key = (rf.direction, True, '; '.join(rf.flow['Compartment']), rf.flow['Name'])
        self._add_rowitem(col_idx, rx, row=row_key)
    self._columns.append(arg)
def to_exchange(self):
    """Render this termination as an ExchangeValue, or None when the termination is null."""
    if not self.is_null:
        return ExchangeValue(self.term_node, self.term_flow, self.direction,
                             value=self.inbound_exchange_value)
    return None
def _background_emissions(self, bg_product_flow):
    """
    Generate the direct emission exchanges (the B_f column) for a background product flow.

    Idiom fix: iterate the sparse entries with zip over the parallel (rows, cols, data)
    arrays instead of ``range(len(rows))`` indexing.

    :param bg_product_flow: a product flow that is in the background of the ordering
    :return: generator of ExchangeValues
    """
    _, _, _bf = self._be.make_foreground(bg_product_flow)
    rows, cols = _bf.nonzero()
    for row, col, val in zip(rows, cols, _bf.data):
        # a background node's foreground is a single column, so every entry is in column 0
        assert col == 0
        emis = self._be.emissions[row]
        yield ExchangeValue(bg_product_flow.process, emis.flow, emis.direction, value=val)
def _unobserved_exchanges(self):
    """
    Generator which yields exchanges from the term node's inventory that are not found among the
    child flows, for LCIA purposes

    :return:
    """
    if self.is_fg:
        # a foreground termination contributes exactly its own flow, scaled by node weight
        yield ExchangeValue(self._parent, self.term_flow, self.direction,
                            value=self.node_weight_multiplier)
    elif self.term_is_bg:
        # background terminations contribute nothing unobserved
        for x in []:
            yield x
    # elif self.is_frag:  # fragments can have unobserved exchanges too!
    #     for x in []:
    #         yield x
    else:
        observed = {(self.term_flow.external_ref, self.direction)}
        for cf in self._parent.child_flows:
            observed.add((cf.flow.external_ref, cf.direction))
        if self.is_bg:
            source = self.term_node.lci(ref_flow=self.term_flow)
        else:
            source = self.term_node.inventory(ref_flow=self.term_flow, direction=self.direction)
        for x in source:
            if (x.flow.external_ref, x.direction) not in observed:
                yield x
def _background_dependencies(self, bg_product_flow):
    """
    Generate the direct dependency exchanges (the A_d column) for a background product flow.
    """
    _, _ad, _ = self._be.make_foreground(bg_product_flow)
    rows, cols = _ad.nonzero()
    for row, col, val in zip(rows, cols, _ad.data):
        assert col == 0  # background node's foreground is a single column
        term = self._tstack.bg_node(row)
        yield ExchangeValue(bg_product_flow.process, term.flow, comp_dir(term.direction), value=val,
                            termination=term.process.external_ref)
def inventory(self, process, ref_flow=None, show=None):
    """
    Report the direct dependencies and exterior flows for the named product flow.  If the second
    argument is non-None, print the inventory instead of returning it.
    This should be identical to product_flow.process.inventory() so WHY DID I WRITE IT??????
    ans: because it exposes the allocated matrix model. so it's not the same for allocated processes.

    Fix: entries of self._be.emissions carry .flow / .direction directly (see the sibling
    _background_emissions and lci methods); the original accessed ems.emission.flow, which
    is inconsistent with those and would raise AttributeError on the background branch.

    :param process:
    :param ref_flow: required for multi-product case
    :param show: [None] if present, show rather than return the inventory.
    :return: a list of exchanges.
    """
    product_flow = self._get_product_flow(process, ref_flow)
    ref_ex = ExchangeValue(product_flow.process, product_flow.flow, product_flow.direction, value=1.0)
    ref_ex.set_ref(product_flow.process)
    interior = [ref_ex]
    exterior = []
    if self._tstack.is_background(product_flow):
        # need to index into background matrix
        _af, _ad, _bf = self._be.make_foreground(product_flow)
        for i, row in enumerate(_ad.nonzero()[0]):
            dep = self._tstack.bg_node(row)
            dat = _ad.data[i]
            # negative dependency values are reported as Outputs with positive magnitude
            if dat < 0:
                dirn = 'Output'
            else:
                dirn = 'Input'
            interior.append(ExchangeValue(product_flow.process, dep.flow, dirn, value=abs(dat)))
        for i, row in enumerate(_bf.nonzero()[0]):
            ems = self._be.emissions[row]
            dat = _bf.data[i]
            exterior.append(ExchangeValue(product_flow.process, ems.flow, ems.direction, value=dat))
    else:
        # need to simply access the sparse matrix entries
        for fg in self._be.foreground_dependencies(product_flow):
            dat = fg.value
            if dat < 0:
                dirn = 'Output'
            else:
                dirn = 'Input'
            interior.append(ExchangeValue(product_flow.process, fg.term.flow, dirn, value=dat))
        for em in self._be.foreground_emissions(product_flow):
            # em is a cutoff entry: the flow lives on em.emission
            exterior.append(ExchangeValue(product_flow.process, em.emission.flow,
                                          em.emission.direction, value=em.value))
    if show is None:
        return interior + exterior
    else:
        for x in interior:
            print('%s' % x)
        print('Exterior')
        for x in exterior:
            print('%s' % x)
def _add_lcia_detail(self, res, entity, detail, loc='GLO'):
    """
    Record a single LCIA detail on *res*: fetch the flow named in the detail record, then add
    the (exchange, characterization) pair as a score under the entity's external_ref.
    """
    the_flow = self._archive.retrieve_or_fetch_entity('flows/%s' % detail['flowID'])
    cf = Characterization(the_flow, res.quantity, value=detail['factor'], location=loc)
    x = ExchangeValue(entity, the_flow, detail['direction'], value=detail['quantity'])
    res.add_score(entity.external_ref, x, cf, loc)
def allocated_exchanges(self, reference, strict=False):
    """
    Generate the process's exchanges allocated with respect to *reference*, which may be a flow,
    a scenario key, or anything find_reference() accepts.
    """
    # need to disambiguate the reference
    in_scenario = False
    if isinstance(reference, LcFlow):
        ref = self.find_reference(reference, strict=strict)
    elif reference in self._scenarios.keys():
        in_scenario = True
        ref = self._scenarios[reference]
    else:
        try:
            ref = self.find_reference(reference, strict=strict)
        except NoReferenceFound:
            # NOTE: ref deliberately left unbound here -- will fail if any exchanges are allocated
            pass

    for x in sorted(self._exchanges, key=lambda t: t.direction):
        if not isinstance(x, AllocatedExchange):
            yield x
        elif in_scenario:
            yield ExchangeValue.from_scenario(x, reference, ref)
        else:
            yield ExchangeValue.from_allocated(x, ref.get_uuid())
def retrieve_lcia_scores(self, process_uuid, rf_uuid, quantities=None):
    """
    This function retrieves LCIA scores from an Ecospold02 file and stores them as characterizations in
    an LcFlow entity corresponding to the *first* (and presumably, only) reference intermediate flow

    Only stores cfs for quantities that exist locally.

    :param process_uuid:
    :param rf_uuid:
    :param quantities: list of quantity entities to look for (defaults to all local lcia_methods)

    :return: a dict of quantity uuid to score
    """
    if quantities is None:
        # default: every locally-known quantity that is an LCIA method
        quantities = [l for l in self.entities_by_type('quantity') if l.is_lcia_method()]

    import time
    start_time = time.time()
    print('Loading LCIA results for %s_%s' % (process_uuid, rf_uuid))

    o = self.objectify(process_uuid, rf_uuid)
    self._print('%30.30s -- %5f' % ('Objectified', time.time() - start_time))

    p = self._create_process_entity(o)
    rf = self._grab_reference_flow(o, rf_uuid)

    # scores are recorded per unit of the reference flow
    exch = ExchangeValue(p, rf, 'Output', value=1.0)

    # build a lookup of quantity 'Name' tags, only for quantities that declare a 'Method'
    tags = dict()
    for q in quantities:
        if 'Method' in q.keys():
            if q['Name'] in tags:
                raise KeyError('Name collision %s' % q['Name'])
            tags[q['Name']] = q

    results = LciaResults(p)

    for char in find_tag(o, 'flowData').getchildren():
        if 'impactIndicator' in char.tag:
            m = char.impactMethodName.text
            c = char.impactCategoryName.text
            i = char.name.text
            v = float(char.get('amount'))
            # the joined "method, category, indicator" string must match a local quantity 'Name'
            my_tag = ', '.join([m, c, i])
            if my_tag in tags:
                q = tags[my_tag]
                result = LciaResult(q)
                cf = Characterization(rf, q, value=v, location=p['SpatialScope'])
                result.add_score(p.get_uuid(), exch, cf, p['SpatialScope'])
                results[q.get_uuid()] = result

    self._print('%30.30s -- %5f' % ('Impact scores collected', time.time() - start_time))
    return results
def inventory(self, ref_flow=None, direction=None):
    """
    generate a process's exchanges.  If no reference is supplied, generate unallocated exchanges,
    including all reference exchanges.  If a reference is supplied AND the process is allocated with
    respect to that reference, generate ExchangeValues as allocated to that reference flow, and
    exclude reference exchanges.  If a reference is supplied but the process is NOT allocated to that
    reference, generate unallocated ExchangeValues (excluding the reference itself).

    Reference must be a flow or exchange found in the process's reference entity.

    :param ref_flow:
    :param direction: could help with finding reference
    :return:
    """
    if ref_flow is None:
        ref_exch = None
    else:
        try:
            ref_exch = self.find_exchange(ref_flow, direction=direction)
        except MultipleReferencesFound:
            ref_exch = self.find_exchange(ref_flow, direction=direction, reference=True)

    for x in self._exchanges.values():
        if ref_exch is None:
            # generate unallocated exchanges
            yield x
        elif ref_exch.is_reference:
            # generate allocated, normalized, non-reference exchanges
            if x not in self.reference_entity:
                yield ExchangeValue.from_allocated(x, ref_exch)
        else:
            # generate un-allocated, normalized, non-query exchanges
            if x is not ref_exch:
                yield ExchangeValue.from_allocated(x, ref_exch)
def emissions(self, process, ref_flow=None):
    """
    Return a single node's direct emissions (B_f or B) as a list of exchanges
    :param process:
    :param ref_flow:
    :return:
    """
    pf = self._get_product_flow(process, ref_flow)
    if self._tstack.is_background(pf):
        for x in self._background_emissions(pf):
            yield x
    else:
        for co in self._be.foreground_emissions(pf):  # co isa CutoffEntry
            yield ExchangeValue(co.parent.process, co.emission.flow, co.emission.direction,
                                value=co.value)
def lci(self, process, ref_flow=None, **kwargs):
    """
    Wrapper for compute_lci, returns exchanges with flows (and characterizations) drawn from
    self.archive
    :param process:
    :param ref_flow: required for multi-product case
    :param kwargs: passed to iterative solver: threshold=1e-8, count=100
    :return: list of exchanges.
    """
    product_flow = self._get_product_flow(process, ref_flow)
    # LCI values get normalized by inbound exchange-- we must de-norm
    in_ex = sum(x.value for x in
                product_flow.process.exchange_values(product_flow.flow,
                                                     direction=product_flow.direction))
    # in_ex = process.reference(ref_flow).value
    b = self._be.compute_lci(product_flow, **kwargs)  # comes back as a sparse vector
    for idx, em in enumerate(self._be.emissions):
        val = b[idx, 0]
        if val != 0:
            yield ExchangeValue(product_flow.process, em.flow, em.direction, value=val * in_ex)
def dependencies(self, process, ref_flow=None):
    """
    Return a single node's direct dependencies (A_d or A) as a list of exchanges
    :param process:
    :param ref_flow:
    :return:
    """
    pf = self._get_product_flow(process, ref_flow)
    if self._tstack.is_background(pf):
        for x in self._background_dependencies(pf):
            yield x
    else:
        # dep isa MatrixEntry; keep only background-terminated deps, ordered by background index
        bg_deps = [d for d in self._be.foreground_dependencies(pf)
                   if self.is_in_background(d.term.process, d.term.flow)]
        bg_deps.sort(key=lambda d: self._be.tstack.bg_dict(d.term.index))
        for d in bg_deps:
            val = d.value
            sense = 'Output' if val < 0 else 'Input'
            yield ExchangeValue(d.parent.process, d.term.flow, sense, value=abs(val),
                                termination=d.term.process.external_ref)
def from_cfs(cls, fragment, cfs, scenario=None, location=None):
    """
    for foreground-terminated elementary flows
    returns a dict of LciaResults
    :param fragment:
    :param cfs: a dict of q-uuid to cf sets???
    :param scenario:
    :param location:
    :return:
    """
    results = LciaResults(fragment)
    exch = ExchangeValue(fragment, fragment.flow, fragment.direction, value=1.0)
    frag_uuid = fragment.get_uuid()
    for q, cf in cfs.items():
        qu = q.get_uuid()
        results[qu] = cls(q, scenario=scenario)
        # only true Characterizations get scored; other entries yield empty results
        if isinstance(cf, Characterization):
            results[qu].add_component(frag_uuid, entity=fragment)
            results[qu].add_score(frag_uuid, exch, cf, location)
    return results
def product_flow(self, process, ref_flow=None):
    """Return the named product flow as a unit (value=1.0) reference exchange."""
    pf = self._get_product_flow(process, ref_flow=ref_flow)
    return ExchangeValue(pf.process, pf.flow, pf.direction, value=1.0)
def add_exchange(self, flow, dirn, reference=None, value=None, termination=None, add_dups=False):
    """
    This is used to create Exchanges and ExchangeValues and AllocatedExchanges.

    If the flow+dir+term is already in the exchange set:
        if no reference is specified and/or no value is specified- nothing to do
        otherwise (if reference and value are specified):
            upgrade the exchange to an allocatedExchange and add the new reference exch val
    otherwise:
        if reference is specified, create an AllocatedExchange
        otherwise create an Exchange / ExchangeValue

    :param flow:
    :param dirn:
    :param reference:
    :param value:
    :param termination:
    :param add_dups: (False) set to true to handle "duplicate exchange" errors by cumulating their values
    :return:
    """
    # exchanges are keyed by hash of (process uuid, flow ref, direction, termination)
    _x = hash((self.uuid, flow.external_ref, dirn, termination))
    if _x in self._exchanges:
        # exchange already exists -- update it in place
        if value is None or value == 0:
            return None
        e = self._exchanges[_x]
        if reference is None:
            if isinstance(value, dict):
                # dict value: merge per-reference values into the existing exchange
                e.update(value)
            else:
                try:
                    e.value = value  # this will catch already-set errors
                except DuplicateExchangeError:
                    if add_dups:
                        e.add_to_value(value)
                    else:
                        print('Duplicate exchange in process %s:\n%s' % (self.get_uuid(), e))
                        raise
            return e
        else:
            try:
                e[reference] = value  # this will catch already-set errors
            except DuplicateExchangeError:
                if add_dups:
                    e.add_to_value(value, reference=reference)
                else:
                    print('Duplicate exchange in process %s:\n%s' % (self.get_uuid(), e))
                    raise
            except ValueError:
                print('Error adding [%s] = %10.3g for exchange\n%s\nto process\n%s' % (
                    reference.flow.external_ref, value, e, self.external_ref))
                raise
            return e
    else:
        # exchange is new -- construct it according to the value type
        if isinstance(value, Number) or value is None:
            if reference is None:
                e = ExchangeValue(self, flow, dirn, value=value, termination=termination)
            else:
                if reference not in self.reference_entity:
                    raise KeyError('Specified reference is not registered with process: %s' % reference)
                e = ExchangeValue(self, flow, dirn, value=None, termination=termination)
                e[reference] = value
        elif isinstance(value, dict):
            e = ExchangeValue(self, flow, dirn, value_dict=value, termination=termination)
        else:
            raise TypeError('Unhandled value type %s' % type(value))
        # This is the only point an exchange is added to the process
        self._exchanges[e.key] = e
        self._exch_map[e.flow.external_ref].add(e)
        return e
def foreground(self, process, ref_flow=None, **kwargs):
    """Generate foreground exchanges for *process*, localizing entities via self[...] lookup."""
    process, ref_flow = self._check_ref(process, ref_flow)
    for rec in self._flat.foreground(process, ref_flow):
        yield ExchangeValue(self[rec.process], self[rec.flow], rec.direction,
                            termination=rec.term, value=rec.value)
def add_exchange(self, flow, dirn, reference=None, value=None, add_dups=False, **kwargs):
    """
    This is used to create Exchanges and ExchangeValues and AllocatedExchanges.

    If the flow+dirn is already in the exchange set:
        if no reference is specified and/or no value is specified- nothing to do
        otherwise (if reference and value are specified):
            upgrade the exchange to an allocatedExchange and add the new reference exch val
    otherwise:
        if reference is specified, create an AllocatedExchange
        otherwise create an Exchange / ExchangeValue

    :param flow:
    :param dirn:
    :param reference:
    :param value:
    :param add_dups: (False) set to true to handle "duplicate exchange" errors by cumulating their values
    :return:
    """
    _x = Exchange(self, flow, dirn, **kwargs)
    if _x in self._exchanges:
        # the exchange already exists -- locate and update (possibly upgrade) it
        if value is None or value == 0:
            return None
        e = [x for x in self._exchanges if x == _x][0]
        if reference is None:
            if isinstance(e, AllocatedExchange):
                try:
                    e.value = value  # this will catch already-set errors
                except DuplicateExchangeError:
                    if add_dups:
                        e.add_to_value(value)
                    else:
                        print('Duplicate exchange in process %s:\n%s' % (self.get_uuid(), e))
                        raise
                return e
            else:
                try:
                    # upgrade plain Exchange to ExchangeValue; this will catch already-set errors
                    exch = ExchangeValue.from_exchange(e, value=value)
                    self._exchanges.remove(e)
                    self._exchanges.add(exch)
                    return exch
                except DuplicateExchangeError:
                    if add_dups:
                        e.add_to_value(value)
                        return e
                    else:
                        print('Duplicate exchange in process %s:\n%s' % (self.get_uuid(), e))
                        raise
        else:
            # reference given: upgrade to AllocatedExchange and set the per-reference value
            exch = AllocatedExchange.from_exchange(e)
            if isinstance(value, dict):
                exch.update(value)
            else:
                try:
                    exch[reference] = value  # this will catch already-set errors
                except DuplicateExchangeError:
                    if add_dups:
                        exch.add_to_value(value, reference=reference)
                    else:
                        print('Duplicate exchange in process %s:\n%s' % (self.get_uuid(), e))
                        raise
            self._exchanges.remove(e)
            self._exchanges.add(exch)
            return exch
    else:
        # exchange is new -- construct the appropriate type based on value
        if value is None or value == 0:
            e = _x
        elif isinstance(value, float):
            if reference is None:
                e = ExchangeValue(self, flow, dirn, value=value, **kwargs)
            else:
                if reference not in self.reference_entity:
                    raise KeyError('Specified reference is not registered with process: %s' % reference)
                e = AllocatedExchange(self, flow, dirn, value=value, **kwargs)
                e[reference] = value
        elif isinstance(value, dict):
            e = AllocatedExchange.from_dict(self, flow, dirn, value=value, **kwargs)
        else:
            raise TypeError('Unhandled value type %s' % type(value))
        if e in self._exchanges:
            raise KeyError('Exchange already present')
        self._exchanges.add(e)
        return e
def _direct_exchanges(self, node, x_iter):
    """Translate flat exchange records from *x_iter* into ExchangeValues anchored at *node*."""
    for rec in x_iter:
        yield ExchangeValue(node, self[rec.flow], rec.direction,
                            termination=rec.term, value=rec.value)
def flatten(self, _apply_scale=1.0):
    """
    Return a new LciaResult in which all groupings have been replaced by a set of
    AggregatedLciaScores, one per elementary flow.
    :param: _apply_scale: [1.0] apply a node weighting to the components
    :return:
    """
    flat = LciaResult(self.quantity, scenario=self.scenario, private=self._private, scale=1.0)
    recurse = []  # store flattened summary scores to handle later
    for k, c in self._LciaScores.items():
        if isinstance(c, SummaryLciaResult):
            if c.static:
                # static summaries transfer directly, with the scale folded into the node weight
                flat.add_summary(k, c.entity, c.node_weight * _apply_scale, c.unit_score)
            else:
                recurse.append(c.flatten())
        else:
            for d in c.details():
                flat.add_component(d.flow.uuid, d.flow)
                # create a new exchange that has already had scaling applied
                exch = ExchangeValue(d.exchange.process, d.flow, d.exchange.direction,
                                     value=d.value * _apply_scale)
                flat.add_score(d.flow.uuid, exch, d.factor, d.location)
    for r in recurse:
        for k in r.keys():
            c = r[k]
            if isinstance(c, SummaryLciaResult):
                # guaranteed to be static since r is a flattened LciaResult
                if not c.static:
                    raise InconsistentSummaries(c)
                try:
                    flat.add_summary(k, c.entity, c.node_weight * _apply_scale, c.unit_score)
                except InconsistentScores:
                    print('for key %s' % k)
                    raise
            else:
                for d in c.details():
                    flat.add_component(d.flow.uuid, d.flow)
                    exch = ExchangeValue(d.exchange.process, d.flow, d.exchange.direction,
                                         value=d.value * _apply_scale)
                    flat.add_score(k, exch, d.factor, d.location)
    # sanity check: the flattened total must agree with the scaled original total
    scaled_total = self.total() * _apply_scale
    if not isclose(scaled_total, flat.total(), rel_tol=1e-10):
        print(' LciaResult: %10.4g' % scaled_total)
        print('Flat result: %10.4g' % flat.total())
        print('Difference: %10.4g @ %10.4g' % (flat.total() - scaled_total, _apply_scale))
        if not isclose(scaled_total, flat.total(), rel_tol=1e-6):
            raise ValueError('Total differs by greater than 1e-6! (applied scaling=%10.4g)' % _apply_scale)
    return flat