def content(user):
    """Write a CSV listing every LLFC (Line Loss Factor Class) to a user
    download file, then rename it to mark the download as finished.

    Any error is written as a row of the CSV itself so it is visible in
    the downloaded file.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'llfcs.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            ('Chellow Id', 'DNO Code', 'Code', 'Description', 'Voltage Level',
                'Is Substation?', 'Is Import?', 'Valid From', 'Valid To'))
        # joinedload avoids one extra query per LLFC for dno and
        # voltage_level.
        for llfc in sess.query(Llfc).order_by(Llfc.id).options(
                joinedload(Llfc.dno), joinedload(Llfc.voltage_level)):
            writer.writerow(
                (str(llfc.id), llfc.dno.dno_code, llfc.code, llfc.description,
                    llfc.voltage_level.code, llfc.is_substation,
                    llfc.is_import, hh_format(llfc.valid_from),
                    hh_format(llfc.valid_to)))
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, month, months, supply_id, sess):
    """Generate CSV lines for register reads whose previous or present
    date falls in the period of *months* months ending at *year*/*month*
    (inclusive), optionally restricted to one supply.

    Yields one header line followed by one line per register read.  Any
    failure is yielded as a traceback so it appears in the output.
    """
    try:
        finish_date = Datetime(year, month, 1, tzinfo=pytz.utc) + \
            relativedelta(months=1) - HH
        start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)
        reads = sess.query(RegisterRead).filter(
            or_(
                and_(
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date),
                and_(
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date))) \
            .join(Bill).order_by(Bill.supply_id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            reads = reads.filter(Bill.supply == supply)
        # Note 'Bill Id,Bill Reference' is a single string containing a
        # comma, deliberately producing two CSV columns.
        yield ','.join(
            (
                'Duration Start', 'Duration Finish', 'Supply Id',
                'Import MPAN Core', 'Export MPAN Core', 'Batch Reference',
                'Bill Id,Bill Reference', 'Bill Issue Date', 'Bill Type',
                'Register Read Id', 'TPR', 'Coefficient',
                'Previous Read Date', 'Previous Read Value',
                'Previous Read Type', 'Present Read Date',
                'Present Read Value', 'Present Read Type')) + '\n'
        for read in reads:
            bill = read.bill
            supply = bill.supply
            batch = bill.batch
            era = supply.find_era_at(sess, bill.start_date)
            if era is None:
                # Bill lies outside all eras: fall back to the nearest
                # (first or last) era of the supply.
                eras = sess.query(Era).filter(
                    Era.supply == supply).order_by(Era.start_date).all()
                if bill.start_date < eras[0].start_date:
                    era = eras[0]
                else:
                    era = eras[-1]
            yield ','.join(
                '"' + ('' if val is None else str(val)) + '"' for val in [
                    hh_format(start_date), hh_format(finish_date), supply.id,
                    era.imp_mpan_core, era.exp_mpan_core, batch.reference,
                    bill.id, bill.reference, hh_format(bill.issue_date),
                    bill.bill_type.code, read.id,
                    'md' if read.tpr is None else read.tpr.code,
                    read.coefficient, hh_format(read.previous_date),
                    read.previous_value, read.previous_type.code,
                    hh_format(read.present_date), read.present_value,
                    read.present_type.code]) + '\n'
    except GeneratorExit:
        # The previous bare 'except:' swallowed GeneratorExit and then
        # yielded, which raises RuntimeError when the generator is
        # closed.  Let generator close propagate.
        raise
    except BaseException:
        yield traceback.format_exc()
def content(start_date, finish_date, contract_id, sess):
    """Generate CSV lines of the MOP virtual bill for every era attached
    to the given MOP contract that overlaps the period.

    Yields a header line, then per era: the MPAN cores, the period and
    the virtual-bill values, with any bill keys not covered by the
    titles appended as name/value pairs.  Failures are yielded as a
    traceback so they appear in the output.
    """
    caches = {}
    try:
        contract = Contract.get_mop_by_id(sess, contract_id)
        forecast_date = chellow.computer.forecast_date()
        yield ','.join(
            (
                'Import MPAN Core', 'Export MPAN Core', 'Start Date',
                'Finish Date'))
        bill_titles = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles', None)()
        for title in bill_titles:
            yield ',' + title
        yield '\n'
        for era in sess.query(Era).filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.mop_contract_id == contract.id).order_by(Era.supply_id):
            import_mpan_core = era.imp_mpan_core
            if import_mpan_core is None:
                import_mpan_core_str = ''
            else:
                mpan_core = import_mpan_core
                is_import = True
                import_mpan_core_str = mpan_core

            export_mpan_core = era.exp_mpan_core
            if export_mpan_core is None:
                export_mpan_core_str = ''
            else:
                is_import = False
                mpan_core = export_mpan_core
                export_mpan_core_str = mpan_core

            # NOTE(review): assumes every era has at least one MPAN
            # core; otherwise 'is_import' would be unbound below —
            # confirm against the Era model's constraints.
            yield import_mpan_core_str + ',' + export_mpan_core_str + ',' + \
                hh_format(start_date) + ',' + hh_format(finish_date) + ','
            supply_source = chellow.computer.SupplySource(
                sess, start_date, finish_date, forecast_date, era, is_import,
                None, caches)
            chellow.computer.contract_func(
                caches, contract, 'virtual_bill', None)(supply_source)
            bill = supply_source.mop_bill
            for title in bill_titles:
                if title in bill:
                    yield '"' + str(bill[title]) + '",'
                    del bill[title]
                else:
                    yield ','
            for k in sorted(bill.keys()):
                yield ',"' + k + '","' + str(bill[k]) + '"'
            yield '\n'
    except GeneratorExit:
        # The previous bare 'except:' swallowed GeneratorExit and then
        # yielded, raising RuntimeError on generator close.
        raise
    except BaseException:
        yield traceback.format_exc()
def hh(data_source):
    """Attach SSP and SBP system prices (GBP/kWh) to every half-hour of
    the data source and compute the corresponding GBP amounts from NBP
    kWh."""
    ssp_rate_set = data_source.supplier_rate_sets['ssp-rate']
    sbp_rate_set = data_source.supplier_rate_sets['sbp-rate']

    try:
        system_price_cache = data_source.caches['system_price']
    except KeyError:
        data_source.caches['system_price'] = {}
        system_price_cache = data_source.caches['system_price']

    try:
        future_funcs = data_source.caches['future_funcs']
    except KeyError:
        future_funcs = {}
        data_source.caches['future_funcs'] = future_funcs

    db_id = get_non_core_contract_id('system_price')
    try:
        future_funcs[db_id]
    except KeyError:
        # Default extrapolation of future rates: multiplier 1, offset 0,
        # i.e. carry the latest rates forward unchanged.
        future_funcs[db_id] = {
            'start_date': None, 'func': create_future_func(1, 0)}

    for h in data_source.hh_data:
        try:
            sbp, ssp = system_price_cache[h['start-date']]
        except KeyError:
            db_id = get_non_core_contract_id('system_price')
            h_start = h['start-date']
            rates = data_source.hh_rate(db_id, h_start, 'gbp_per_nbp_mwh')
            try:
                rdict = rates[key_format(h_start)]
                sbp = rdict['sbp'] / 1000  # GBP/MWh -> GBP/kWh
                ssp = rdict['ssp'] / 1000
                system_price_cache[h_start] = (sbp, ssp)
            except KeyError:
                raise BadRequest(
                    "For the System Price rate script at " +
                    hh_format(h_start) + " the rate cannot be found.")
            except TypeError:
                raise BadRequest(
                    "For the System Price rate script at " +
                    hh_format(h_start) +
                    " the rate 'rates_gbp_per_mwh' has the problem: " +
                    traceback.format_exc())

        h['sbp'] = sbp
        h['sbp-gbp'] = h['nbp-kwh'] * sbp
        sbp_rate_set.add(sbp)

        h['ssp'] = ssp
        h['ssp-gbp'] = h['nbp-kwh'] * ssp
        ssp_rate_set.add(ssp)
def _make_row(llfc, ss_llfc): if ( llfc.voltage_level.code != ss_llfc["voltage_level"] or llfc.is_substation != ss_llfc["is_substation"] ): if llfc.voltage_level.code != ss_llfc["voltage_level"]: new_vl = ss_llfc["voltage_level"] else: new_vl = "{no change}" if llfc.is_substation != ss_llfc["is_substation"]: new_ss = ss_llfc["is_substation"] else: new_ss = "{no change}" return ( "update", "llfc", llfc.dno.dno_code, llfc.code, hh_format(llfc.valid_from), "{no change}", new_vl, new_ss, "{no change}", "{no change}", )
def hh(data_source):
    """Apply the RCRC rate (GBP/kWh) to each half-hour and accumulate
    RCRC kWh and GBP into the supplier bill."""
    bill = data_source.supplier_bill
    try:
        cache = data_source.caches['rcrc']
    except KeyError:
        cache = {}
        data_source.caches['rcrc'] = cache
    rate_set = data_source.supplier_rate_sets['rcrc-rate']

    # NOTE(review): the loop variable shadows this function's name.
    for hh in data_source.hh_data:
        try:
            hh['rcrc-gbp-per-kwh'] = rcrc = cache[hh['start-date']]
        except KeyError:
            h_start = hh['start-date']
            db_id = get_non_core_contract_id('rcrc')
            rates = data_source.hh_rate(db_id, h_start, 'rates')
            try:
                # Rates are keyed by "dd HH:MM Z"; divide to convert
                # GBP/MWh -> GBP/kWh.
                hh['rcrc-gbp-per-kwh'] = rcrc = cache[h_start] = \
                    rates[h_start.strftime("%d %H:%M Z")] / 1000
            except KeyError:
                raise BadRequest(
                    "For the RCRC rate script at " + hh_format(h_start) +
                    " the rate cannot be found.")
        rate_set.add(rcrc)
        bill['rcrc-kwh'] += hh['nbp-kwh']
        bill['rcrc-gbp'] += hh['nbp-kwh'] * rcrc
def hh(data_source):
    """Apply the RCRC rate (GBP/kWh) to each half-hour, accumulating
    RCRC kWh and GBP into the supplier bill.  When the half-hour's own
    rate is missing, fall back to the rate three days earlier."""
    bill = data_source.supplier_bill
    try:
        cache = data_source.caches['rcrc']
    except KeyError:
        cache = {}
        data_source.caches['rcrc'] = cache
    rate_set = data_source.supplier_rate_sets['rcrc-rate']

    # NOTE(review): the loop variable shadows this function's name.
    for hh in data_source.hh_data:
        try:
            hh['rcrc-gbp-per-kwh'] = rcrc = cache[hh['start-date']]
        except KeyError:
            h_start = hh['start-date']
            db_id = get_non_core_contract_id('rcrc')
            rates = data_source.hh_rate(db_id, h_start)['rates']
            try:
                # GBP/MWh -> GBP/kWh
                hh['rcrc-gbp-per-kwh'] = rcrc = cache[h_start] = float(
                    rates[key_format(h_start)]) / 1000
            except KeyError:
                try:
                    # Fall back to the same half-hour three days earlier.
                    dt = h_start - relativedelta(days=3)
                    hh['rcrc-gbp-per-kwh'] = rcrc = cache[h_start] = float(
                        rates[key_format(dt)]) / 1000
                except KeyError:
                    # NOTE: the message reports the fallback date 'dt',
                    # not the original half-hour.
                    raise BadRequest("For the RCRC rate script at " +
                                     hh_format(dt) +
                                     " the rate cannot be found.")
        rate_set.add(rcrc)
        bill['rcrc-kwh'] += hh['nbp-kwh']
        bill['rcrc-gbp'] += hh['nbp-kwh'] * rcrc
def mpan_bit(
        sess, supply, is_import, num_hh, era, chunk_start, chunk_finish,
        forecast_date, caches):
    """Return the report values for one side (import or export) of an
    era: LLFC code, MPAN core, supply capacity, supplier contract name,
    consumption totals and maximum-demand (MD) figures.

    When the era has no MPAN core on this side, the string fields
    (including num_bad) are returned as '' and the numeric totals as 0.

    NOTE(review): the 'supply' and 'num_hh' parameters are unused here.
    """
    mpan_core_str = llfc_code = sc_str = supplier_contract_name = num_bad = ''
    gsp_kwh = msp_kwh = md = non_actual = 0
    date_at_md = kvarh_at_md = None
    mpan_core = era.imp_mpan_core if is_import else era.exp_mpan_core
    if mpan_core is not None:
        # Switch num_bad from the '' placeholder to a real counter.
        if num_bad == '':
            num_bad = 0
        mpan_core_str = mpan_core
        if is_import:
            supplier_contract_name = era.imp_supplier_contract.name
            llfc = era.imp_llfc
            sc = era.imp_sc
        else:
            supplier_contract_name = era.exp_supplier_contract.name
            llfc = era.exp_llfc
            sc = era.exp_sc
        llfc_code = llfc.code
        sc_str = str(sc)
        supply_source = chellow.computer.SupplySource(
            sess, chunk_start, chunk_finish, forecast_date, era, is_import,
            caches)
        chellow.duos.duos_vb(supply_source)
        for hh in supply_source.hh_data:
            gsp_kwh += hh['gsp-kwh']
            hh_msp_kwh = hh['msp-kwh']
            msp_kwh += hh_msp_kwh
            if hh['status'] != 'A':
                # Non-actual half-hour (status other than 'A').
                num_bad += 1
                non_actual += hh_msp_kwh
            if hh_msp_kwh > md:
                md = hh_msp_kwh
                date_at_md = hh['start-date']
        if date_at_md is not None:
            # Largest reactive (non-ACTIVE channel) value at the MD
            # half-hour.
            kvarh_at_md = sess.query(
                cast(func.max(HhDatum.value), Float)).join(
                Channel).filter(
                Channel.era == era, Channel.imp_related == is_import,
                Channel.channel_type != 'ACTIVE',
                HhDatum.start_date == date_at_md).scalar()
    # Multiplying half-hourly kWh by 2 gives kW.
    kw_at_md = md * 2
    if kvarh_at_md is None:
        # NOTE(review): the string 'None' (not None) is placed in the
        # output here.
        kva_at_md = 'None'
    else:
        kva_at_md = (kw_at_md ** 2 + (kvarh_at_md * 2) ** 2) ** 0.5
    date_at_md_str = '' if date_at_md is None else hh_format(date_at_md)
    return [
        llfc_code, mpan_core_str, sc_str, supplier_contract_name, msp_kwh,
        non_actual, gsp_kwh, kw_at_md, date_at_md_str, kva_at_md, num_bad]
def hh(data_source):
    """Attach the RCRC rate, kWh and GBP to each half-hour, falling back
    to the rate three days earlier when the half-hour's own rate is
    missing."""
    try:
        cache = data_source.caches["rcrc"]
    except KeyError:
        cache = data_source.caches["rcrc"] = {}

    # NOTE(review): the loop variable shadows this function's name.
    for hh in data_source.hh_data:
        try:
            hh["rcrc-rate"] = rcrc = cache[hh["start-date"]]
        except KeyError:
            h_start = hh["start-date"]
            db_id = get_non_core_contract_id("rcrc")
            rates = data_source.hh_rate(db_id, h_start)["rates"]
            try:
                # GBP/MWh -> GBP/kWh
                hh["rcrc-rate"] = rcrc = cache[h_start] = (
                    float(rates[key_format(h_start)]) / 1000
                )
            except KeyError:
                try:
                    # Fall back to the same half-hour three days earlier.
                    dt = h_start - relativedelta(days=3)
                    hh["rcrc-rate"] = rcrc = cache[h_start] = (
                        float(rates[key_format(dt)]) / 1000
                    )
                except KeyError:
                    # NOTE: the message reports the fallback date 'dt'.
                    raise BadRequest(
                        "For the RCRC rate script at "
                        + hh_format(dt)
                        + " the rate cannot be found."
                    )
        hh["rcrc-kwh"] = hh["nbp-kwh"]
        hh["rcrc-gbp"] = hh["nbp-kwh"] * rcrc
def content(user):
    """Write a CSV listing every LLFC to a user download file, then
    rename it to mark the download as finished.

    Any error is written as a row of the CSV itself so it is visible in
    the downloaded file.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names("llfcs.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        writer.writerow(
            (
                "Chellow Id",
                "DNO Code",
                "Code",
                "Description",
                "Voltage Level",
                "Is Substation?",
                "Is Import?",
                "Valid From",
                "Valid To",
            )
        )
        # joinedload avoids one extra query per LLFC for dno and
        # voltage_level.
        for llfc in (
            sess.query(Llfc)
            .order_by(Llfc.id)
            .options(joinedload(Llfc.dno), joinedload(Llfc.voltage_level))
        ):
            writer.writerow(
                (
                    str(llfc.id),
                    llfc.dno.dno_code,
                    llfc.code,
                    llfc.description,
                    llfc.voltage_level.code,
                    llfc.is_substation,
                    llfc.is_import,
                    hh_format(llfc.valid_from),
                    hh_format(llfc.valid_to),
                )
            )
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def _parse_Market_Participant_Role(sess, csv_reader):
    """Diff Market Participant Role CSV rows against Party records,
    returning the 'insert' / 'update' rows needed to bring the database
    into line."""
    rows = []
    for values in csv_reader:
        participant_code = values[0]
        market_role_code = values[1]
        valid_from = parse_date(values[2])
        party = (sess.query(Party).join(Participant).join(MarketRole).filter(
            Party.valid_from == valid_from,
            Participant.code == participant_code,
            MarketRole.code == market_role_code,
        ).first())
        valid_to = parse_to_date(values[3])
        name = values[4]
        dno_code_str = values[14]
        dno_code = None if len(dno_code_str) == 0 else dno_code_str
        # Rows with DNO code '99' are skipped entirely.
        if dno_code == "99":
            continue
        if party is None:
            row = (
                "insert",
                "party",
                market_role_code,
                participant_code,
                name,
                hh_format(valid_from),
                hh_format(valid_to, ongoing_str=""),
                dno_code_str,
            )
            rows.append(row)
        elif any((name != party.name, dno_code != party.dno_code,
                  valid_to != party.valid_to)):
            row = (
                "update",
                "party",
                market_role_code,
                participant_code,
                name,
                hh_format(valid_from),
                hh_format(valid_to, ongoing_str=""),
                dno_code_str,
            )
            rows.append(row)
    return rows
def hh(data_source):
    """Apply the BSUoS rate (GBP/kWh) to each half-hour and accumulate
    BSUoS kWh and GBP into the supplier bill."""
    rate_set = data_source.supplier_rate_sets['bsuos-rate']
    bill = data_source.supplier_bill

    try:
        bsuos_cache = data_source.caches['bsuos']
    except KeyError:
        data_source.caches['bsuos'] = {}
        bsuos_cache = data_source.caches['bsuos']

    try:
        future_funcs = data_source.caches['future_funcs']
    except KeyError:
        future_funcs = {}
        data_source.caches['future_funcs'] = future_funcs

    db_id = get_non_core_contract_id('bsuos')
    try:
        future_funcs[db_id]
    except KeyError:
        # Default extrapolation of future rates: multiplier 1, offset 0.
        future_funcs[db_id] = {
            'start_date': None, 'func': create_future_func(1, 0)}

    for h in data_source.hh_data:
        try:
            h['bsuos-gbp-per-kwh'] = bsuos_rate = bsuos_cache[h['start-date']]
        except KeyError:
            h_start = h['start-date']
            db_id = get_non_core_contract_id('bsuos')
            rates = data_source.hh_rate(db_id, h_start, 'rates_gbp_per_mwh')
            try:
                # Rates keyed by "dd HH:MM Z"; GBP/MWh -> GBP/kWh.
                h['bsuos-gbp-per-kwh'] = bsuos_rate = bsuos_cache[h_start] = \
                    float(rates[h_start.strftime("%d %H:%M Z")]) / 1000
            except KeyError:
                raise BadRequest(
                    "For the BSUoS rate script at " + hh_format(h_start) +
                    " the rate cannot be found.")
            except TypeError as e:
                raise BadRequest(
                    "For the BSUoS rate script at " + hh_format(h_start) +
                    " the rate 'rates_gbp_per_mwh' has the problem: " +
                    str(e))

        bill['bsuos-kwh'] += h['nbp-kwh']
        h['bsuos-gbp'] = h['nbp-kwh'] * bsuos_rate
        bill['bsuos-gbp'] += h['bsuos-gbp']
        rate_set.add(bsuos_rate)
def content(user):
    """Write a CSV of all site snags, with their ages and durations in
    (fractional) days, to a user download file."""
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'site_snags.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            ('Chellow Id', 'Site Code', 'Site Name', 'Snag Description',
                'Start Date', 'Finish Date', 'Days Since Snag Finished',
                'Duration Of Snag (Days)', 'Is Ignored?'))
        now = Datetime.now(pytz.utc)
        for snag in sess.query(Snag).join(Site).filter(
                Snag.site != null()).order_by(Site.code, Snag.description,
                                              Snag.start_date, Snag.id).options(
                joinedload(Snag.site)):
            snag_start = snag.start_date
            snag_finish = snag.finish_date
            if snag_finish is None:
                # Open snag: duration runs up to now, and the
                # time-since-finished is zero.
                duration = now - snag_start
                age_of_snag = datetime.timedelta(0)
            else:
                duration = snag_finish - snag_start
                age_of_snag = now - snag_finish
            writer.writerow(
                (str(snag.id), snag.site.code, snag.site.name,
                    snag.description, hh_format(snag_start),
                    hh_format(snag_finish),
                    # timedelta expressed as fractional days.
                    str(age_of_snag.days + age_of_snag.seconds / (3600 * 24)),
                    str(duration.days + duration.seconds / (3600 * 24)),
                    str(snag.is_ignored)))
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def g_rates(sess, caches, g_contract_id, date):
    """Return the rate-script values (a PropDict) for a gas contract at
    the given half-hour, caching per contract and half-hour.

    On a cache miss the relevant rate script's values are stored for
    every half-hour in a window of roughly a month either side of
    *date*, clamped to the rate script's own validity, so neighbouring
    look-ups hit the cache.
    """
    try:
        return caches['g_engine']['rates'][g_contract_id][date]
    except KeyError:
        # Build any missing levels of the nested cache.
        try:
            ccache = caches['g_engine']
        except KeyError:
            ccache = caches['g_engine'] = {}

        try:
            rss_cache = ccache['rates']
        except KeyError:
            rss_cache = ccache['rates'] = {}

        try:
            cont_cache = rss_cache[g_contract_id]
        except KeyError:
            cont_cache = rss_cache[g_contract_id] = {}

        try:
            return cont_cache[date]
        except KeyError:
            # Bounds of the caching window around *date*.
            month_after = date + relativedelta(months=1) + relativedelta(
                days=1)
            month_before = date - relativedelta(months=1) - relativedelta(
                days=1)

            # Find the rate script that covers *date*.
            rs = sess.query(GRateScript).filter(
                GRateScript.g_contract_id == g_contract_id,
                GRateScript.start_date <= date,
                or_(GRateScript.finish_date == null(),
                    GRateScript.finish_date >= date)).first()

            if rs is None:
                # No covering script: fall back to the latest script and
                # clamp the window so it doesn't overlap that script's
                # own validity period.
                rs = sess.query(GRateScript).filter(
                    GRateScript.g_contract_id == g_contract_id).order_by(
                    GRateScript.start_date.desc()).first()
                if date < rs.start_date:
                    cstart = month_before
                    cfinish = min(month_after, rs.start_date - HH)
                else:
                    cstart = max(rs.finish_date + HH, month_before)
                    cfinish = month_after
            else:
                cstart = max(rs.start_date, month_before)
                if rs.finish_date is None:
                    cfinish = month_after
                else:
                    cfinish = min(rs.finish_date, month_after)

            vals = PropDict(
                "the local rate script for contract " + str(g_contract_id) +
                " at " + hh_format(cstart) + ".", loads(rs.script), [])
            # Prime the cache for the whole window with the same values.
            for dt in hh_range(caches, cstart, cfinish):
                if dt not in cont_cache:
                    cont_cache[dt] = vals
            return vals
def make_val(v):
    """Collapse a value for output: a singleton set becomes its sole
    member, any other set becomes '', a datetime is hh-formatted, and
    everything else is returned unchanged."""
    if isinstance(v, Datetime):
        return hh_format(v)
    if isinstance(v, set):
        return make_val(v.pop()) if len(v) == 1 else ''
    return v
def to_val(v):
    """Render a value as text: datetimes are hh-formatted, a singleton
    set collapses to its member's rendering, any other set becomes '',
    and everything else is str()'d."""
    if isinstance(v, set):
        return to_val(v.pop()) if len(v) == 1 else ''
    if isinstance(v, Datetime):
        return hh_format(v)
    return str(v)
def hh(data_source):
    """Attach SSP and SBP system prices (GBP/kWh) to each half-hour and
    compute the corresponding GBP amounts from NBP kWh, falling back to
    the rate three days earlier when the half-hour's own rate is
    missing."""
    ssp_rate_set = data_source.supplier_rate_sets['ssp-rate']
    sbp_rate_set = data_source.supplier_rate_sets['sbp-rate']

    for h in data_source.hh_data:
        try:
            sbp, ssp = data_source.caches['system_price'][h['start-date']]
        except KeyError:
            try:
                system_price_cache = data_source.caches['system_price']
            except KeyError:
                system_price_cache = data_source.caches['system_price'] = {}

            db_id = get_non_core_contract_id('system_price')
            h_start = h['start-date']
            rates = data_source.hh_rate(db_id, h_start)['gbp_per_nbp_mwh']
            try:
                try:
                    rdict = rates[key_format(h_start)]
                except KeyError:
                    # Fall back to the same half-hour three days earlier.
                    rdict = rates[key_format(h_start - Timedelta(days=3))]
                sbp = float(rdict['sbp'] / 1000)  # GBP/MWh -> GBP/kWh
                ssp = float(rdict['ssp'] / 1000)
                system_price_cache[h_start] = (sbp, ssp)
            except KeyError:
                raise BadRequest("For the System Price rate script at " +
                                 hh_format(h_start) +
                                 " the rate cannot be found.")
            except TypeError:
                raise BadRequest(
                    "For the System Price rate script at " +
                    hh_format(h_start) +
                    " the rate 'rates_gbp_per_mwh' has the problem: " +
                    traceback.format_exc())

        h['sbp'] = sbp
        h['sbp-gbp'] = h['nbp-kwh'] * sbp
        sbp_rate_set.add(sbp)

        h['ssp'] = ssp
        h['ssp-gbp'] = h['nbp-kwh'] * ssp
        ssp_rate_set.add(ssp)
def make_val(v):
    """Normalise a cell value for output.

    Singleton sets collapse to their only member (recursively), larger
    or empty sets become "", datetimes are hh-formatted, and anything
    else passes through untouched.
    """
    if isinstance(v, set):
        if len(v) != 1:
            return ""
        return make_val(v.pop())
    if isinstance(v, Datetime):
        return hh_format(v)
    return v
def hh(data_source):
    """Attach the TLM (transmission loss multiplier) to each half-hour
    and derive NBP kWh from GSP kWh."""
    rate_set = data_source.supplier_rate_sets['tlm']
    try:
        cache = data_source.caches['tlms']
    except KeyError:
        cache = {}
        data_source.caches['tlms'] = cache

    try:
        future_funcs = data_source.caches['future_funcs']
    except KeyError:
        future_funcs = {}
        data_source.caches['future_funcs'] = future_funcs

    db_id = get_non_core_contract_id('tlms')
    try:
        future_funcs[db_id]
    except KeyError:
        future_funcs[db_id] = {'start_date': None, 'func': tlms_future}

    for h in data_source.hh_data:
        try:
            h['tlm'] = tlm = cache[h['start-date']]
        except KeyError:
            h_start = h['start-date']
            db_id = get_non_core_contract_id('tlms')
            rates = data_source.hh_rate(db_id, h_start, 'tlms')
            try:
                # Rates keyed by "dd HH:MM Z".
                h['tlm'] = tlm = cache[h_start] = \
                    rates[h_start.strftime("%d %H:%M Z")]
            except KeyError:
                raise BadRequest(
                    "For the TLMs rate script at " + hh_format(h_start) +
                    " the rate cannot be found.")
            except TypeError as e:
                raise BadRequest(
                    "For the TLMs rate script at " + hh_format(h_start) +
                    " the rate 'tlms' has the problem: " + str(e))
        rate_set.add(tlm)
        # Scale GSP energy to NBP energy by the loss multiplier.
        h['nbp-kwh'] = h['gsp-kwh'] * tlm
def hh(data_source):
    """Attach SSP and SBP system prices (GBP/kWh) to each half-hour and
    compute the corresponding GBP amounts from NBP kWh, falling back to
    the rate three days earlier when the half-hour's own rate is
    missing."""
    for h in data_source.hh_data:
        try:
            sbp, ssp = data_source.caches["system_price"][h["start-date"]]
        except KeyError:
            try:
                system_price_cache = data_source.caches["system_price"]
            except KeyError:
                system_price_cache = data_source.caches["system_price"] = {}

            db_id = get_non_core_contract_id("system_price")
            h_start = h["start-date"]
            rates = data_source.hh_rate(db_id, h_start)["gbp_per_nbp_mwh"]
            try:
                try:
                    rdict = rates[key_format(h_start)]
                except KeyError:
                    # Fall back to the same half-hour three days earlier.
                    rdict = rates[key_format(h_start - Timedelta(days=3))]
                sbp = float(rdict["sbp"] / 1000)  # GBP/MWh -> GBP/kWh
                ssp = float(rdict["ssp"] / 1000)
                system_price_cache[h_start] = (sbp, ssp)
            except KeyError:
                raise BadRequest(
                    "For the System Price rate script at "
                    + hh_format(h_start)
                    + " the rate cannot be found."
                )
            except TypeError:
                raise BadRequest(
                    "For the System Price rate script at "
                    + hh_format(h_start)
                    + " the rate 'rates_gbp_per_mwh' has the problem: "
                    + traceback.format_exc()
                )

        h["sbp"] = sbp
        h["sbp-gbp"] = h["nbp-kwh"] * sbp

        h["ssp"] = ssp
        h["ssp-gbp"] = h["nbp-kwh"] * ssp
def csv_str(row):
    """Render *row* as one line of double-quoted CSV, mapping None to
    the empty string and hh-formatting datetimes."""
    def fmt(cell):
        # Per-cell rendering rules.
        if cell is None:
            return ""
        if isinstance(cell, Datetime):
            return hh_format(cell)
        return str(cell)

    return ",".join('"' + fmt(cell) + '"' for cell in row) + "\n"
def _parse_MTC_Meter_Type(sess, csv_reader):
    """Diff MTC Meter Type CSV rows against MeterType records, returning
    the 'insert' / 'update' rows needed to bring the database into
    line."""
    rows = []
    for values in csv_reader:
        code = values[0]
        description = values[1]
        valid_from = parse_date(values[2])
        valid_to = parse_to_date(values[3])
        pt = (sess.query(MeterType).filter(
            MeterType.code == code,
            MeterType.valid_from == valid_from).first())
        if pt is None:
            row = (
                "insert",
                "meter_type",
                code,
                description,
                hh_format(valid_from),
                hh_format(valid_to, ongoing_str=""),
            )
            rows.append(row)
        elif (description, valid_from, valid_to) != (
            pt.description,
            pt.valid_from,
            pt.valid_to,
        ):
            # NOTE(review): unlike the insert branch, valid_to here is
            # formatted without ongoing_str="" — confirm this difference
            # is intended.
            row = (
                "update",
                "meter_type",
                code,
                description,
                hh_format(valid_from),
                hh_format(valid_to),
            )
            rows.append(row)
    return rows
def do_get(sess):
    """Start a bill-check report in a background thread and redirect to
    the downloads page.

    The request must supply one of: batch_id, bill_id, or contract_id
    with start_date and finish_date; otherwise BadRequest is raised.
    An optional 'mpan_cores' value restricts the report, one MPAN core
    per line.
    """
    batch_id = bill_id = contract_id = start_date = finish_date = None
    if "mpan_cores" in request.values:
        mpan_cores = req_str("mpan_cores").splitlines()
    else:
        mpan_cores = []
    fname_additional = ""
    if "batch_id" in request.values:
        batch_id = req_int("batch_id")
        batch = Batch.get_by_id(sess, batch_id)
        fname_additional = f"_batch_{batch.reference}"
    elif "bill_id" in request.values:
        bill_id = req_int("bill_id")
        bill = Bill.get_by_id(sess, bill_id)
        fname_additional = "bill_" + str(bill.id)
    elif "contract_id" in request.values:
        contract_id = req_int("contract_id")
        contract = Contract.get_by_id(sess, contract_id)
        start_date = req_date("start_date")
        finish_date = req_date("finish_date")
        s = ["contract", str(contract.id)]
        for dt in (start_date, finish_date):
            # Make the dates filename-safe.
            s.append(hh_format(dt).replace(" ", "T").replace(":", ""))
        fname_additional = "_".join(s)
    else:
        raise BadRequest(
            "The bill check needs a batch_id, a bill_id or a start_date "
            "and finish_date.")
    args = (
        batch_id,
        bill_id,
        contract_id,
        start_date,
        finish_date,
        g.user,
        mpan_cores,
        fname_additional,
    )
    # Leftover debug print(args) removed; run the report in the
    # background.
    threading.Thread(target=content, args=args).start()
    return chellow_redirect("/downloads", 303)
def content(start_date, finish_date, g_contract_id, user):
    """Write a CSV of gas virtual bills, month by month, for every era
    of the given gas contract over the period, to a user download file.

    Errors are written to the output file; the file is closed and
    renamed so the download is marked finished.
    """
    report_context = {}
    # Initialise everything the except/finally clauses reference, so an
    # early failure (e.g. in Session() or open()) can't raise NameError
    # and mask the original exception.
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "gas_virtual_bills.csv", user
        )
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        g_contract = GContract.get_by_id(sess, g_contract_id)
        forecast_dt = forecast_date()
        month_start = utc_datetime(start_date.year, start_date.month, 1)
        month_finish = month_start + relativedelta(months=1) - HH
        bill_titles = contract_func(
            report_context, g_contract, "virtual_bill_titles")()
        writer.writerow(
            ["MPRN", "Site Code", "Site Name", "Account", "From", "To"]
            + bill_titles
        )
        while not month_start > finish_date:
            # Clamp the month to the requested period.
            period_start = hh_max(start_date, month_start)
            period_finish = hh_min(finish_date, month_finish)
            for g_era in (
                sess.query(GEra)
                .distinct()
                .filter(
                    GEra.g_contract == g_contract,
                    GEra.start_date <= period_finish,
                    or_(GEra.finish_date == null(),
                        GEra.finish_date >= period_start),
                )
            ):
                # Clamp the era to the period.
                chunk_start = hh_max(g_era.start_date, period_start)
                chunk_finish = hh_min(g_era.finish_date, period_finish)
                data_source = GDataSource(
                    sess,
                    chunk_start,
                    chunk_finish,
                    forecast_dt,
                    g_era,
                    report_context,
                    None,
                )
                site = (
                    sess.query(Site)
                    .join(SiteGEra)
                    .filter(SiteGEra.g_era == g_era,
                            SiteGEra.is_physical == true())
                    .one()
                )
                vals = [
                    data_source.mprn,
                    site.code,
                    site.name,
                    data_source.account,
                    hh_format(data_source.start_date),
                    hh_format(data_source.finish_date),
                ]
                contract_func(
                    report_context, g_contract, "virtual_bill")(data_source)
                bill = data_source.bill
                for title in bill_titles:
                    if title in bill:
                        val = make_val(bill[title])
                        del bill[title]
                    else:
                        val = ""
                    vals.append(val)
                # Bill keys not covered by the titles are appended as
                # name / value pairs.
                for k in sorted(bill.keys()):
                    vals.append(k)
                    vals.append(str(bill[k]))
                writer.writerow(vals)
            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except BadRequest as e:
        if writer is not None:
            writer.writerow(["Problem: " + e.description])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(contract_id, end_year, end_month, months, user):
    """Write a CSV of HHDC virtual bills for every era of the given HHDC
    contract over the *months*-month period ending at end_year/end_month,
    to a user download file.

    Errors are appended to the output file; the file is closed and
    renamed so the download is marked finished.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        contract = Contract.get_hhdc_by_id(sess, contract_id)
        finish_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) + \
            relativedelta(months=1) - HH
        start_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months - 1)
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            'hhdc_virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        bill_titles = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')()
        header_titles = [
            'Import MPAN Core', 'Export MPAN Core', 'Start Date',
            'Finish Date']
        vb_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill')
        writer.writerow(header_titles + bill_titles)

        for era in sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.hhdc_contract == contract).order_by(Era.supply_id):
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ''
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = '' if exp_mpan_core is None else exp_mpan_core

            # Clamp the era to the requested period.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            vals = [
                imp_mpan_core_str, exp_mpan_core_str,
                hh_format(chunk_start), hh_format(chunk_finish)]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            for title in bill_titles:
                vals.append(str(bill.get(title, '')))
                if title in bill:
                    del bill[title]
            # Bill keys not covered by the titles are appended as
            # name / value pairs.
            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(str(bill[k]))
            writer.writerow(vals)
    except BadRequest as e:
        # Guard: the failure may occur before the file is opened.
        if f is not None:
            f.write(
                "Problem " + e.description + traceback.format_exc() + '\n')
    except BaseException:
        # Previously a bare 'except:'; made explicit, and guarded so a
        # failure before open() isn't masked by an AttributeError.
        msg = "Problem " + traceback.format_exc() + '\n'
        if f is not None:
            f.write(msg)
    finally:
        # Previously f.close() and os.rename ran unconditionally,
        # raising (and masking the real error) when the file was never
        # opened.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
def content(table, version, fin, user):
    """Compare a downloaded MDD table CSV against the database and write
    a general-import CSV of the 'insert' / 'update' rows needed to bring
    the database into line.

    table: MDD table name, e.g. 'Line_Loss_Factor_Class'
    version: version string, used only in the output file name
    fin: iterable of CSV text lines; the first row is a header and is
        skipped
    user: the user the download file belongs to
    """
    # Initialise everything the except/finally clauses reference, so an
    # early failure (e.g. in Session() or open()) can't raise NameError
    # and mask the original exception.
    sess = f = w = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            table + '_' + version + '_general_import.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        reader = iter(csv.reader(fin))
        next(reader)  # Skip the header row.
        if table == 'Line_Loss_Factor_Class':
            VOLTAGE_LEVEL_CODES = set(
                [v.code for v in sess.query(VoltageLevel)])
            DNO_MAP = dict(
                (dno.participant.code, dno) for dno in sess.query(Party).
                join(MarketRole).filter(MarketRole.code == 'R').options(
                    joinedload(Party.participant)))
            for i, values in enumerate(reader):
                participant_code = values[0]
                # market_role_code = values[1]
                llfc_code = values[3].zfill(3)
                valid_from = parse_date(values[4])
                description = values[5]
                is_import = values[6] in ('A', 'B')
                # Infer the substation flag from markers in the
                # description text.
                is_substation = any(
                    p in description for p in (
                        '_SS', ' SS', ' S/S', '(S/S)', 'sub', 'Sub'))
                valid_to = parse_to_date(values[7])
                # Default to LV unless a voltage level code appears in
                # the description.
                voltage_level_code = 'LV'
                description_upper = description.upper()
                for vl_code in VOLTAGE_LEVEL_CODES:
                    if vl_code in description_upper:
                        voltage_level_code = vl_code
                        break
                try:
                    dno = DNO_MAP[participant_code]
                except KeyError:
                    w.writerow(
                        (
                            "# There is no DNO with participant code ",
                            participant_code))
                    continue
                llfc = sess.query(Llfc).filter(
                    Llfc.dno == dno, Llfc.code == llfc_code,
                    Llfc.valid_from == valid_from).first()
                if llfc is None:
                    w.writerow(
                        (
                            'insert', 'llfc', dno.dno_code, llfc_code,
                            description, voltage_level_code, is_substation,
                            is_import, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str='')))
                elif any(
                        (
                            description != llfc.description,
                            voltage_level_code != llfc.voltage_level.code,
                            is_substation != llfc.is_substation,
                            is_import != llfc.is_import,
                            valid_to != llfc.valid_to)):
                    w.writerow(
                        (
                            'update', 'llfc', dno.dno_code, llfc.code,
                            hh_format(llfc.valid_from), description,
                            voltage_level_code, is_substation, is_import,
                            hh_format(valid_to, ongoing_str='')))
        elif table == 'Market_Participant':
            for i, values in enumerate(reader):
                participant_code = values[0]
                participant_name = values[1]
                participant = sess.query(Participant).filter(
                    Participant.code == participant_code).first()
                if participant is None:
                    w.writerow(
                        (
                            'insert', 'participant', participant_code,
                            participant_name))
                elif participant_name != participant.name:
                    w.writerow(
                        (
                            'update', 'participant', participant_code,
                            participant_name))
        elif table == 'Market_Role':
            for i, values in enumerate(reader):
                role_code = values[0]
                role_description = values[1]
                role = sess.query(MarketRole).filter(
                    MarketRole.code == role_code).first()
                if role is None:
                    w.writerow(
                        (
                            'insert', 'market_role', role_code,
                            role_description))
                elif role_description != role.description:
                    w.writerow(
                        (
                            'update', 'market_role', role_code,
                            role_description))
        elif table == 'Market_Participant_Role':
            for i, values in enumerate(reader):
                participant_code = values[0]
                market_role_code = values[1]
                valid_from = parse_date(values[2])
                party = sess.query(Party).join(Participant). \
                    join(MarketRole).filter(
                        Party.valid_from == valid_from,
                        Participant.code == participant_code,
                        MarketRole.code == market_role_code).first()
                valid_to = parse_to_date(values[3])
                name = values[4]
                dno_code_str = values[14]
                dno_code = None if len(dno_code_str) == 0 else dno_code_str
                # Rows with DNO code '99' are skipped entirely.
                if dno_code == '99':
                    continue
                if party is None:
                    w.writerow(
                        (
                            'insert', 'party', market_role_code,
                            participant_code, name, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str=''),
                            dno_code_str))
                elif any(
                        (
                            name != party.name,
                            dno_code != party.dno_code,
                            valid_to != party.valid_to)):
                    w.writerow(
                        (
                            'update', 'party', market_role_code,
                            participant_code, name, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str=''),
                            dno_code_str))
        elif table == 'Meter_Timeswitch_Class':
            for i, values in enumerate(reader):
                code = values[0].zfill(3)
                valid_from = parse_date(values[1])
                valid_to = parse_to_date(values[2])
                description = values[3]
                is_common = values[4] == 'T'
                has_related_metering = values[5] == 'T'
                meter_type_code = values[6]
                meter_payment_type_code = values[7]
                has_comms = values[8] == 'T'
                is_hh = values[9] == 'H'
                tpr_count_str = values[10]
                tpr_count = 0 if tpr_count_str == '' else int(tpr_count_str)
                if is_common:
                    # Common MTCs have no DNO.
                    mtc = sess.query(Mtc).filter(
                        Mtc.dno == null(), Mtc.code == code,
                        Mtc.valid_from == valid_from).first()
                    if mtc is None:
                        w.writerow(
                            (
                                'insert', 'mtc', '', code, description,
                                has_related_metering, has_comms, is_hh,
                                meter_type_code, meter_payment_type_code,
                                tpr_count, hh_format(valid_from),
                                hh_format(valid_to, ongoing_str='')))
                    elif any(
                            (
                                description != mtc.description,
                                has_related_metering !=
                                mtc.has_related_metering,
                                has_comms != mtc.has_comms,
                                is_hh != mtc.is_hh,
                                meter_type_code != mtc.meter_type.code,
                                meter_payment_type_code !=
                                mtc.meter_payment_type.code,
                                tpr_count != mtc.tpr_count,
                                valid_to != mtc.valid_to)):
                        w.writerow(
                            (
                                'update', 'mtc', '', mtc.code, description,
                                has_related_metering, has_comms, is_hh,
                                meter_type_code, meter_payment_type_code,
                                tpr_count, hh_format(mtc.valid_from),
                                hh_format(valid_to, ongoing_str='')))
        elif table == 'MTC_in_PES_Area':
            dnos = dict(
                (p.participant.code, (p.id, p.dno_code))
                for p in sess.query(
                    Party).join(Participant).join(MarketRole).filter(
                    MarketRole.code == 'R').options(
                    joinedload(Party.participant)))
            mtcs = dict(
                ((m.dno_id, m.code, m.valid_from), m)
                for m in sess.query(Mtc).options(
                    joinedload(Mtc.meter_type),
                    joinedload(Mtc.meter_payment_type)).all())
            for i, values in enumerate(reader):
                code_str = values[0]
                # Only MTC codes that are DNO-specific are handled here.
                if not Mtc.has_dno(code_str):
                    continue
                code_int = int(code_str)
                code = code_str.zfill(3)
                participant_code = values[2]
                dno_id, dno_code = dnos[participant_code]
                valid_from = parse_date(values[3])
                valid_to = parse_to_date(values[4])
                description = values[5]
                meter_type_code = values[6]
                meter_payment_type_code = values[7]
                # Codes above 500 denote related metering.
                has_related_metering = code_int > 500
                has_comms = values[8] == 'Y'
                is_hh = values[9] == 'H'
                tpr_count_str = values[10]
                tpr_count = 0 if tpr_count_str == '' else int(tpr_count_str)
                mtc = mtcs.get((dno_id, code, valid_from))
                if mtc is None:
                    w.writerow(
                        (
                            'insert', 'mtc', dno_code, code, description,
                            has_related_metering, has_comms, is_hh,
                            meter_type_code, meter_payment_type_code,
                            tpr_count, hh_format(valid_from),
                            hh_format(valid_to, ongoing_str='')))
                elif any(
                        (
                            description != mtc.description,
                            has_related_metering !=
                            mtc.has_related_metering,
                            has_comms != mtc.has_comms,
                            is_hh != mtc.is_hh,
                            meter_type_code != mtc.meter_type.code,
                            meter_payment_type_code !=
                            mtc.meter_payment_type.code,
                            tpr_count != mtc.tpr_count,
                            valid_to != mtc.valid_to)):
                    w.writerow(
                        (
                            'update', 'mtc', mtc.dno.dno_code, mtc.code,
                            description, has_related_metering, has_comms,
                            is_hh, meter_type_code, meter_payment_type_code,
                            tpr_count, hh_format(mtc.valid_from),
                            hh_format(valid_to, ongoing_str='')))
        elif table == 'MTC_Meter_Type':
            for i, values in enumerate(reader):
                code = values[0]
                description = values[1]
                valid_from = parse_date(values[2])
                valid_to = parse_to_date(values[3])
                pt = sess.query(MeterType).filter(
                    MeterType.code == code,
                    MeterType.valid_from == valid_from).first()
                if pt is None:
                    w.writerow(
                        (
                            'insert', 'meter_type', code, description,
                            hh_format(valid_from),
                            hh_format(valid_to, ongoing_str='')))
                elif (description, valid_from, valid_to) != (
                        pt.description, pt.valid_from, pt.valid_to):
                    # NOTE(review): valid_to here is formatted without
                    # ongoing_str="", unlike the insert branch.
                    w.writerow(
                        (
                            'update', 'meter_type', code, description,
                            hh_format(valid_from), hh_format(valid_to)))
        else:
            raise Exception("The table " + table + " is not recognized.")
    except BaseException:
        # If the writer was never created there's nowhere to record the
        # problem: re-raise instead of failing with NameError.
        if w is None:
            raise
        w.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def test_hh_format_hh_46():
    """The final half-hour of the clocks-forward day has HH index 46.

    2019-03-31 is the short (46 half-hour) day in the Europe/London
    calendar, so its last half-hour, 23:30 CT, is numbered 46.
    """
    timestamp = to_utc(ct_datetime(2019, 3, 31, 23, 30))
    assert hh_format(timestamp, with_hh=True) == ("2019-03-31 23:30", 46)
def test_hh_format_ct():
    """A UTC datetime is rendered in CT: midnight UTC is 01:00 during BST."""
    timestamp = utc_datetime(2019, 6, 30)
    assert hh_format(timestamp) == "2019-06-30 01:00"
def test_hh_format_hh_50():
    """The final half-hour of the clocks-back day has HH index 50.

    2019-10-27 is the long (50 half-hour) day in the Europe/London
    calendar, so its last half-hour, 23:30 CT, is numbered 50.
    """
    timestamp = to_utc(ct_datetime(2019, 10, 27, 23, 30))
    assert hh_format(timestamp, with_hh=True) == ("2019-10-27 23:30", 50)
def content(year, site_id, sess):
    """Generate CSV lines of displaced TRIAD costs for generating sites.

    Yields a header line, then one line per site group active during the
    TRIAD month (March of `year`).  If `site_id` is None, every site
    with a 'gen' or 'gen-net' supply overlapping March is reported;
    otherwise only the given site.  On error the traceback is yielded so
    it appears in the downloaded file rather than being lost.
    """
    caches = {}
    try:
        # The TRIAD month: 1 March 00:00 UTC up to the HH before 1 April.
        march_finish = datetime.datetime(year, 4, 1, tzinfo=pytz.utc) - HH
        march_start = datetime.datetime(year, 3, 1, tzinfo=pytz.utc)
        yield ', '.join(
            (
                "Site Code", "Site Name", "Displaced TRIAD 1 Date",
                # BUG FIX: this column was "Displaced TRIAD LAF" — the '1'
                # was missing, inconsistent with the TRIAD 2 / TRIAD 3
                # LAF columns below.
                "Displaced TRIAD 1 MSP kW", "Displaced TRIAD 1 LAF",
                "Displaced TRIAD 1 GSP kW", "Displaced TRIAD 2 Date",
                "Displaced TRIAD 2 MSP kW", "Displaced TRIAD 2 LAF",
                "Displaced TRIAD 2 GSP kW", "Displaced TRIAD 3 Date",
                "Displaced TRIAD 3 MSP kW", "Displaced TRIAD 3 LAF",
                "Displaced TRIAD 3 GSP kW", "Displaced GSP kW",
                "Displaced Rate GBP / kW", "GBP")) + '\n'
        forecast_date = chellow.computer.forecast_date()
        if site_id is None:
            # All sites with a generation supply overlapping March.
            sites = sess.query(Site).join(SiteEra).join(Era).join(Supply).join(
                Source).filter(
                Source.code.in_(('gen', 'gen-net')),
                Era.start_date <= march_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= march_start)).distinct()
        else:
            site = Site.get_by_id(sess, site_id)
            sites = sess.query(Site).filter(Site.id == site.id)
        for site in sites:
            for site_group in site.groups(
                    sess, march_start, march_finish, True):
                if site_group.start_date > march_start:
                    chunk_start = site_group.start_date
                else:
                    chunk_start = march_start
                # Only site groups running to the end of March are costed.
                if not site_group.finish_date < march_finish:
                    chunk_finish = march_finish
                else:
                    continue
                # NOTE(review): this row is started before the
                # displaced_era check below; when that check hits
                # `continue` the row is left without a newline, so the
                # next site's row is appended to it — confirm intended.
                yield '"' + site.code + '","' + site.name + '"'
                displaced_era = chellow.computer.displaced_era(
                    sess, site_group, chunk_start, chunk_finish)
                if displaced_era is None:
                    continue
                site_ds = chellow.computer.SiteSource(
                    sess, site, chunk_start, chunk_finish, forecast_date,
                    None, caches, displaced_era)
                chellow.duos.duos_vb(site_ds)
                chellow.triad.hh(site_ds)
                chellow.triad.bill(site_ds)
                bill = site_ds.supplier_bill
                # Promote any rate set with a single value to a scalar.
                for rname, rset in site_ds.supplier_rate_sets.items():
                    if len(rset) == 1:
                        bill[rname] = rset.pop()
                values = []
                for i in range(1, 4):
                    triad_prefix = 'triad-actual-' + str(i)
                    for suffix in ['-date', '-msp-kw', '-laf', '-gsp-kw']:
                        values.append(bill[triad_prefix + suffix])
                values += [
                    bill['triad-actual-' + suf]
                    for suf in ['gsp-kw', 'rate', 'gbp']]
                for value in values:
                    if isinstance(value, datetime.datetime):
                        yield "," + hh_format(value)
                    else:
                        yield "," + str(value)
                yield '\n'
    except BaseException:
        # Surface the traceback in the report itself.
        yield traceback.format_exc()
def content(year, month, months, supply_id, user):
    """Write the 'supplies monthly duration' CSV report.

    Covers the `months` months ending with `year`/`month`, one row per
    supply per month.  If `supply_id` is None all supplies are included.
    The file is written under a 'running' name and renamed to its
    finished name when complete.
    """
    tmp_file = sess = None
    try:
        sess = Session()
        supplies = sess.query(Supply).join(Era).distinct()
        if supply_id is None:
            base_name = "supplies_monthly_duration_for_all_supplies_for_" + \
                str(months) + "_to_" + str(year) + "_" + str(month) + ".csv"
        else:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            base_name = "supplies_monthly_duration_for_" + str(supply.id) + \
                "_" + str(months) + "_to_" + str(year) + "_" + str(month) + \
                ".csv"
        running_name, finished_name = chellow.dloads.make_names(
            base_name, user)
        tmp_file = open(running_name, "w")
        caches = {}
        start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)
        field_names = (
            'supply-name', 'source-code', 'generator-type', 'month',
            'pc-code', 'msn', 'site-code', 'site-name', 'metering-type',
            'import-mpan-core', 'metered-import-kwh',
            'metered-import-net-gbp', 'metered-import-estimated-kwh',
            'billed-import-kwh', 'billed-import-net-gbp',
            'export-mpan-core', 'metered-export-kwh',
            'metered-export-estimated-kwh', 'billed-export-kwh',
            'billed-export-net-gbp', 'problem', 'timestamp')
        tmp_file.write('supply-id,' + ','.join(field_names) + '\n')
        forecast_date = chellow.computer.forecast_date()
        for i in range(months):
            month_start = start_date + relativedelta(months=i)
            month_finish = month_start + relativedelta(months=1) - HH
            for supply in supplies.filter(
                    Era.start_date <= month_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= month_start)):
                generator_type = supply.generator_type
                if generator_type is None:
                    generator_type = ''
                else:
                    generator_type = generator_type.code
                source_code = supply.source.code
                eras = supply.find_eras(sess, month_start, month_finish)
                # Use the last era of the month for descriptive fields.
                era = eras[-1]
                metering_type = era.make_meter_category()
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()
                values = {
                    'supply-name': supply.name,
                    'source-code': source_code,
                    'generator-type': generator_type,
                    'month': hh_format(month_finish),
                    'pc-code': era.pc.code,
                    'msn': era.msn,
                    'site-code': site.code,
                    'site-name': site.name,
                    'metering-type': metering_type,
                    'problem': ''}
                tmp_file.write(str(supply.id) + ',')
                for is_import, pol_name in [
                        (True, 'import'), (False, 'export')]:
                    if is_import:
                        mpan_core = era.imp_mpan_core
                    else:
                        mpan_core = era.exp_mpan_core
                    values[pol_name + '-mpan-core'] = mpan_core
                    kwh = 0
                    est_kwh = 0
                    if metering_type in ['hh', 'amr']:
                        # NOTE(review): this picks the first estimated
                        # datum's value, not a sum — confirm intended.
                        est_kwh = sess.query(HhDatum.value).join(Channel) \
                            .join(Era).filter(
                                HhDatum.status == 'E',
                                Era.supply_id == supply.id,
                                Channel.channel_type == 'ACTIVE',
                                Channel.imp_related == is_import,
                                HhDatum.start_date >= month_start,
                                HhDatum.start_date <= month_finish).first()
                        if est_kwh is None:
                            est_kwh = 0
                        else:
                            est_kwh = est_kwh[0]
                        if not (
                                is_import and source_code in (
                                    'net', 'gen-net')):
                            kwh_sum = sess.query(
                                cast(func.sum(HhDatum.value), Float)
                                ).join(Channel).join(Era).filter(
                                Era.supply_id == supply.id,
                                Channel.channel_type == 'ACTIVE',
                                Channel.imp_related == is_import,
                                HhDatum.start_date >= month_start,
                                HhDatum.start_date <= month_finish).one()[0]
                            if kwh_sum is not None:
                                kwh += kwh_sum
                    values['metered-' + pol_name + '-estimated-kwh'] = est_kwh
                    values['metered-' + pol_name + '-kwh'] = kwh
                    values['metered-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-kwh'] = 0
                    values['billed-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-apportioned-kwh'] = 0
                    values['billed-' + pol_name + '-apportioned-net-gbp'] = 0
                    values['billed-' + pol_name + '-raw-kwh'] = 0
                    values['billed-' + pol_name + '-raw-net-gbp'] = 0
                # Apportion each overlapping bill by the fraction of its
                # duration that falls within the month.
                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= month_finish,
                        Bill.finish_date >= month_start):
                    bill_start = bill.start_date
                    bill_finish = bill.finish_date
                    bill_duration = (
                        bill_finish - bill_start).total_seconds() + 30 * 60
                    overlap_duration = (
                        min(bill_finish, month_finish) -
                        max(bill_start, month_start)).total_seconds() + \
                        30 * 60
                    overlap_proportion = float(
                        overlap_duration) / float(bill_duration)
                    values['billed-import-net-gbp'] += \
                        overlap_proportion * float(bill.net)
                    values['billed-import-kwh'] += \
                        overlap_proportion * float(bill.kwh)
                for era in eras:
                    if era.start_date > month_start:
                        chunk_start = era.start_date
                    else:
                        chunk_start = month_start
                    if hh_after(era.finish_date, month_finish):
                        chunk_finish = month_finish
                    else:
                        chunk_finish = era.finish_date
                    import_mpan_core = era.imp_mpan_core
                    if import_mpan_core is None:
                        continue
                    supplier_contract = era.imp_supplier_contract
                    if source_code in ['net', 'gen-net', '3rd-party']:
                        supply_source = chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, True, None, caches)
                        values['metered-import-kwh'] += sum(
                            datum['msp-kwh']
                            for datum in supply_source.hh_data)
                        import_vb_function = supply_source.contract_func(
                            supplier_contract, 'virtual_bill')
                        if import_vb_function is None:
                            values['problem'] += "Can't find the " \
                                "virtual_bill function in the supplier " \
                                "contract. "
                        else:
                            import_vb_function(supply_source)
                            values['metered-import-net-gbp'] += \
                                supply_source.supplier_bill['net-gbp']
                        supply_source.contract_func(
                            era.hhdc_contract, 'virtual_bill')(supply_source)
                        values['metered-import-net-gbp'] += \
                            supply_source.dc_bill['net-gbp']
                        mop_func = supply_source.contract_func(
                            era.mop_contract, 'virtual_bill')
                        if mop_func is None:
                            values['problem'] += " MOP virtual_bill " \
                                "function can't be found."
                        else:
                            mop_func(supply_source)
                            mop_bill = supply_source.mop_bill
                            values['metered-import-net-gbp'] += \
                                mop_bill['net-gbp']
                            if len(mop_bill['problem']) > 0:
                                values['problem'] += \
                                    " MOP virtual bill problem: " + \
                                    mop_bill['problem']
                values['timestamp'] = int(time.time() * 1000)
                tmp_file.write(
                    ','.join(
                        '"' + str(values[name]) + '"'
                        for name in field_names) + '\n')
    except BaseException:
        # BUG FIX: previously wrote unconditionally to tmp_file, which is
        # None if the failure happened before the file was opened.
        if tmp_file is None:
            raise
        tmp_file.write(traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
        # BUG FIX: tmp_file.close()/os.rename() were unguarded, raising
        # AttributeError/NameError if setup failed before open().
        if tmp_file is not None:
            tmp_file.close()
            os.rename(running_name, finished_name)
def site_content(site_id, start_date, finish_date, user, file_name):
    """Write a CSV of half-hourly 'used' kWh for one site, one row per day.

    Each row carries the site's details followed by up to 48 half-hourly
    used-kWh values.  The file is written under a 'running' name and
    renamed when complete.
    """
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            file_name, user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        site = Site.get_by_id(sess, site_id)
        sites = sess.query(Site).filter(Site.id == site_id)
        start_date_str = hh_format(start_date)
        finish_date_str = hh_format(finish_date)
        for site in sites:
            writer.writerow(
                [
                    "Site Code", "Site Name", "Associated Site Codes",
                    "Sources", "Generator Types", "From", "To", "Type",
                    "Date"] + list(map(str, range(1, 49))))
            associates = ' '.join(
                s.code for s in site.find_linked_sites(
                    sess, start_date, finish_date))
            source_codes = set()
            gen_types = set()
            for supply in sess.query(Supply).join(Era).join(SiteEra).filter(
                    SiteEra.is_physical == true(), SiteEra.site == site,
                    Era.start_date <= finish_date, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date)).distinct().options(
                    joinedload(Supply.source),
                    joinedload(Supply.generator_type)):
                source_codes.add(supply.source.code)
                gen_type = supply.generator_type
                if gen_type is not None:
                    gen_types.add(gen_type.code)
            source_codes_str = ', '.join(sorted(source_codes))
            gen_types_str = ', '.join(sorted(gen_types))
            vals = None
            for hh in site.hh_data(sess, start_date, finish_date):
                hh_start = hh['start_date']
                # A new CSV row starts at each midnight.
                # NOTE(review): if the first HH isn't at midnight, `vals`
                # is still None at the append below — assumes start_date
                # is midnight-aligned; confirm against callers.
                if hh_start.hour == 0 and hh_start.minute == 0:
                    if vals is not None:
                        writer.writerow(vals)
                    vals = [
                        site.code, site.name, associates, source_codes_str,
                        gen_types_str, start_date_str, finish_date_str,
                        'used', hh_start.strftime('%Y-%m-%d')]
                # Used = net import + on-site generation consumed locally
                # + third-party import consumed.
                used_gen_kwh = hh['imp_gen'] - hh['exp_net'] - hh['exp_gen']
                used_3p_kwh = hh['imp_3p'] - hh['exp_3p']
                used_kwh = hh['imp_net'] + used_gen_kwh + used_3p_kwh
                vals.append(str(round(used_kwh, 2)))
            if vals is not None:
                writer.writerow(vals)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # BUG FIX: previously wrote unconditionally to f, which is None
        # if the failure happened before the file was opened.
        if f is not None:
            f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, supply_id, user):
    """Write the CRC (Carbon Reduction Commitment) annual report CSV.

    Covers the CRC year starting 1 April of `year`.  For each 'net' /
    'gen-net' supply it estimates kWh and the number of 'normal'
    (actually-read / actually-metered) days per metering category, and
    classifies the supply's data as Actual or Estimated.  The file is
    written under a 'running' name and renamed when complete.
    """
    f = sess = None
    try:
        sess = Session()
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, "w")
        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        f.write(
            ','.join(
                (
                    'Chellow Supply Id', 'MPAN Core', 'Site Id', 'Site Name',
                    'From', 'To', 'NHH Breakdown', 'Actual HH Normal Days',
                    'Actual AMR Normal Days', 'Actual NHH Normal Days',
                    'Actual Unmetered Normal Days', 'Max HH Normal Days',
                    'Max AMR Normal Days', 'Max NHH Normal Days',
                    'Max Unmetered Normal Days', 'Total Actual Normal Days',
                    'Total Max Normal Days', 'Data Type', 'HH kWh',
                    'AMR kWh', 'NHH kWh', 'Unmetered kwh', 'HH Filled kWh',
                    'AMR Filled kWh', 'Total kWh', 'Note')) + '\n')
        # CRC year: 1 April to the HH before the following 1 April.
        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH
        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(),
            Era.start_date <= year_finish, or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
        meter_types = ('hh', 'amr', 'nhh', 'unmetered')
        for supply in supplies:
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])
            breakdown = ''
            # NOTE(review): mpan_core and site below are taken from the
            # last era iterated; if a supply had no era in the year they
            # would be unbound — the supplies query should prevent that.
            for era in sess.query(Era).filter(
                    Era.supply_id == supply.id,
                    Era.start_date <= year_finish, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= year_start)):
                meter_type = era.make_meter_category()
                era_start = era.start_date
                period_start = era_start \
                    if era_start > year_start else year_start
                era_finish = era.finish_date
                if hh_after(era_finish, year_finish):
                    period_finish = year_finish
                else:
                    period_finish = era_finish
                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)
                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()
                if meter_type == 'nhh':
                    # Pair up successive actual register reads (same meter
                    # serial number, same TPRs) to derive a kWh/HH rate
                    # between each pair.  Reads are walked backwards from
                    # period_start, then forwards from it.
                    read_list = []
                    read_keys = {}
                    pairs = []
                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))
                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads
                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            # Advance to the next usable 'present' read.
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break
                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join(
                                    [str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue
                                # Only accept the read if its bill is the
                                # authoritative one for that period.
                                pres_bill = sess.query(Bill) \
                                    .join(BillType).filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead)
                                    .filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill == pres_bill,
                                        RegisterRead.present_date ==
                                        pres_date,
                                        RegisterRead.msn == pres_msn))
                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            # Advance to the next usable 'previous' read.
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break
                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join(
                                    [str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill) \
                                    .join(BillType).filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead)
                                    .filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id ==
                                        prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))
                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None
                            # Merge the two streams in date order.
                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]
                                if aft_read['msn'] == fore_read['msn'] and \
                                        set(aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']
                                    num_hh = (
                                        (
                                            pair_finish_date + HH -
                                            pair_start_date
                                        ).total_seconds()) / (30 * 60)
                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].items():
                                        end_val = \
                                            fore_read['reads'][tpr_code]
                                        kwh = end_val - initial_val
                                        if kwh < 0:
                                            # Assume the register clocked
                                            # over its maximum.
                                            digits = int(
                                                math.log10(
                                                    initial_val)) + 1
                                            kwh = 10 ** digits + kwh
                                        tprs[tpr_code] = kwh / num_hh
                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date':
                                            pair_finish_date,
                                            'tprs': tprs})
                            # Stop once we have a pair and (backwards) any
                            # pair suffices, or (forwards) we've passed
                            # the end of the period.
                            if len(pairs) > 0 and \
                                    (not is_forwards or (
                                        is_forwards and
                                        read_list[-1]['date'] >
                                        period_finish)):
                                break
                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        # Days covered by actual read pairs count as
                        # 'normal' days.
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                if pair_start > period_start:
                                    block_start = pair_start
                                else:
                                    block_start = period_start
                                if pair_finish < period_finish:
                                    block_finish = pair_finish
                                else:
                                    block_finish = period_finish
                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (
                                            block_finish - block_start
                                        ).total_seconds() + 60 * 30) / \
                                        (60 * 60 * 24)
                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = \
                            pairs[i]['start-date'] - HH
                    # stretch
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]
                    # squash
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    for pair in pairs:
                        pair_hhs = (
                            (
                                pair['finish-date'] - pair['start-date']
                            ).total_seconds() + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in pair['tprs'].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs
                    breakdown += 'pairs - \n' + str(pairs)
                elif meter_type in ('hh', 'amr'):
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish).order_by(
                            HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish).order_by(
                            HhDatum.id))
                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (
                        period_finish + HH - period_start
                    ).total_seconds() / (60 * 30)
                    # Fill gaps with the year's average HH value.
                    if year_len_kwhs > 0:
                        filled_kwh[meter_type] += year_sum_kwhs / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
                    normal_days[meter_type] += sess.query(
                        func.count(HhDatum.value)).join(Channel). \
                        filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0] / 48
                elif meter_type == 'unmetered':
                    bills = sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.finish_date >= period_start,
                        Bill.start_date <= period_finish)
                    for bill in bills:
                        # BUG FIX: this previously read `+= kwh`, but no
                        # name `kwh` exists in this scope (NameError on
                        # every unmetered era); the bill's kWh is meant.
                        total_kwh[meter_type] += float(bill.kwh)
                    normal_days[meter_type] += (
                        (
                            period_finish - period_start).total_seconds() +
                        60 * 30) / (60 * 60 * 24)
            # for full year 183
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            is_normal = \
                total_normal_days / total_max_normal_days >= 183 / 365
            f.write(
                ','.join(
                    '"' + str(val) + '"' for val in [
                        supply.id, mpan_core, site.code, site.name,
                        hh_format(year_start), hh_format(year_finish),
                        breakdown] + [
                        normal_days[mtype] for mtype in meter_types] + [
                        max_normal_days[mtype] for mtype in meter_types] + [
                        total_normal_days, total_max_normal_days,
                        "Actual" if is_normal else "Estimated"] + [
                        total_kwh[mtype] for mtype in meter_types] + [
                        filled_kwh[mtype] for mtype in ('hh', 'amr')] + [
                        sum(total_kwh.values()) + sum(filled_kwh.values()),
                        '']) + '\n')
            # avoid a long running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        # BUG FIX: guard f — it is None if setup failed before open().
        if f is not None:
            f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(running_name, finished_name, date, supplier_id=None,
            mpan_cores=None):
    """Write the 'supplies snapshot' CSV: one row per era active at `date`.

    Optionally restricted to one supply (`supply_id`) or a set of MPAN
    cores.  Each row describes the era's sites, contracts, meter, latest
    reads/bills, channels and import/export MPAN details.  The file is
    renamed from `running_name` to `finished_name` when complete.
    """
    # BUG FIX: f and writer were previously uninitialised, so a failure
    # before open() raised NameError in the except/finally handlers
    # (cf. the sess = f = writer = None convention used elsewhere here).
    sess = f = writer = None
    try:
        sess = Session()
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            (
                'Date', 'Physical Site Id', 'Physical Site Name',
                'Other Site Ids', 'Other Site Names', 'Supply Id', 'Source',
                'Generator Type', 'GSP Group', 'DNO Name', 'Voltage Level',
                'Metering Type', 'Mandatory HH', 'PC', 'MTC', 'CoP', 'SSC',
                'Number Of Registers', 'MOP Contract', 'Mop Account',
                'HHDC Contract', 'HHDC Account', 'Meter Serial Number',
                'Meter Installation Date', 'Latest Normal Meter Read Date',
                'Latest Normal Meter Read Type', 'Latest DC Bill Date',
                'Latest MOP Bill Date', 'Import ACTIVE?',
                'Import REACTIVE_IMPORT?', 'Import REACTIVE_EXPORT?',
                'Export ACTIVE?', 'Export REACTIVE_IMPORT?',
                'Export REACTIVE_EXPORT?', 'Import MPAN core',
                'Import Agreed Supply Capacity (kVA)', 'Import LLFC Code',
                'Import LLFC Description', 'Import Supplier Contract',
                'Import Supplier Account', 'Import Mandatory kW',
                'Latest Import Supplier Bill Date', 'Export MPAN core',
                'Export Agreed Supply Capacity (kVA)', 'Export LLFC Code',
                'Export LLFC Description', 'Export Supplier Contract',
                'Export Supplier Account', 'Export Mandatory kW',
                'Latest Export Supplier Bill Date'))
        NORMAL_READ_TYPES = ('N', 'C', 'N3')
        year_start = date + HH - relativedelta(years=1)
        eras = sess.query(Era).filter(
            Era.start_date <= date,
            or_(Era.finish_date == null(), Era.finish_date >= date)).order_by(
            Era.supply_id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            eras = eras.filter(Era.supply == supply)
        if mpan_cores is not None:
            eras = eras.filter(
                or_(
                    Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
        for era in eras:
            site_codes = []
            site_names = []
            for site_era in era.site_eras:
                if site_era.is_physical:
                    physical_site = site_era.site
                else:
                    site = site_era.site
                    site_codes.append(site.code)
                    site_names.append(site.name)
            supply = era.supply
            if era.imp_mpan_core is None:
                voltage_level_code = era.exp_llfc.voltage_level.code
            else:
                voltage_level_code = era.imp_llfc.voltage_level.code
            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code
            metering_type = era.make_meter_category()
            if metering_type == 'nhh':
                # Latest actual ('normal') read on or before `date`,
                # looking at both previous- and present-read columns.
                latest_prev_normal_read = sess.query(RegisterRead). \
                    join(Bill).join(RegisterRead.previous_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.previous_date <= date,
                        Bill.supply_id == supply.id).order_by(
                        RegisterRead.previous_date.desc()).first()
                latest_pres_normal_read = sess.query(RegisterRead) \
                    .join(Bill).join(RegisterRead.present_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.present_date <= date,
                        Bill.supply == supply).order_by(
                        RegisterRead.present_date.desc()).first()
                if latest_prev_normal_read is None and \
                        latest_pres_normal_read is None:
                    latest_normal_read_date = None
                    latest_normal_read_type = None
                elif latest_pres_normal_read is not None and \
                        latest_prev_normal_read is None:
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                elif latest_pres_normal_read is None and \
                        latest_prev_normal_read is not None:
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                elif latest_pres_normal_read.present_date > \
                        latest_prev_normal_read.previous_date:
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                else:
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                if latest_normal_read_date is not None:
                    latest_normal_read_date = \
                        hh_format(latest_normal_read_date)
            else:
                # For hh/amr/unmetered the 'read date' column carries the
                # metering type instead.
                latest_normal_read_date = metering_type
                latest_normal_read_type = None
            mop_contract = era.mop_contract
            if mop_contract is None:
                mop_contract_name = ''
                mop_account = ''
                latest_mop_bill_date = 'No MOP'
            else:
                mop_contract_name = mop_contract.name
                mop_account = era.mop_account
                latest_mop_bill_date = sess.query(Bill.finish_date) \
                    .join(Batch).filter(
                        Bill.start_date <= date, Bill.supply == supply,
                        Batch.contract == mop_contract).order_by(
                        Bill.finish_date.desc()).first()
                if latest_mop_bill_date is not None:
                    latest_mop_bill_date = hh_format(
                        latest_mop_bill_date[0])
            hhdc_contract = era.hhdc_contract
            if hhdc_contract is None:
                hhdc_contract_name = ''
                hhdc_account = ''
                latest_hhdc_bill_date = 'No HHDC'
            else:
                hhdc_contract_name = hhdc_contract.name
                hhdc_account = era.hhdc_account
                latest_hhdc_bill_date = sess.query(Bill.finish_date) \
                    .join(Batch).filter(
                        Bill.start_date <= date, Bill.supply == supply,
                        Batch.contract == hhdc_contract).order_by(
                        Bill.finish_date.desc()).first()
                if latest_hhdc_bill_date is not None:
                    latest_hhdc_bill_date = hh_format(
                        latest_hhdc_bill_date[0])
            channel_values = []
            for imp_related in [True, False]:
                for channel_type in CHANNEL_TYPES:
                    if era.find_channel(
                            sess, imp_related, channel_type) is None:
                        channel_values.append('false')
                    else:
                        channel_values.append('true')
            # Average of the top three monthly maximum demands (kW) over
            # the year to `date`, per direction.
            imp_avg_months = None
            exp_avg_months = None
            for is_import in [True, False]:
                if metering_type == 'nhh':
                    continue
                params = {
                    'supply_id': supply.id, 'year_start': year_start,
                    'year_finish': date, 'is_import': is_import}
                month_mds = tuple(
                    md[0] * 2 for md in sess.execute("""
select max(hh_datum.value) as md
from hh_datum join channel on (hh_datum.channel_id = channel.id)
    join era on (channel.era_id = era.id)
where era.supply_id = :supply_id
    and hh_datum.start_date >= :year_start
    and hh_datum.start_date <= :year_finish
    and channel.channel_type = 'ACTIVE'
    and channel.imp_related = :is_import
group by extract(month from (hh_datum.start_date at time zone 'utc'))
order by md desc
limit 3
    """, params=params))
                avg_months = sum(month_mds)
                if len(month_mds) > 0:
                    avg_months /= len(month_mds)
                    if is_import:
                        imp_avg_months = avg_months
                    else:
                        exp_avg_months = avg_months
            if (imp_avg_months is not None and imp_avg_months > 100) or \
                    (exp_avg_months is not None and exp_avg_months > 100):
                mandatory_hh = 'yes'
            else:
                mandatory_hh = 'no'
            imp_latest_supplier_bill_date = None
            exp_latest_supplier_bill_date = None
            for is_import in [True, False]:
                if is_import:
                    if era.imp_mpan_core is None:
                        continue
                    else:
                        supplier_contract = era.imp_supplier_contract
                else:
                    if era.exp_mpan_core is None:
                        continue
                    else:
                        supplier_contract = era.exp_supplier_contract
                latest_supplier_bill_date = sess.query(Bill.finish_date) \
                    .join(Batch).filter(
                        Bill.start_date <= date, Bill.supply == supply,
                        Batch.contract == supplier_contract).order_by(
                        Bill.finish_date.desc()).first()
                if latest_supplier_bill_date is not None:
                    latest_supplier_bill_date = \
                        latest_supplier_bill_date[0]
                    latest_supplier_bill_date = hh_format(
                        latest_supplier_bill_date)
                    if is_import:
                        imp_latest_supplier_bill_date = \
                            latest_supplier_bill_date
                    else:
                        exp_latest_supplier_bill_date = \
                            latest_supplier_bill_date
            meter_installation_date = sess.query(
                func.min(Era.start_date)).filter(
                Era.supply == era.supply, Era.msn == era.msn).one()[0]
            if era.ssc is None:
                ssc_code = num_registers = None
            else:
                ssc_code = era.ssc.code
                num_registers = sess.query(MeasurementRequirement).filter(
                    MeasurementRequirement.ssc == era.ssc).count()
            writer.writerow(
                (
                    ('' if value is None else str(value))) for value in [
                    hh_format(date), physical_site.code, physical_site.name,
                    ', '.join(site_codes), ', '.join(site_names), supply.id,
                    supply.source.code, generator_type,
                    supply.gsp_group.code, supply.dno_contract.name,
                    voltage_level_code, metering_type, mandatory_hh,
                    era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                    num_registers, mop_contract_name, mop_account,
                    hhdc_contract_name, hhdc_account, era.msn,
                    hh_format(meter_installation_date),
                    latest_normal_read_date, latest_normal_read_type,
                    latest_hhdc_bill_date, latest_mop_bill_date] +
                channel_values + [
                    era.imp_mpan_core, era.imp_sc,
                    None if era.imp_llfc is None else era.imp_llfc.code,
                    None if era.imp_llfc is None
                    else era.imp_llfc.description,
                    None if era.imp_supplier_contract is None
                    else era.imp_supplier_contract.name,
                    era.imp_supplier_account, imp_avg_months,
                    imp_latest_supplier_bill_date] + [
                    era.exp_mpan_core, era.exp_sc,
                    None if era.exp_llfc is None else era.exp_llfc.code,
                    None if era.exp_llfc is None
                    else era.exp_llfc.description,
                    None if era.exp_supplier_contract is None
                    else era.exp_supplier_contract.name,
                    era.exp_supplier_account, exp_avg_months,
                    exp_latest_supplier_bill_date])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def mpan_bit(
        sess, supply, is_import, num_hh, eras, chunk_start, chunk_finish,
        forecast_date, caches):
    """Build the CSV fragment for one direction (import or export) of a
    supply over chunk_start..chunk_finish.

    Returns a comma-joined string of: LLFC code, MPAN core, supply
    capacity, supplier contract name, total MSP kWh, non-actual MSP kWh,
    GSP kWh, MD kW, MD date, MD kVA and the number of bad half-hours.
    """
    mpan_core_str = ''
    llfc_code = ''
    sc_str = ''
    supplier_contract_name = ''
    # '' means "no GSP kWh applicable"; becomes a running total once any
    # qualifying era is found.
    gsp_kwh = ''
    for era in eras:
        mpan_core = era.imp_mpan_core if is_import else era.exp_mpan_core
        if mpan_core is None:
            continue
        mpan_core_str = mpan_core
        if is_import:
            supplier_contract_name = era.imp_supplier_contract.name
            llfc = era.imp_llfc
            sc = era.imp_sc
        else:
            supplier_contract_name = era.exp_supplier_contract.name
            llfc = era.exp_llfc
            sc = era.exp_sc
        llfc_code = llfc.code
        sc_str = str(sc)
        # Bug fix: the original wrote `not in ('gen')`, which is a
        # substring test against the string 'gen'; a one-element tuple
        # is what was intended.
        if llfc.is_import and era.pc.code == '00' and \
                supply.source.code not in ('gen',) and \
                supply.dno_contract.name != '99':
            if gsp_kwh == '':
                gsp_kwh = 0
            # Clip the era to the requested chunk.
            if chunk_start > era.start_date:
                block_start = chunk_start
            else:
                block_start = era.start_date
            if hh_before(chunk_finish, era.finish_date):
                block_finish = chunk_finish
            else:
                block_finish = era.finish_date
            supply_source = chellow.computer.SupplySource(
                sess, block_start, block_finish, forecast_date, era,
                is_import, None, caches)
            chellow.duos.duos_vb(supply_source)
            gsp_kwh += sum(
                datum['gsp-kwh'] for datum in supply_source.hh_data)
    md = 0
    sum_kwh = 0
    non_actual = 0
    date_at_md = None
    kvarh_at_md = None
    num_na = 0
    for datum in sess.query(HhDatum).join(Channel).join(Era).filter(
            Era.supply_id == supply.id, Channel.imp_related == is_import,
            Channel.channel_type == 'ACTIVE',
            HhDatum.start_date >= chunk_start,
            HhDatum.start_date <= chunk_finish).order_by(HhDatum.id):
        hh_value = float(datum.value)
        hh_status = datum.status
        if hh_value > md:
            md = hh_value
            date_at_md = datum.start_date
        sum_kwh += hh_value
        if hh_status != 'A':
            non_actual += hh_value
            num_na += 1
    # Perf: look up the reactive reading once, for the final MD date,
    # instead of re-querying every time a new maximum was found. The
    # final value is identical to the original's.
    if date_at_md is not None:
        kvarh_at_md = sess.query(
            cast(func.max(HhDatum.value), Float)).join(
            Channel).join(Era).filter(
            Era.supply == supply,
            Channel.imp_related == is_import,
            Channel.channel_type != 'ACTIVE',
            HhDatum.start_date == date_at_md).one()[0]
    # Half-hourly kWh * 2 gives kW (and kvarh * 2 gives kVAr).
    kw_at_md = md * 2
    if kvarh_at_md is None:
        kva_at_md = 'None'
    else:
        kva_at_md = (kw_at_md ** 2 + (kvarh_at_md * 2) ** 2) ** 0.5
    # Bad half-hours: expected minus actual readings, plus non-actuals.
    num_bad = num_hh - sess.query(HhDatum).join(Channel).join(Era).filter(
        Era.supply == supply, Channel.imp_related == is_import,
        Channel.channel_type == 'ACTIVE',
        HhDatum.start_date >= chunk_start,
        HhDatum.start_date <= chunk_finish).count() + num_na
    date_at_md_str = '' if date_at_md is None else hh_format(date_at_md)
    return ','.join(str(val) for val in [
        llfc_code, mpan_core_str, sc_str, supplier_contract_name, sum_kwh,
        non_actual, gsp_kwh, kw_at_md, date_at_md_str, kva_at_md, num_bad])
def content(supply_id, start_date, finish_date, user):
    """Write the 'supplies_duration.csv' report: one row per supply
    active in start_date..finish_date, with per-supply metadata, prime
    (normal) read counts and import/export columns from mpan_bit().

    If supply_id is not None the report is restricted to that supply.
    """
    forecast_date = datetime.datetime.max.replace(tzinfo=pytz.utc)
    caches = {}
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, "w")
        f.write(
            ','.join(
                (
                    "Supply Id", "Supply Name", "Source", "Generator Type",
                    "Site Ids", "Site Names", "From", "To", "PC", "MTC",
                    "CoP", "SSC", "Normal Reads", "Type", "Import LLFC",
                    "Import MPAN Core", "Import Supply Capacity",
                    "Import Supplier", "Import Total MSP kWh",
                    "Import Non-actual MSP kWh", "Import Total GSP kWh",
                    "Import MD / kW", "Import MD Date", "Import MD / kVA",
                    "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                    "Export Supply Capacity", "Export Supplier",
                    "Export Total MSP kWh", "Export Non-actual MSP kWh",
                    "Export GSP kWh", "Export MD / kW", "Export MD Date",
                    "Export MD / kVA", "Export Bad HHs")))
        supplies = sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(Supply.id).distinct()
        if supply_id is not None:
            supplies = supplies.filter(
                Supply.id == Supply.get_by_id(sess, supply_id).id)
        for supply in supplies:
            eras = supply.find_eras(sess, start_date, finish_date)
            era = eras[-1]
            # join() replaces the original quadratic "+= code + ', '"
            # accumulation; output is identical.
            site_codes = ', '.join(
                site_era.site.code for site_era in era.site_eras)
            site_names = ', '.join(
                site_era.site.name for site_era in era.site_eras)
            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code
            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code
            # A 'prime' read is one that belongs to the most recently
            # issued bill overlapping its bill's period; count each
            # read date/meter serial number once.
            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.previous_date >= start_date,
                        RegisterRead.previous_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES)),
                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.present_date >= start_date,
                        RegisterRead.present_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(str(rdate) + "_" + read.msn)
            supply_type = era.make_meter_category()
            # Clip the supply's era range to the report window.
            if eras[0].start_date > start_date:
                chunk_start = eras[0].start_date
            else:
                chunk_start = start_date
            if hh_before(finish_date, era.finish_date):
                chunk_finish = finish_date
            else:
                chunk_finish = era.finish_date
            num_hh = int(
                (chunk_finish - (chunk_start - HH)).total_seconds() /
                (30 * 60))
            f.write(
                '\n' + ','.join(
                    ('"' + str(value) + '"') for value in [
                        supply.id, supply.name, supply.source.code,
                        generator_type, site_codes, site_names,
                        hh_format(start_date), hh_format(finish_date),
                        era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                        len(prime_reads), supply_type]) + ',')
            f.write(
                mpan_bit(
                    sess, supply, True, num_hh, eras, chunk_start,
                    chunk_finish, forecast_date, caches) + "," +
                mpan_bit(
                    sess, supply, False, num_hh, eras, chunk_start,
                    chunk_finish, forecast_date, caches))
    except BaseException:
        # Bug fix: the original bare `except:` wrote to f even when the
        # failure happened before the file was opened. Also echo to
        # stderr, consistent with the sibling report functions.
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if f is not None:
            f.write(msg)
    finally:
        # Bug fix: guard cleanup — the original called sess.close(),
        # f.close() and os.rename unconditionally, raising a secondary
        # error if setup had failed before sess/f were assigned.
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(contract_id, end_year, end_month, months, user):
    """Write the 'displaced.csv' report.

    For each site with generation (or an export MPAN) active in the
    `months`-month window ending with end_year/end_month, break the
    displaced kWh down by generator type (CHP / load management /
    turbine / PV) and append the supplier contract's displaced virtual
    bill columns.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'displaced.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        titles = [
            'Site Code', 'Site Name', 'Associated Site Ids', 'From', 'To',
            'Gen Types', 'CHP kWh', 'LM kWh', 'Turbine kWh', 'PV kWh']
        # Whole-month window: last HH of end month back to the first HH
        # of the month `months - 1` before it.
        finish_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) + \
            relativedelta(months=1) - HH
        start_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)
        forecast_date = chellow.computer.forecast_date()
        contract = Contract.get_supplier_by_id(sess, contract_id)
        # Candidate sites: any with a generation source or an export
        # MPAN during the window.
        sites = sess.query(Site).join(SiteEra).join(Era).join(Supply). \
            join(Source).filter(
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                or_(
                    Source.code.in_(('gen', 'gen-net')),
                    Era.exp_mpan_core != null())).distinct()
        bill_titles = chellow.computer.contract_func(
            caches, contract, 'displaced_virtual_bill_titles', None)()
        for title in bill_titles:
            if title == 'total-msp-kwh':
                title = 'total-displaced-msp-kwh'
            titles.append(title)
        writer.writerow(titles)
        for site in sites:
            month_start = start_date
            month_finish = month_start + relativedelta(months=1) - HH
            # One output row per site per month (per site group chunk).
            while not month_finish > finish_date:
                for site_group in site.groups(
                        sess, month_start, month_finish, True):
                    # Clip the group to the current month.
                    if site_group.start_date > month_start:
                        chunk_start = site_group.start_date
                    else:
                        chunk_start = month_start
                    if site_group.finish_date > month_finish:
                        chunk_finish = month_finish
                    else:
                        chunk_finish = site_group.finish_date
                    displaced_era = chellow.computer.displaced_era(
                        sess, site_group, chunk_start, chunk_finish)
                    if displaced_era is None:
                        continue
                    supplier_contract = displaced_era.imp_supplier_contract
                    if contract is not None and contract != supplier_contract:
                        continue
                    linked_sites = ','.join(
                        a_site.code for a_site in site_group.sites
                        if not a_site == site)
                    generator_types = ' '.join(
                        sorted(
                            [
                                supply.generator_type.code
                                for supply in site_group.supplies
                                if supply.generator_type is not None]))
                    vals = [
                        site.code, site.name, linked_sites,
                        hh_format(chunk_start), hh_format(chunk_finish),
                        generator_types]
                    total_gen_breakdown = {}
                    # Raw SQL: every ACTIVE-channel reading for the
                    # group's supplies in the chunk, except net import,
                    # ordered by start date then supply so the rows for
                    # one half-hour are contiguous.
                    results = iter(
                        sess.execute(
                            "select supply.id, hh_datum.value, "
                            "hh_datum.start_date, channel.imp_related, "
                            "source.code, generator_type.code as "
                            "gen_type_code from hh_datum, channel, source, "
                            "era, supply left outer join generator_type on "
                            "supply.generator_type_id = generator_type.id "
                            "where hh_datum.channel_id = channel.id and "
                            "channel.era_id = era.id and era.supply_id = "
                            "supply.id and supply.source_id = source.id and "
                            "channel.channel_type = 'ACTIVE' and not "
                            "(source.code = 'net' and channel.imp_related "
                            "is true) and hh_datum.start_date >= "
                            ":chunk_start and hh_datum.start_date "
                            "<= :chunk_finish and "
                            "supply.id = any(:supply_ids) order "
                            "by hh_datum.start_date, supply.id",
                            params={
                                'chunk_start': chunk_start,
                                'chunk_finish': chunk_finish,
                                'supply_ids': [
                                    s.id for s in site_group.supplies]}))
                    (
                        sup_id, hh_val, hh_start, imp_related, source_code,
                        gen_type_code) = next(
                        results, (None, None, None, None, None, None))
                    hh_date = chunk_start
                    # NOTE(review): this loop is bounded by finish_date,
                    # not chunk_finish; the query only returns rows up to
                    # chunk_finish, so later iterations contribute
                    # nothing — confirm chunk_finish was intended.
                    while hh_date <= finish_date:
                        gen_breakdown = {}
                        exported = 0
                        # Consume all rows for this half-hour.
                        while hh_start == hh_date:
                            if not imp_related and source_code in (
                                    'net', 'gen-net'):
                                exported += hh_val
                            # Generated kWh: generator output minus any
                            # import back into the generator.
                            if (imp_related and source_code == 'gen') or \
                                    (not imp_related and
                                        source_code == 'gen-net'):
                                gen_breakdown[gen_type_code] = \
                                    gen_breakdown.setdefault(
                                        gen_type_code, 0) + hh_val
                            if (
                                    not imp_related and
                                    source_code == 'gen') or (
                                    imp_related and
                                    source_code == 'gen-net'):
                                gen_breakdown[gen_type_code] = \
                                    gen_breakdown.setdefault(
                                        gen_type_code, 0) - hh_val
                            (
                                sup_id, hh_val, hh_start, imp_related,
                                source_code, gen_type_code) = next(
                                results,
                                (None, None, None, None, None, None))
                        # Displaced = generated but not exported;
                        # attribute it to gen types in sorted-key order
                        # until the displaced total is used up.
                        displaced = sum(gen_breakdown.values()) - exported
                        added_so_far = 0
                        for key in sorted(gen_breakdown.keys()):
                            kwh = gen_breakdown[key]
                            if kwh + added_so_far > displaced:
                                total_gen_breakdown[key] = \
                                    total_gen_breakdown.get(key, 0) + \
                                    displaced - added_so_far
                                break
                            else:
                                total_gen_breakdown[key] = \
                                    total_gen_breakdown.get(key, 0) + kwh
                                added_so_far += kwh
                        hh_date += HH
                    for title in ['chp', 'lm', 'turb', 'pv']:
                        vals.append(str(total_gen_breakdown.get(title, '')))
                    # Append the displaced virtual bill columns.
                    site_ds = chellow.computer.SiteSource(
                        sess, site, chunk_start, chunk_finish,
                        forecast_date, None, caches, displaced_era)
                    disp_func = chellow.computer.contract_func(
                        caches, supplier_contract,
                        'displaced_virtual_bill', None)
                    disp_func(site_ds)
                    bill = site_ds.supplier_bill
                    for title in bill_titles:
                        if title in bill:
                            val = bill[title]
                            if isinstance(val, Datetime):
                                val = hh_format(val)
                            else:
                                val = str(val)
                            vals.append(val)
                            del bill[title]
                        else:
                            vals.append('')
                    # Any unexpected bill keys are appended as
                    # name/value pairs at the end of the row.
                    for k in sorted(bill.keys()):
                        vals.append(k)
                        vals.append(str(bill[k]))
                    writer.writerow(vals)
                month_start += relativedelta(months=1)
                month_finish = month_start + relativedelta(months=1) - HH
    except:
        # NOTE(review): bare except, and 'writer' is unbound here if the
        # failure happened before the file was opened — confirm.
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def site_content(site_id, start_date, finish_date, user, file_name):
    """Write a CSV of daily 'used' kWh for one site.

    Each output row is a calendar day (local time) followed by up to 50
    half-hourly used-kWh figures, where used = net import + generator
    consumption + third-party consumption.
    """
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            file_name, user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        # Raises if the site doesn't exist.
        site = Site.get_by_id(sess, site_id)
        sites = sess.query(Site).filter(Site.id == site_id)
        start_date_str = hh_format(start_date)
        finish_date_str = hh_format(finish_date)
        for site in sites:
            header = [
                "Site Code", "Site Name", "Associated Site Codes",
                "Sources", "Generator Types", "From", "To", "Type",
                "Date"] + [str(col) for col in range(1, 51)]
            writer.writerow(header)
            linked = site.find_linked_sites(sess, start_date, finish_date)
            associates = " ".join(s.code for s in linked)
            # Collect the distinct source and generator-type codes of
            # the site's physically-attached supplies in the window.
            source_codes = set()
            gen_types = set()
            supply_q = sess.query(Supply).join(Era).join(SiteEra).filter(
                SiteEra.is_physical == true(), SiteEra.site == site,
                Era.start_date <= finish_date,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)).distinct().options(
                joinedload(Supply.source),
                joinedload(Supply.generator_type))
            for supply in supply_q:
                source_codes.add(supply.source.code)
                if supply.generator_type is not None:
                    gen_types.add(supply.generator_type.code)
            source_codes_str = ", ".join(sorted(source_codes))
            gen_types_str = ", ".join(sorted(gen_types))
            row = None
            for hh in site.hh_data(sess, start_date, finish_date):
                ct_start = to_ct(hh["start_date"])
                # Local midnight begins a new day's row.
                if ct_start.hour == 0 and ct_start.minute == 0:
                    if row is not None:
                        writer.writerow(row)
                    row = [
                        site.code, site.name, associates,
                        source_codes_str, gen_types_str, start_date_str,
                        finish_date_str, "used",
                        ct_start.strftime("%Y-%m-%d")]
                used_gen = hh["imp_gen"] - hh["exp_net"] - hh["exp_gen"]
                used_3p = hh["imp_3p"] - hh["exp_3p"]
                total_used = hh["imp_net"] + used_gen + used_3p
                row.append(str(round(total_used, 2)))
            # Flush the final (possibly partial) day.
            if row is not None:
                writer.writerow(row)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(batch_id, bill_id, user):
    """Write the 'bill_check.csv' report: compare actual bills (for a
    whole batch, or a single bill) against virtual bills computed from
    the contract's rate scripts, one output row per bill.
    """
    caches = {}
    tmp_file = sess = None
    forecast_date = Datetime.max.replace(tzinfo=pytz.utc)
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')
        # Either a whole batch or a single bill (with its batch).
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = sess.query(Bill).filter(
                Bill.batch_id == batch.id).order_by(Bill.reference)
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = sess.query(Bill).filter(Bill.id == bill.id)
            batch = bill.batch
        contract = batch.contract
        market_role_code = contract.market_role.code
        vbf = chellow.computer.contract_func(
            caches, contract, 'virtual_bill', None)
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")
        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles', None)
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()
        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh',
            'bill-net-gbp', 'bill-vat-gbp', 'bill-start-date',
            'bill-finish-date', 'bill-mpan-core', 'site-code',
            'site-name', 'covered-from', 'covered-to', 'covered-bills',
            'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)
        writer.writerow(titles)
        for bill in bills:
            problem = ''
            supply = bill.supply
            read_dict = {}
            # Sanity-check each register read against the era's meter
            # serial number and against other reads on the same day.
            for read in bill.reads:
                gen_start = read.present_date.replace(hour=0).replace(
                    minute=0)
                gen_finish = gen_start + relativedelta(days=1) - HH
                msn_match = False
                read_msn = read.msn
                for read_era in supply.find_eras(
                        sess, gen_start, gen_finish):
                    if read_msn == read_era.msn:
                        msn_match = True
                        break
                if not msn_match:
                    problem += "The MSN " + read_msn + \
                        " of the register read " + str(read.id) + \
                        " doesn't match the MSN of the era."
                for dt, type in [
                        (read.present_date, read.present_type),
                        (read.previous_date, read.previous_type)]:
                    key = str(dt) + "-" + read.msn
                    try:
                        if type != read_dict[key]:
                            problem += " Reads taken on " + str(dt) + \
                                " have differing read types."
                    except KeyError:
                        read_dict[key] = type
            bill_start = bill.start_date
            bill_finish = bill.finish_date
            era = supply.find_era_at(sess, bill.finish_date)
            if era is None:
                raise BadRequest(
                    "Extraordinary! There isn't an era for the bill " +
                    str(bill.id) + ".")
            values = [
                batch.reference, bill.reference, bill.bill_type.code,
                bill.kwh, bill.net, bill.vat, hh_format(bill_start),
                hh_format(bill_finish), era.imp_mpan_core]
            covered_start = bill_start
            covered_finish = bill_finish
            covered_bill_ids = []
            covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}
            covered_primary_bill = None
            # Grow the covered period until no overlapping bill extends
            # it further; restart the scan whenever it grows.
            enlarged = True
            while enlarged:
                enlarged = False
                covered_bills = []
                cand_bills = dict(
                    (b.id, b) for b in sess.query(Bill).join(Batch).
                    join(Contract).join(MarketRole).filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start,
                        MarketRole.code == market_role_code).order_by(
                        Bill.issue_date.desc(), Bill.start_date))
                # Remove pairs of bills that exactly cancel each other
                # (a bill and its negation).
                while True:
                    to_del = None
                    for a, b in combinations(cand_bills.values(), 2):
                        if all(
                                (
                                    a.start_date == b.start_date,
                                    a.finish_date == b.finish_date,
                                    a.kwh == -1 * b.kwh,
                                    a.net == -1 * b.net,
                                    a.vat == -1 * b.vat,
                                    a.gross == -1 * b.gross)):
                            to_del = (a.id, b.id)
                            break
                    if to_del is None:
                        break
                    else:
                        for k in to_del:
                            del cand_bills[k]
                for cand_bill_id in sorted(cand_bills.keys()):
                    cand_bill = cand_bills[cand_bill_id]
                    if covered_primary_bill is None and \
                            len(cand_bill.reads) > 0:
                        covered_primary_bill = cand_bill
                    if cand_bill.start_date < covered_start:
                        covered_start = cand_bill.start_date
                        enlarged = True
                        break
                    if cand_bill.finish_date > covered_finish:
                        covered_finish = cand_bill.finish_date
                        enlarged = True
                        break
                    covered_bills.append(cand_bill)
            # Sum the covered bills' totals and their breakdown dicts.
            for covered_bill in covered_bills:
                covered_bill_ids.append(covered_bill.id)
                covered_bdown['net-gbp'] += float(covered_bill.net)
                covered_bdown['vat-gbp'] += float(covered_bill.vat)
                covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                if len(covered_bill.breakdown) > 0:
                    covered_rates = collections.defaultdict(set)
                    # NOTE(review): eval() of a stored breakdown string
                    # executes arbitrary expressions — confirm the
                    # breakdown field is trusted, or switch to a safe
                    # parser (e.g. ast.literal_eval).
                    for k, v in eval(covered_bill.breakdown, {}).items():
                        if k.endswith('rate'):
                            # Rates are collected as sets, not summed.
                            covered_rates[k].add(v)
                        elif k != 'raw-lines':
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " the value " +
                                    str(v) +
                                    " can't be added to the existing "
                                    "value " + str(covered_bdown[k]) +
                                    ". " + str(detail))
                    # A rate is reported only if it was unambiguous.
                    for k, v in covered_rates.items():
                        covered_bdown[k] = v.pop() if len(v) == 1 else None
            # Build the virtual bill over the covered period, era by
            # era, and total the metered kWh.
            virtual_bill = {}
            metered_kwh = 0
            for era in sess.query(Era).filter(
                    Era.supply_id == supply.id,
                    Era.imp_mpan_core != null(),
                    Era.start_date <= covered_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= covered_start)).distinct():
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()
                if covered_start > era.start_date:
                    chunk_start = covered_start
                else:
                    chunk_start = era.start_date
                if hh_before(covered_finish, era.finish_date):
                    chunk_finish = covered_finish
                else:
                    chunk_finish = era.finish_date
                data_source = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    True, None, caches, covered_primary_bill)
                if data_source.measurement_type == 'hh':
                    metered_kwh += sum(
                        h['msp-kwh'] for h in data_source.hh_data)
                else:
                    # Non-HH: recompute without the primary bill to get
                    # the metered consumption.
                    ds = chellow.computer.SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date,
                        era, True, None, caches)
                    metered_kwh += sum(h['msp-kwh'] for h in ds.hh_data)
                vbf(data_source)
                # Pick the virtual bill matching the contract's market
                # role: supplier / DC / MOP.
                if market_role_code == 'X':
                    vb = data_source.supplier_bill
                elif market_role_code == 'C':
                    vb = data_source.dc_bill
                elif market_role_code == 'M':
                    vb = data_source.mop_bill
                else:
                    raise BadRequest("Odd market role.")
                for k, v in vb.items():
                    if k.endswith('-rate'):
                        if k not in virtual_bill:
                            virtual_bill[k] = set()
                        virtual_bill[k].add(v)
                    else:
                        try:
                            virtual_bill[k] += v
                        except KeyError:
                            virtual_bill[k] = v
                        except TypeError as detail:
                            raise BadRequest(
                                "For key " + str(k) + " and value " +
                                str(v) + ". " + str(detail))
            values += [
                site.code, site.name, hh_format(covered_start),
                hh_format(covered_finish),
                ','.join(
                    str(id).replace(',', '') for id in covered_bill_ids),
                metered_kwh]
            # Emit covered / virtual / difference columns per title.
            for title in virtual_bill_titles:
                try:
                    cov_val = covered_bdown[title]
                    values.append(cov_val)
                    del covered_bdown[title]
                except KeyError:
                    cov_val = None
                    values.append('')
                try:
                    virt_val = virtual_bill[title]
                    if isinstance(virt_val, set):
                        virt_val = ', '.join(str(v) for v in virt_val)
                    elif isinstance(virt_val, Datetime):
                        virt_val = hh_format(virt_val)
                    values.append(virt_val)
                    del virtual_bill[title]
                except KeyError:
                    virt_val = None
                    values.append('')
                if title.endswith('-gbp'):
                    if all(
                            isinstance(val, (int, float)) for val in [
                                cov_val, virt_val]):
                        values.append(cov_val - virt_val)
                    else:
                        values.append('')
            # Any leftover virtual-bill keys go on the end of the row.
            for title in sorted(virtual_bill.keys()):
                virt_val = virtual_bill[title]
                if isinstance(virt_val, set):
                    virt_val = ', '.join(str(v) for v in virt_val)
                elif isinstance(virt_val, Datetime):
                    virt_val = hh_format(virt_val)
                values += ['virtual-' + title, virt_val]
                if title in covered_bdown:
                    values += ['covered-' + title, covered_bdown[title]]
                else:
                    values += ['', '']
            writer.writerow(values)
    except BadRequest as e:
        tmp_file.write("Problem: " + e.description)
    except:
        # NOTE(review): bare except; also tmp_file may be None here if
        # the failure happened before the file was opened — confirm.
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        # NOTE(review): tmp_file.close()/os.rename are unguarded; they
        # raise a secondary error if setup failed — confirm and guard.
        tmp_file.close()
        os.rename(running_name, finished_name)
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user): now = Datetime.now(pytz.utc) report_context = {} future_funcs = {} report_context['future_funcs'] = future_funcs sess = None try: sess = Session() if scenario_props is None: scenario_contract = Contract.get_supplier_by_id(sess, scenario_id) scenario_props = scenario_contract.make_properties() base_name.append(scenario_contract.name) for contract in sess.query(Contract).join(MarketRole).filter( MarketRole.code == 'Z'): try: props = scenario_props[contract.name] except KeyError: continue try: rate_start = props['start_date'] except KeyError: raise BadRequest( "In " + scenario_contract.name + " for the rate " + contract.name + " the start_date is missing.") if rate_start is not None: rate_start = rate_start.replace(tzinfo=pytz.utc) lib = importlib.import_module('chellow.' + contract.name) if hasattr(lib, 'create_future_func'): future_funcs[contract.id] = { 'start_date': rate_start, 'func': lib.create_future_func( props['multiplier'], props['constant'])} start_date = scenario_props['scenario_start'] if start_date is None: start_date = Datetime( now.year, now.month, 1, tzinfo=pytz.utc) else: start_date = start_date.replace(tzinfo=pytz.utc) base_name.append( hh_format(start_date).replace(' ', '_').replace(':', ''). 
replace('-', '')) months = scenario_props['scenario_duration'] base_name.append('for') base_name.append(str(months)) base_name.append('months') finish_date = start_date + relativedelta(months=months) if 'kwh_start' in scenario_props: kwh_start = scenario_props['kwh_start'] else: kwh_start = None if kwh_start is None: kwh_start = chellow.computer.forecast_date() else: kwh_start = kwh_start.replace(tzinfo=pytz.utc) sites = sess.query(Site).join(SiteEra).join(Era).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date)).distinct().order_by(Site.code) if site_id is not None: site = Site.get_by_id(sess, site_id) sites = sites.filter(Site.id == site.id) base_name.append('site') base_name.append(site.code) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) base_name.append('supply') base_name.append(str(supply.id)) sites = sites.filter(Era.supply == supply) running_name, finished_name = chellow.dloads.make_names( '_'.join(base_name) + '.ods', user) rf = open(running_name, "wb") f = odswriter.writer(rf, '1.1') group_tab = f.new_sheet("Site Level") sup_tab = f.new_sheet("Supply Level") changes = defaultdict(list, {}) try: kw_changes = scenario_props['kw_changes'] except KeyError: kw_changes = '' for row in csv.reader(io.StringIO(kw_changes)): if len(''.join(row).strip()) == 0: continue if len(row) != 4: raise BadRequest( "Can't interpret the row " + str(row) + " it should be of " "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER") site_code, typ, date_str, kw_str = row date = Datetime.strptime(date_str.strip(), "%Y-%m-%d").replace( tzinfo=pytz.utc) changes[site_code.strip()].append( { 'type': typ.strip(), 'date': date, 'multiplier': float(kw_str)}) sup_header_titles = [ 'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id', 'site-name', 'associated-site-ids', 'month'] site_header_titles = [ 'site-id', 'site-name', 'associated-site-ids', 
'month', 'metering-type', 'sources', 'generator-types'] summary_titles = [ 'import-net-kwh', 'export-net-kwh', 'import-gen-kwh', 'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh', 'import-net-gbp', 'export-net-gbp', 'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp', 'billed-import-net-kwh', 'billed-import-net-gbp'] title_dict = {} for cont_type, con_attr in ( ('mop', Era.mop_contract), ('dc', Era.hhdc_contract), ('imp-supplier', Era.imp_supplier_contract), ('exp-supplier', Era.exp_supplier_contract)): titles = [] title_dict[cont_type] = titles conts = sess.query(Contract).join(con_attr) \ .join(Era.supply).join(Source).filter( Era.start_date <= start_date, or_( Era.finish_date == null(), Era.finish_date >= start_date), Source.code.in_(('net', '3rd-party')) ).distinct().order_by(Contract.id) if supply_id is not None: conts = conts.filter(Era.supply_id == supply_id) for cont in conts: title_func = chellow.computer.contract_func( report_context, cont, 'virtual_bill_titles', None) if title_func is None: raise Exception( "For the contract " + cont.name + " there doesn't seem to be a " "'virtual_bill_titles' function.") for title in title_func(): if title not in titles: titles.append(title) sup_tab.writerow( sup_header_titles + summary_titles + [None] + ['mop-' + t for t in title_dict['mop']] + [None] + ['dc-' + t for t in title_dict['dc']] + [None] + ['imp-supplier-' + t for t in title_dict['imp-supplier']] + [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']]) group_tab.writerow(site_header_titles + summary_titles) sites = sites.all() month_start = start_date while month_start < finish_date: month_finish = month_start + relativedelta(months=1) - HH for site in sites: site_changes = changes[site.code] site_associates = set() site_category = None site_sources = set() site_gen_types = set() site_month_data = 
defaultdict(int) for group in site.groups( sess, month_start, month_finish, False): site_associates.update( set( s.code for s in group.sites if s.code != site.code)) for cand_supply in group.supplies: site_sources.add(cand_supply.source.code) if cand_supply.generator_type is not None: site_gen_types.add(cand_supply.generator_type.code) for cand_era in sess.query(Era).filter( Era.supply == cand_supply, Era.start_date <= group.finish_date, or_( Era.finish_date == null(), Era.finish_date >= group.start_date)). \ options( joinedload(Era.channels), joinedload(Era.pc), joinedload(Era.mtc).joinedload( Mtc.meter_type)): if site_category != 'hh': if cand_era.pc.code == '00': site_category = 'hh' elif site_category != 'amr': if len(cand_era.channels) > 0: site_category = 'amr' elif site_category != 'nhh': if cand_era.mtc.meter_type.code \ not in ['UM', 'PH']: site_category = 'nhh' else: site_category = 'unmetered' for group in site.groups( sess, month_start, month_finish, True): calcs = [] deltas = defaultdict(int) group_associates = set( s.code for s in group.sites if s.code != site.code) for supply in group.supplies: if supply_id is not None and supply.id != supply_id: continue for era in sess.query(Era).join(Supply) \ .join(Source).filter( Era.supply == supply, Era.start_date <= group.finish_date, or_( Era.finish_date == null(), Era.finish_date >= group.start_date)) \ .options( joinedload(Era.ssc), joinedload(Era.hhdc_contract), joinedload(Era.mop_contract), joinedload(Era.imp_supplier_contract), joinedload(Era.exp_supplier_contract), joinedload(Era.channels), joinedload(Era.imp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.exp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.cop), joinedload(Era.supply).joinedload( Supply.dno_contract), joinedload(Era.mtc).joinedload( Mtc.meter_type)): if era.start_date > group.start_date: ss_start = era.start_date else: ss_start = group.start_date if hh_before(era.finish_date, group.finish_date): ss_finish = era.finish_date 
else: ss_finish = group.finish_date if era.imp_mpan_core is None: imp_ss = None else: imp_ss = SupplySource( sess, ss_start, ss_finish, kwh_start, era, True, None, report_context) if era.exp_mpan_core is None: exp_ss = None measurement_type = imp_ss.measurement_type else: exp_ss = SupplySource( sess, ss_start, ss_finish, kwh_start, era, False, None, report_context) measurement_type = exp_ss.measurement_type order = meter_order[measurement_type] calcs.append( ( order, era.imp_mpan_core, era.exp_mpan_core, imp_ss, exp_ss)) if imp_ss is not None and len(era.channels) == 0: for hh in imp_ss.hh_data: deltas[hh['start-date']] += hh['msp-kwh'] imp_net_delts = defaultdict(int) exp_net_delts = defaultdict(int) imp_gen_delts = defaultdict(int) displaced_era = chellow.computer.displaced_era( sess, group, group.start_date, group.finish_date) site_ds = chellow.computer.SiteSource( sess, site, group.start_date, group.finish_date, kwh_start, None, report_context, displaced_era) for hh in site_ds.hh_data: try: delta = deltas[hh['start-date']] hh['import-net-kwh'] += delta hh['used-kwh'] += delta except KeyError: pass for hh in site_ds.hh_data: for change in site_changes: if change['type'] == 'used' and \ change['date'] <= hh['start-date']: used = change['multiplier'] * hh['used-kwh'] exp_net = max( 0, hh['import-gen-kwh'] - hh['export-gen-kwh'] - used) exp_net_delt = exp_net - hh['export-net-kwh'] exp_net_delts[hh['start-date']] += exp_net_delt displaced = hh['import-gen-kwh'] - \ hh['export-gen-kwh'] - exp_net imp_net = used - displaced imp_delt = imp_net - hh['import-net-kwh'] imp_net_delts[hh['start-date']] += imp_delt hh['import-net-kwh'] = imp_net hh['used-kwh'] = used hh['export-net-kwh'] = exp_net hh['msp-kwh'] = displaced elif change['type'] == 'generated' and \ change['date'] <= hh['start-date']: imp_gen = change['multiplier'] * \ hh['import-gen-kwh'] imp_gen_delt = imp_gen - hh['import-gen-kwh'] exp_net = max( 0, imp_gen - hh['export-gen-kwh'] - hh['used-kwh']) 
exp_net_delt = exp_net - hh['export-net-kwh'] exp_net_delts[hh['start-date']] += exp_net_delt displaced = imp_gen - hh['export-gen-kwh'] - \ exp_net imp_net = hh['used-kwh'] - displaced imp_net_delt = imp_net - hh['import-net-kwh'] imp_net_delts[hh['start-date']] += imp_net_delt imp_gen_delts[hh['start-date']] += imp_gen_delt hh['import-net-kwh'] = imp_net hh['export-net-kwh'] = exp_net hh['import-gen-kwh'] = imp_gen hh['msp-kwh'] = displaced if displaced_era is not None and supply_id is None: month_data = {} for sname in ( 'import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'msp', 'used', 'used-3rd-party', 'billed-import-net'): for xname in ('kwh', 'gbp'): month_data[sname + '-' + xname] = 0 month_data['used-kwh'] = \ month_data['displaced-kwh'] = \ sum(hh['msp-kwh'] for hh in site_ds.hh_data) disp_supplier_contract = \ displaced_era.imp_supplier_contract disp_vb_function = chellow.computer.contract_func( report_context, disp_supplier_contract, 'displaced_virtual_bill', None) if disp_vb_function is None: raise BadRequest( "The supplier contract " + disp_supplier_contract.name + " doesn't have the displaced_virtual_bill() " "function.") disp_vb_function(site_ds) disp_supplier_bill = site_ds.supplier_bill try: gbp = disp_supplier_bill['net-gbp'] except KeyError: disp_supplier_bill['problem'] += \ 'For the supply ' + \ site_ds.mpan_core + \ ' the virtual bill ' + \ str(disp_supplier_bill) + \ ' from the contract ' + \ disp_supplier_contract.name + \ ' does not contain the net-gbp key.' 
month_data['used-gbp'] = \ month_data['displaced-gbp'] = \ site_ds.supplier_bill['net-gbp'] out = [ None, None, displaced_era.make_meter_category(), 'displaced', None, None, None, None, site.code, site.name, ','.join(sorted(list(group_associates))), month_finish] + \ [month_data[t] for t in summary_titles] sup_tab.writerow(out) for k, v in month_data.items(): site_month_data[k] += v for i, ( order, imp_mpan_core, exp_mpan_core, imp_ss, exp_ss) in enumerate(sorted(calcs, key=str)): if imp_ss is None: era = exp_ss.era else: era = imp_ss.era supply = era.supply source = supply.source source_code = source.code site_sources.add(source_code) month_data = {} for name in ( 'import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'displaced', 'used', 'used-3rd-party', 'billed-import-net'): for sname in ('kwh', 'gbp'): month_data[name + '-' + sname] = 0 if source_code == 'net': delts = imp_net_delts elif source_code == 'gen': delts = imp_gen_delts else: delts = [] if len(delts) > 0 and imp_ss is not None: for hh in imp_ss.hh_data: diff = hh['msp-kwh'] + delts[hh['start-date']] if diff < 0: hh['msp-kwh'] = 0 hh['msp-kw'] = 0 delts[hh['start-date']] -= hh['msp-kwh'] else: hh['msp-kwh'] += delts[hh['start-date']] hh['msp-kw'] += hh['msp-kwh'] / 2 del delts[hh['start-date']] left_kwh = sum(delts.values()) if left_kwh > 0: first_hh = imp_ss.hh_data[0] first_hh['msp-kwh'] += left_kwh first_hh['msp-kw'] += left_kwh / 2 imp_supplier_contract = era.imp_supplier_contract if imp_supplier_contract is not None: import_vb_function = contract_func( report_context, imp_supplier_contract, 'virtual_bill', None) if import_vb_function is None: raise BadRequest( "The supplier contract " + imp_supplier_contract.name + " doesn't have the virtual_bill() " "function.") import_vb_function(imp_ss) imp_supplier_bill = imp_ss.supplier_bill try: gbp = imp_supplier_bill['net-gbp'] except KeyError: imp_supplier_bill['problem'] += \ 'For the supply ' + \ 
imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' if source_code in ('net', 'gen-net'): month_data['import-net-gbp'] += gbp month_data['used-gbp'] += gbp elif source_code == '3rd-party': month_data['import-3rd-party-gbp'] += gbp month_data['used-gbp'] += gbp elif source_code == '3rd-party-reverse': month_data['export-3rd-party-gbp'] += gbp month_data['used-gbp'] -= gbp kwh = sum( hh['msp-kwh'] for hh in imp_ss.hh_data) if source_code in ('net', 'gen-net'): month_data['import-net-kwh'] += kwh month_data['used-kwh'] += kwh elif source_code == '3rd-party': month_data['import-3rd-party-kwh'] += kwh month_data['used-kwh'] += kwh elif source_code == '3rd-party-reverse': month_data['export-3rd-party-kwh'] += kwh month_data['used-kwh'] -= kwh elif source_code in ('gen', 'gen-net'): month_data['import-gen-kwh'] += kwh exp_supplier_contract = era.exp_supplier_contract if exp_supplier_contract is None: kwh = sess.query( func.coalesce( func.sum( cast(HhDatum.value, Float)), 0)). \ join(Channel).filter( Channel.era == era, Channel.channel_type == 'ACTIVE', Channel.imp_related == false()).scalar() if source_code == 'gen': month_data['export-net-kwh'] += kwh else: export_vb_function = contract_func( report_context, exp_supplier_contract, 'virtual_bill', None) export_vb_function(exp_ss) exp_supplier_bill = exp_ss.supplier_bill try: gbp = exp_supplier_bill['net-gbp'] except KeyError: exp_supplier_bill['problem'] += \ 'For the supply ' + \ imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' 
kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data) if source_code in ('net', 'gen-net'): month_data['export-net-kwh'] += kwh month_data['export-net-gbp'] += gbp elif source_code in \ ('3rd-party', '3rd-party-reverse'): month_data['export-3rd-party-kwh'] += kwh month_data['export-3rd-party-gbp'] += gbp month_data['used-kwh'] -= kwh month_data['used-gbp'] -= gbp elif source_code == 'gen': month_data['export-gen-kwh'] += kwh sss = exp_ss if imp_ss is None else imp_ss dc_contract = era.hhdc_contract sss.contract_func( dc_contract, 'virtual_bill')(sss) dc_bill = sss.dc_bill gbp = dc_bill['net-gbp'] mop_contract = era.mop_contract mop_bill_function = sss.contract_func( mop_contract, 'virtual_bill') mop_bill_function(sss) mop_bill = sss.mop_bill gbp += mop_bill['net-gbp'] if source_code in ('3rd-party', '3rd-party-reverse'): month_data['import-3rd-party-gbp'] += gbp else: month_data['import-net-gbp'] += gbp month_data['used-gbp'] += gbp if source_code in ('gen', 'gen-net'): generator_type = supply.generator_type.code site_gen_types.add(generator_type) else: generator_type = None sup_category = era.make_meter_category() if CATEGORY_ORDER[site_category] < \ CATEGORY_ORDER[sup_category]: site_category = sup_category for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= sss.finish_date, Bill.finish_date >= sss.start_date): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = ( bill_finish - bill_start).total_seconds() + \ (30 * 60) overlap_duration = ( min(bill_finish, sss.finish_date) - max(bill_start, sss.start_date) ).total_seconds() + (30 * 60) overlap_proportion = \ float(overlap_duration) / bill_duration month_data['billed-import-net-kwh'] += \ overlap_proportion * float(bill.kwh) month_data['billed-import-net-gbp'] += \ overlap_proportion * float(bill.net) out = [ era.imp_mpan_core, era.exp_mpan_core, sup_category, source_code, generator_type, supply.name, era.msn, era.pc.code, site.code, site.name, 
','.join(sorted(list(site_associates))), month_finish] + [ month_data[t] for t in summary_titles] + [None] + [ (mop_bill[t] if t in mop_bill else None) for t in title_dict['mop']] + [None] + \ [(dc_bill[t] if t in dc_bill else None) for t in title_dict['dc']] if imp_supplier_contract is None: out += [None] * \ (len(title_dict['imp-supplier']) + 1) else: out += [None] + [ ( imp_supplier_bill[t] if t in imp_supplier_bill else None) for t in title_dict['imp-supplier']] if exp_supplier_contract is not None: out += [None] + [ ( exp_supplier_bill[t] if t in exp_supplier_bill else None) for t in title_dict['exp-supplier']] for k, v in month_data.items(): site_month_data[k] += v sup_tab.writerow(out) group_tab.writerow( [ site.code, site.name, ''.join(sorted(list(site_associates))), month_finish, site_category, ', '.join(sorted(list(site_sources))), ', '.join(sorted(list(site_gen_types)))] + [site_month_data[k] for k in summary_titles]) sess.rollback() month_start += relativedelta(months=1) except BadRequest as e: msg = e.description + traceback.format_exc() sys.stderr.write(msg + '\n') group_tab.writerow(["Problem " + msg]) except: msg = traceback.format_exc() sys.stderr.write(msg + '\n') group_tab.writerow(["Problem " + msg]) finally: if sess is not None: sess.close() try: f.close() rf.close() os.rename(running_name, finished_name) except: msg = traceback.format_exc() r_name, f_name = chellow.dloads.make_names('error.txt', user) ef = open(r_name, "w") ef.write(msg + '\n') ef.close()
def datum_beginning_20(ds, hh):
    """Apply DNO 20 DUoS charges for one half-hour to the supplier bill.

    ds -- data source (SupplySource-like; supplies tariffs/LAFs via
          ds.hh_rate and accumulates into ds.supplier_bill) — presumed,
          confirm against chellow.computer.
    hh -- half-hour dict with 'start-date', 'msp-kwh', clock-time fields
          ('ct-decimal-hour', 'ct-day-of-week', 'ct-month') and
          'utc-is-month-end' / 'utc-day'.

    Raises BadRequest if no tariff matches the LLFC.
    """
    bill = ds.supplier_bill
    # Find the tariff whose comma-separated LLFC list contains our LLFC.
    tariff = None
    for k, tf in ds.hh_rate(
            ds.dno_contract.id, hh['start-date'], 'tariffs').items():
        if ds.llfc_code in [cd.strip() for cd in k.split(',')]:
            tariff = tf
    if tariff is None:
        raise BadRequest(
            "The tariff for the LLFC " + ds.llfc_code +
            " cannot be found for the DNO 20 at " +
            hh_format(hh['start-date']) + ".")
    # LAFs keyed by voltage level (lower-cased), then by time slot below.
    lafs = ds.hh_rate(
        ds.dno_contract.id, hh['start-date'], 'lafs')[
        ds.voltage_level_code.lower()]
    day_rate = tariff['day-gbp-per-kwh']
    if 'night-gbp-per-kwh' in tariff:
        night_rate = tariff['night-gbp-per-kwh']
        # Night window is (00:00, 07:00] clock time.
        if 0 < hh['ct-decimal-hour'] <= 7:
            bill['duos-night-kwh'] += hh['msp-kwh']
            bill['duos-night-gbp'] += hh['msp-kwh'] * night_rate
        else:
            bill['duos-day-kwh'] += hh['msp-kwh']
            bill['duos-day-gbp'] += hh['msp-kwh'] * day_rate
    else:
        # Single-rate tariff: everything is charged at the day rate.
        bill['duos-day-kwh'] += hh['msp-kwh']
        bill['duos-day-gbp'] += hh['msp-kwh'] * day_rate
    # Pick the LAF time slot for this half-hour.
    if 0 < hh['ct-decimal-hour'] <= 7:
        slot_name = 'night'
    elif hh['ct-day-of-week'] < 5 and \
            16 < hh['ct-decimal-hour'] <= 19 and \
            (hh['ct-month'] > 10 or hh['ct-month'] < 3):
        # Weekday 16:30-19:00, Nov-Feb.
        slot_name = 'peak'
    elif 7 > hh['ct-day-of-week'] > 1 and (
            7 < hh['ct-decimal-hour'] < 15 or
            18.5 < hh['ct-decimal-hour'] < 19) and \
            (hh['ct-month'] > 11 or hh['ct-month'] < 4):
        slot_name = 'winter-weekday'
    else:
        slot_name = 'other'
    hh['laf'] = lafs[slot_name]
    hh['gsp-kwh'] = hh['laf'] * hh['msp-kwh']
    # kW = kWh per half-hour * 2.
    hh['gsp-kw'] = hh['gsp-kwh'] * 2
    # Monthly charges are applied on the last half-hour of the UTC month.
    if hh['utc-is-month-end']:
        # Re-look-up the tariff (rates may differ at month end).
        tariff = None
        for k, tf in ds.hh_rate(
                ds.dno_contract.id, hh['start-date'], 'tariffs').items():
            if ds.llfc_code in map(str.strip, k.split(',')):
                tariff = tf
                break
        if tariff is None:
            raise BadRequest(
                "The tariff for the LLFC " + ds.llfc_code +
                " cannot be found for the DNO 20 at " +
                hh_format(hh['start-date']) + ".")
        # Availability charges only for real (non-displaced) supplies.
        if not ds.is_displaced:
            year_md_kva_095 = year_md_095(ds, ds.finish_date)
            bill['duos-excess-availability-kva'] = max(
                year_md_kva_095 - ds.sc, 0)
            billed_avail = max(ds.sc, year_md_kva_095)
            bill['duos-availability-kva'] = ds.sc
            # Round billed availability up to the block size for its band.
            for threshold, block in [
                    (15, 15), (100, 5), (250, 10), (500, 25), (1000, 50),
                    (None, 100)]:
                if threshold is None or billed_avail < threshold:
                    if billed_avail % block > 0:
                        billed_avail = (int(billed_avail / block) + 1) * block
                    break
            le_200_avail_rate = tariff['capacity-<=200-gbp-per-kva-per-month']
            bill['duos-availability-gbp'] += min(200, billed_avail) * \
                le_200_avail_rate
            # kVA above 200 is charged at the higher-band rate.
            if billed_avail > 200:
                gt_200_avail_rate = \
                    tariff['capacity->200-gbp-per-kva-per-month']
                bill['duos-availability-gbp'] += (billed_avail - 200) * \
                    gt_200_avail_rate
        # Standing charge: flat per month, or per day times days elapsed.
        if 'fixed-gbp-per-month' in tariff:
            bill['duos-standing-gbp'] += tariff['fixed-gbp-per-month']
        else:
            bill['duos-standing-gbp'] += tariff['fixed-gbp-per-day'] * \
                hh['utc-day']
def content(start_date, finish_date, g_contract_id, user):
    """Write a CSV of gas virtual bills for a contract, month by month.

    Walks each month between start_date and finish_date, and for every
    GEra attached to the contract (import or export side) computes the
    virtual bill and writes one row per era/polarity.

    The finished file is registered through chellow.dloads.
    """
    report_context = {}
    # Initialise everything used in except/finally so an early failure
    # (e.g. in make_names or open) can't raise NameError there.
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'gas_virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        g_contract = GContract.get_by_id(sess, g_contract_id)
        forecast_date = chellow.computer.forecast_date()
        month_start = Datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)
        month_finish = month_start + relativedelta(months=1) - HH
        bill_titles = chellow.computer.contract_func(
            report_context, g_contract, 'virtual_bill_titles', None)()
        writer.writerow(
            [
                'MPRN', 'Site Code', 'Site Name', 'Account', 'From',
                'To'] + bill_titles)
        while not month_start > finish_date:
            # Clamp the month window to the requested range.
            period_start = start_date \
                if month_start < start_date else month_start
            if month_finish > finish_date:
                period_finish = finish_date
            else:
                period_finish = month_finish
            for g_era in sess.query(GEra).distinct().filter(
                    or_(
                        GEra.imp_g_contract == g_contract,
                        GEra.exp_g_contract == g_contract),
                    GEra.start_date <= period_finish,
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >= period_start)):
                # Clamp again to the era's own lifetime.
                g_era_start = g_era.start_date
                if period_start < g_era_start:
                    chunk_start = g_era_start
                else:
                    chunk_start = period_start
                g_era_finish = g_era.finish_date
                if hh_after(period_finish, g_era_finish):
                    chunk_finish = g_era_finish
                else:
                    chunk_finish = period_finish
                # An era may hold the contract on both sides; do each.
                polarities = []
                if g_era.imp_g_contract == g_contract:
                    polarities.append(True)
                if g_era.exp_g_contract == g_contract:
                    polarities.append(False)
                for polarity in polarities:
                    data_source = chellow.g_engine.DataSource(
                        sess, chunk_start, chunk_finish, forecast_date,
                        g_era, polarity, None, report_context)
                    site = sess.query(Site).join(SiteGEra).filter(
                        SiteGEra.g_era == g_era,
                        SiteGEra.is_physical == true()).one()
                    vals = [
                        data_source.mprn, site.code, site.name,
                        data_source.supplier_account,
                        hh_format(data_source.start_date),
                        hh_format(data_source.finish_date)]
                    chellow.computer.contract_func(
                        report_context, g_contract, 'virtual_bill',
                        None)(data_source)
                    bill = data_source.bill
                    # Known titles in order, then any leftovers as
                    # name/value pairs.
                    for title in bill_titles:
                        if title in bill:
                            val = str(bill[title])
                            del bill[title]
                        else:
                            val = ''
                        vals.append(val)
                    for k in sorted(bill.keys()):
                        vals.append(k)
                        vals.append(str(bill[k]))
                    writer.writerow(vals)
            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # writer may be None if the failure happened before open().
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        # Only rename once the file was actually created; f is non-None
        # exactly when running_name/finished_name were assigned.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def none_content(site_codes, typ, start_date, finish_date, user, file_name):
    """Write a zip of one CSV per site of half-hourly values of kind `typ`.

    Each CSV has one row per CT day, with up to 50 half-hour columns
    (covers the long clock-change day). site_codes of None means all
    sites with a physical era overlapping the range.
    """
    sess = zf = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            file_name, user)
        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            SiteEra.is_physical == true(),
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date)
        if site_codes is not None:
            sites = sites.filter(Site.code.in_(site_codes))
        zf = zipfile.ZipFile(running_name, "w")
        start_date_str = hh_format(start_date)
        finish_date_str = hh_format(finish_date)
        for site in sites:
            buf = StringIO()
            writer = csv.writer(buf, lineterminator="\n")
            writer.writerow(
                [
                    "Site Code", "Site Name", "Associated Site Codes",
                    "Sources", "Generator Types", "From", "To", "Type",
                    "Date",
                ] + list(map(str, range(1, 51))))
            associates = " ".join(
                s.code for s in site.find_linked_sites(
                    sess, start_date, finish_date))
            source_codes = set()
            gen_types = set()
            for supply in sess.query(Supply).join(Era).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.site == site,
                    Era.start_date <= finish_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date)).distinct().options(
                    joinedload(Supply.source),
                    joinedload(Supply.generator_type)):
                source_codes.add(supply.source.code)
                gen_type = supply.generator_type
                if gen_type is not None:
                    gen_types.add(gen_type.code)
            source_codes_str = ", ".join(sorted(source_codes))
            gen_types_str = ", ".join(sorted(gen_types))
            # Start a new row at each CT midnight, flushing the previous
            # day's row. NOTE(review): assumes the first hh is at CT
            # midnight; otherwise row is None at the append — confirm
            # against Site.hh_data's contract.
            row = None
            for hh in site.hh_data(sess, start_date, finish_date):
                ct_start_date = to_ct(hh["start_date"])
                if ct_start_date.hour == 0 and ct_start_date.minute == 0:
                    if row is not None:
                        writer.writerow(row)
                    row = [
                        site.code, site.name, associates, source_codes_str,
                        gen_types_str, start_date_str, finish_date_str, typ,
                        ct_start_date.strftime("%Y-%m-%d")]
                row.append(str(round(hh[typ], 2)))
            if row is not None:
                writer.writerow(row)
            # NOTE(review): '%Y%m%d%M%H' puts minutes before hours —
            # looks like a typo for %H%M, but kept since downstream
            # consumers may rely on the existing names.
            zf.writestr(
                f"{site.code}_{finish_date.strftime('%Y%m%d%M%H')}.csv",
                buf.getvalue())
            # Avoid a long-running transaction.
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # Fix: ZipFile.write() expects a filename on disk; writestr is the
        # call that embeds text. Also guard against zf being None when the
        # failure happened before the archive was opened.
        if zf is not None:
            zf.writestr("error.txt", msg)
    finally:
        if sess is not None:
            sess.close()
        # zf non-None implies running_name/finished_name exist.
        if zf is not None:
            zf.close()
            os.rename(running_name, finished_name)
def content(start_date, finish_date, contract_id, user):
    """Write a CSV of MOP virtual bills for every era on the contract.

    One row per era overlapping [start_date, finish_date], clamped to
    the era's lifetime. The finished file is registered through
    chellow.dloads.
    """
    caches = {}
    # Initialise everything referenced in except/finally so an early
    # failure (e.g. in make_names or open) can't raise NameError there.
    sess = supply_source = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "mop_virtual_bills.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        contract = Contract.get_mop_by_id(sess, contract_id)
        forecast_date = chellow.computer.forecast_date()
        header_titles = [
            "Import MPAN Core", "Export MPAN Core", "Start Date",
            "Finish Date"]
        bill_titles = chellow.computer.contract_func(
            caches, contract, "virtual_bill_titles")()
        writer.writerow(header_titles + bill_titles)
        vb_func = chellow.computer.contract_func(
            caches, contract, "virtual_bill")
        for era in sess.query(Era).filter(
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.mop_contract == contract).order_by(
                Era.imp_mpan_core, Era.exp_mpan_core, Era.start_date):
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            import_mpan_core = era.imp_mpan_core
            if import_mpan_core is None:
                import_mpan_core_str = ""
            else:
                is_import = True
                import_mpan_core_str = import_mpan_core
            # If both cores exist, the export branch wins and the supply
            # source is built export-side — preserved original behaviour.
            export_mpan_core = era.exp_mpan_core
            if export_mpan_core is None:
                export_mpan_core_str = ""
            else:
                is_import = False
                export_mpan_core_str = export_mpan_core
            out = [
                import_mpan_core_str, export_mpan_core_str,
                hh_format(chunk_start), hh_format(chunk_finish)]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.mop_bill
            # Known titles in order, then leftovers as name/value pairs.
            for title in bill_titles:
                if title in bill:
                    out.append(make_val(bill[title]))
                    del bill[title]
                else:
                    out.append("")
            for k in sorted(bill.keys()):
                out.append(k)
                out.append(str(bill[k]))
            writer.writerow(out)
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += (
                "with supply " + supply_source.mpan_core + " starting at " +
                hh_format(supply_source.start_date) + " ")
        msg += str(e)
        sys.stderr.write(msg)
        if writer is not None:
            writer.writerow([msg])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # writer may be None if the failure happened before open().
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        # f non-None implies running_name/finished_name were assigned.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def test_hh_format_none():
    # A None datetime renders as the open-ended marker.
    assert hh_format(None) == "ongoing"
def content(year, supply_id, user):
    """Write the CRC (Carbon Reduction Commitment) annual report CSV.

    Covers the CRC year April `year` to April `year + 1`. For each net /
    gen-net supply it estimates kWh and "normal days" (days covered by
    actual data) per meter category (hh, amr, nhh, unmetered), and marks
    the supply Actual or Estimated depending on coverage. supply_id of
    None means all supplies.
    """
    f = sess = None
    try:
        sess = Session()
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        # Read types counted as actual (not estimated) register reads.
        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        w.writerow(
            (
                'Chellow Supply Id', 'Report Start', 'Report Finish',
                'MPAN Core', 'Site Id', 'Site Name', 'From', 'To',
                'NHH Breakdown', 'Actual HH Normal Days',
                'Actual AMR Normal Days', 'Actual NHH Normal Days',
                'Actual Unmetered Normal Days', 'Max HH Normal Days',
                'Max AMR Normal Days', 'Max NHH Normal Days',
                'Max Unmetered Normal Days', 'Total Actual Normal Days',
                'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh',
                'NHH kWh', 'Unmetered kwh', 'HH Filled kWh',
                'AMR Filled kWh', 'Total kWh', 'Note'))
        # CRC year runs from 1st April.
        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH
        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')),
            Era.imp_mpan_core != null(),
            Era.start_date <= year_finish,
            or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(
            Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
        meter_types = ('hh', 'amr', 'nhh', 'unmetered')
        for supply in supplies:
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])
            breakdown = ''
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).order_by(
                Era.start_date).all()
            supply_from = hh_max(eras[0].start_date, year_start)
            supply_to = hh_min(eras[-1].finish_date, year_finish)
            for era in eras:
                meter_type = era.meter_category
                # Era window clamped to the CRC year.
                period_start = hh_max(era.start_date, year_start)
                period_finish = hh_min(era.finish_date, year_finish)
                # Whole days in the period (+30 min for inclusive HHs).
                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)
                # NOTE: mpan_core/site are reused after this loop, so the
                # output row shows the values from the supply's last era.
                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()
                if meter_type == 'nhh':
                    # Build a chronological list of actual register reads
                    # around the period, then pair consecutive compatible
                    # reads to derive kWh-per-HH rates per TPR.
                    read_list = []
                    read_keys = {}
                    pairs = []
                    # Four iterators: present/previous read dates, before
                    # and after the period start, nearest-first.
                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))
                    # First walk backwards from period_start, then
                    # forwards, merging present/previous reads in date
                    # order into read_list.
                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads
                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            # Fetch the next usable present read (skip
                            # duplicates and reads whose bill has been
                            # superseded by a later-issued bill).
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break
                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join(
                                    [str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue
                                pres_bill = sess.query(Bill) \
                                    .join(BillType).filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue
                                # All registers read at the same moment
                                # on the same meter, keyed by TPR.
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(
                                        RegisterRead).filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill == pres_bill,
                                        RegisterRead.present_date ==
                                        pres_date,
                                        RegisterRead.msn == pres_msn))
                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            # Same for the next usable previous read.
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break
                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join(
                                    [str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill) \
                                    .join(BillType).filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(
                                        RegisterRead).filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id ==
                                        prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))
                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None
                            # Merge the two candidates in date order
                            # (nearest-first when walking backwards).
                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                            # Try to pair the two newest reads: same
                            # meter serial number and same TPR set.
                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]
                                if aft_read['msn'] == fore_read['msn'] \
                                        and set(
                                            aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']
                                    num_hh = (
                                        (
                                            pair_finish_date + HH -
                                            pair_start_date
                                        ).total_seconds()) / (30 * 60)
                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].items():
                                        end_val = \
                                            fore_read['reads'][tpr_code]
                                        kwh = end_val - initial_val
                                        # Negative difference implies the
                                        # register wrapped past its
                                        # maximum; correct for rollover.
                                        if kwh < 0:
                                            digits = int(
                                                math.log10(
                                                    initial_val)) + 1
                                            kwh = 10 ** digits + kwh
                                        tprs[tpr_code] = kwh / num_hh
                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date':
                                            pair_finish_date,
                                            'tprs': tprs})
                            # Backwards: one pair suffices. Forwards:
                            # stop once the reads pass period_finish.
                            if len(pairs) > 0 and (
                                    not is_forwards or (
                                        is_forwards and
                                        read_list[-1]['date'] >
                                        period_finish)):
                                break
                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        # No usable reads: assume zero consumption on a
                        # single dummy TPR over the whole period.
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        # Days actually covered by read pairs count as
                        # "normal" (actual) days.
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                block_start = hh_max(
                                    pair_start, period_start)
                                block_finish = hh_min(
                                    pair_finish, period_finish)
                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (
                                            block_finish - block_start
                                        ).total_seconds() + 60 * 30) / \
                                        (60 * 60 * 24)
                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = \
                            pairs[i]['start-date'] - HH
                    # stretch
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]
                    # squash
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # Accumulate kWh: per-HH rate times HHs in the pair.
                    for pair in pairs:
                        pair_hhs = (
                            (
                                pair['finish-date'] - pair['start-date']
                            ).total_seconds() + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in pair['tprs'].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs
                    breakdown += 'pairs - \n' + str(pairs)
                elif meter_type in ('hh', 'amr'):
                    # Half-hourly metered: sum the actual data and fill
                    # gaps using the year-wide average.
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish).order_by(
                            HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish).order_by(
                            HhDatum.id))
                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (
                        period_finish + HH - period_start
                    ).total_seconds() / (60 * 30)
                    if year_len_kwhs > 0:
                        # Estimate missing HHs at the year's mean value.
                        filled_kwh[meter_type] += year_sum_kwhs / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
                    # Days with status 'A' (actual) data count as normal.
                    normal_days[meter_type] += sess.query(
                        func.count(HhDatum.value)).join(Channel). \
                        filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0] / 48
                elif meter_type == 'unmetered':
                    # Pro-rate the annual supply capacity over the period.
                    year_seconds = (
                        year_finish - year_start).total_seconds() + 60 * 30
                    period_seconds = (
                        period_finish - period_start).total_seconds() + \
                        60 * 30
                    total_kwh[meter_type] += era.imp_sc * \
                        period_seconds / year_seconds
                    normal_days[meter_type] += period_seconds / \
                        (60 * 60 * 24)
            # for full year 183
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            # CRC rule: at least 183 actual days in a full year counts
            # as Actual data.
            is_normal = total_normal_days / total_max_normal_days >= \
                183 / 365
            w.writerow(
                [
                    supply.id, hh_format(year_start),
                    hh_format(year_finish), mpan_core, site.code,
                    site.name, hh_format(supply_from),
                    hh_format(supply_to), breakdown] +
                [normal_days[t] for t in meter_types] +
                [max_normal_days[t] for t in meter_types] +
                [
                    total_normal_days, total_max_normal_days,
                    "Actual" if is_normal else "Estimated"] +
                [total_kwh[t] for t in meter_types] +
                [filled_kwh[t] for t in ('hh', 'amr')] +
                [sum(total_kwh.values()) + sum(filled_kwh.values()), ''])
            # avoid a long running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        # NOTE(review): f may still be None here if the failure happened
        # before open() — confirm whether that path can occur.
        f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def test_hh_format_hh():
    # with_hh=True returns the formatted timestamp plus the HH index.
    assert hh_format(utc_datetime(2019, 6, 30), with_hh=True) == (
        "2019-06-30 01:00", 3)
def datum_2010_04_01(ds, hh):
    """Apply post-2010-04-01 DUoS charges for one half-hour.

    Uses per-DNO red/amber/green time bands and caches the tariff, band
    and LAF look-ups in ds.caches['dno'] keyed by timestamp, since the
    same look-up recurs for every supply in a run.

    Raises BadRequest for an unknown LLFC or DNO code.
    """
    bill = ds.supplier_bill
    dno_cache = ds.caches['dno'][ds.dno_code]
    # Lazily create the nested cache levels (EAFP style throughout).
    try:
        tariff_bands_cache = dno_cache['tariff_bands']
    except KeyError:
        dno_cache['tariff_bands'] = {}
        tariff_bands_cache = dno_cache['tariff_bands']
    try:
        tariff_bands = tariff_bands_cache[ds.llfc_code]
    except KeyError:
        tariff_bands_cache[ds.llfc_code] = {}
        tariff_bands = tariff_bands_cache[ds.llfc_code]
    try:
        tariff, band = tariff_bands[hh['start-date']]
    except KeyError:
        # Cache miss: find the tariff whose comma-separated LLFC list
        # contains our LLFC.
        tariff = None
        for llfcs, tf in ds.hh_rate(
                ds.dno_contract.id, hh['start-date'], 'tariffs').items():
            if ds.llfc_code in [cd.strip() for cd in llfcs.split(',')]:
                tariff = tf
                break
        if tariff is None:
            raise BadRequest(
                "For the DNO " + ds.dno_code + " and timestamp " +
                hh_format(hh['start-date']) + " the LLFC '" +
                ds.llfc_code +
                "' can't be found in the 'tariffs' section.")
        # Determine the red/amber/green band; windows differ per DNO.
        band = 'green'
        if ds.dno_code == '14':
            if hh['ct-day-of-week'] < 5:
                if 16 <= hh['ct-decimal-hour'] < 19:
                    band = 'red'
                elif 7 < hh['ct-decimal-hour'] < 21:
                    band = 'amber'
        elif ds.dno_code == '20':
            if hh['ct-day-of-week'] < 5:
                if 16 < hh['ct-decimal-hour'] < 19:
                    band = 'red'
                elif 9 <= hh['ct-decimal-hour'] <= 20:
                    band = 'amber'
        elif ds.dno_code == '22':
            if hh['ct-day-of-week'] > 4:
                # Weekend: amber evening only, no red.
                if 16 < hh['ct-decimal-hour'] <= 19:
                    band = 'amber'
            else:
                if 17 <= hh['ct-decimal-hour'] < 19:
                    band = 'red'
                elif 7 < hh['ct-decimal-hour'] <= 21:
                    band = 'amber'
        else:
            raise BadRequest("DNO code not recognized.")
        tariff_bands[hh['start-date']] = (tariff, band)
    # LAF cache: voltage level -> is_substation -> timestamp.
    try:
        laf_cache = dno_cache['lafs']
    except KeyError:
        dno_cache['lafs'] = {}
        laf_cache = dno_cache['lafs']
    try:
        laf_cache_v = laf_cache[ds.voltage_level_code]
    except KeyError:
        laf_cache[ds.voltage_level_code] = {}
        laf_cache_v = laf_cache[ds.voltage_level_code]
    try:
        lafs = laf_cache_v[ds.is_substation]
    except KeyError:
        laf_cache_v[ds.is_substation] = {}
        lafs = laf_cache_v[ds.is_substation]
    try:
        laf = lafs[hh['start-date']]
    except KeyError:
        # Cache miss: pick the LAF time slot, then look it up.
        vl_key = ds.voltage_level_code.lower() + \
            ('-sub' if ds.is_substation else '-net')
        slot_name = 'other'
        if ds.dno_code == '20':
            if 0 < hh['ct-decimal-hour'] <= 7:
                slot_name = 'night'
            elif hh['ct-day-of-week'] < 5 and \
                    hh['ct-month'] in [11, 12, 1, 2]:
                if 16 <= hh['ct-decimal-hour'] < 19:
                    slot_name = 'peak'
                elif 7 < hh['ct-decimal-hour'] < 20:
                    slot_name = 'winter-weekday'
        elif ds.dno_code in ['14', '22']:
            # Night wraps midnight: (23:00, 24:00) or (00:00, 06:00].
            if 23 < hh['ct-decimal-hour'] or hh['ct-decimal-hour'] <= 6:
                slot_name = 'night'
            elif hh['ct-day-of-week'] < 5 and \
                    hh['ct-month'] in [11, 12, 1, 2]:
                if 16 <= hh['ct-decimal-hour'] < 19:
                    slot_name = 'winter-weekday-peak'
                elif hh['ct-decimal-hour'] < 16:
                    slot_name = 'winter-weekday-day'
        else:
            raise BadRequest("Not recognized")
        laf = ds.hh_rate(
            ds.dno_contract.id, hh['start-date'],
            'lafs')[vl_key][slot_name]
        lafs[hh['start-date']] = laf
    hh['laf'] = laf
    hh['gsp-kwh'] = laf * hh['msp-kwh']
    # kW = kWh per half-hour * 2.
    hh['gsp-kw'] = hh['gsp-kwh'] * 2
    # Reactive power beyond a 0.95 power factor is chargeable.
    kvarh = max(
        max(hh['imp-msp-kvarh'], hh['exp-msp-kvarh']) -
        (0.95 ** -2 - 1) ** 0.5 * hh['msp-kwh'], 0)
    bill['duos-reactive-kvarh'] += kvarh
    rate = tariff['gbp-per-kvarh']
    ds.supplier_rate_sets['duos-reactive-rate'].add(rate)
    bill['duos-reactive-gbp'] += kvarh * rate
    # Unit charge at the band's rate.
    rate = tariff[KEYS[band]['tariff-rate']]
    ds.supplier_rate_sets[KEYS[band]['bill-rate']].add(rate)
    bill[KEYS[band]['kwh']] += hh['msp-kwh']
    bill[KEYS[band]['gbp']] += rate * hh['msp-kwh']
    # Daily charges are applied on the last HH of the CT day, for real
    # (non-displaced) supplies only.
    if hh['ct-decimal-hour'] == 23.5 and not ds.is_displaced:
        bill['duos-fixed-days'] += 1
        rate = tariff['gbp-per-mpan-per-day']
        ds.supplier_rate_sets['duos-fixed-rate'].add(rate)
        bill['duos-fixed-gbp'] += rate
        bill['duos-availability-days'] += 1
        kva = ds.sc
        ds.supplier_rate_sets['duos-availability-kva'].add(kva)
        rate = tariff['gbp-per-kva-per-day']
        ds.supplier_rate_sets['duos-availability-rate'].add(rate)
        bill['duos-availability-gbp'] += rate * kva
    # Excess availability is assessed at CT month end against the
    # month's maximum demand in kVA.
    if hh['ct-is-month-end'] and not ds.is_displaced:
        month_to = hh['start-date']
        month_from = month_to - relativedelta(months=1) + HH
        md_kva = 0
        days_in_month = 0
        for dsc in chellow.computer.get_data_sources(
                ds, month_from, month_to):
            for datum in dsc.hh_data:
                # kVA = sqrt(kW^2 + kvar^2), using the larger of the
                # import/export reactive components.
                md_kva = max(
                    md_kva, (
                        datum['msp-kw'] ** 2 + max(
                            datum['imp-msp-kvar'],
                            datum['exp-msp-kvar']) ** 2) ** 0.5)
                if datum['utc-decimal-hour'] == 0:
                    days_in_month += 1
        excess_kva = max(md_kva - ds.sc, 0)
        if 'excess-gbp-per-kva-per-day' in tariff:
            # NOTE(review): rate is assigned the same value twice below;
            # the first assignment is redundant but harmless.
            rate = tariff['excess-gbp-per-kva-per-day']
            ds.supplier_rate_sets['duos-excess-availability-kva'].add(
                excess_kva)
            rate = tariff['excess-gbp-per-kva-per-day']
            ds.supplier_rate_sets['duos-excess-availability-rate'].add(
                rate)
            bill['duos-excess-availability-days'] += days_in_month
            bill['duos-excess-availability-gbp'] += rate * excess_kva * \
                days_in_month
def content(year, supply_id, sess):
    """Stream a CSV of actual triad data for the financial year ending
    31 March of `year`, one row per profile-class-00 era on a 'net' or
    'gen-net' source.

    year -- calendar year in which the financial year ends.
    supply_id -- restrict the report to one supply, or None for all.
    sess -- SQLAlchemy session.

    Yields CSV text chunks; on failure the traceback is yielded as the
    final chunk so the error is visible in the downloaded file.
    """
    caches = {}
    try:
        # Last half-hour of the financial year (31 March 23:30 UTC).
        year_finish = Datetime(year, 4, 1, tzinfo=pytz.utc) - HH

        def triad_csv(supply_source):
            # A missing supply source, or a '99' (third-party) MPAN
            # core, produces 19 blank columns to keep rows aligned.
            if supply_source is None or \
                    supply_source.mpan_core.startswith('99'):
                return [''] * 19
            chellow.duos.duos_vb(supply_source)
            chellow.triad.hh(supply_source)
            chellow.triad.bill(supply_source)
            bill = supply_source.supplier_bill
            # Collapse single-member rate sets into scalar bill values.
            for rname, rset in supply_source.supplier_rate_sets.items():
                if len(rset) == 1:
                    bill[rname] = rset.pop()
            values = [supply_source.mpan_core]
            for i in range(1, 4):
                triad_prefix = 'triad-actual-' + str(i)
                for suffix in [
                        '-date', '-msp-kw', '-status', '-laf', '-gsp-kw']:
                    values.append(bill[triad_prefix + suffix])
            suffixes = ['gsp-kw', 'rate', 'gbp']
            values += [bill['triad-actual-' + suf] for suf in suffixes]
            return values

        yield ', '.join(
            (
                "Site Code", "Site Name", "Supply Name", "Source",
                "Generator Type", "Import MPAN Core", "Import T1 Date",
                "Import T1 MSP kW", "Import T1 Status", "Import T1 LAF",
                "Import T1 GSP kW", "Import T2 Date", "Import T2 MSP kW",
                "Import T2 Status", "Import T2 LAF", "Import T2 GSP kW",
                "Import T3 Date", "Import T3 MSP kW", "Import T3 Status",
                "Import T3 LAF", "Import T3 GSP kW", "Import GSP kW",
                "Import Rate GBP / kW", "Import GBP", "Export MPAN Core",
                "Export T1 Date", "Export T1 MSP kW", "Export T1 Status",
                "Export T1 LAF", "Export T1 GSP kW", "Export T2 Date",
                "Export T2 MSP kW", "Export T2 Status", "Export T2 LAF",
                "Export T2 GSP kW", "Export T3 Date", "Export T3 MSP kW",
                "Export T3 Status", "Export T3 LAF", "Export T3 GSP kW",
                "Export GSP kW", "Export Rate GBP / kW",
                "Export GBP")) + '\n'
        forecast_date = chellow.computer.forecast_date()
        eras = sess.query(Era).join(Supply).join(Source).join(Pc).filter(
            Era.start_date <= year_finish,
            or_(Era.finish_date == null(), Era.finish_date >= year_finish),
            Source.code.in_(('net', 'gen-net')),
            Pc.code == '00').order_by(Supply.id)
        if supply_id is not None:
            eras = eras.filter(Supply.id == supply_id)
        for era in eras:
            site = sess.query(Site).join(SiteEra).filter(
                SiteEra.is_physical == true(), SiteEra.era == era).one()
            supply = era.supply
            yield site.code + ',"' + site.name + '","' + supply.name + \
                '",' + supply.source.code
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_supply_source = None
            else:
                imp_supply_source = chellow.computer.SupplySource(
                    sess, year_finish, year_finish, forecast_date, era,
                    True, None, caches)
            exp_mpan_core = era.exp_mpan_core
            if exp_mpan_core is None:
                exp_supply_source = None
            else:
                exp_supply_source = chellow.computer.SupplySource(
                    sess, year_finish, year_finish, forecast_date, era,
                    False, None, caches)
            gen_type = supply.generator_type
            gen_type = '' if gen_type is None else gen_type.code
            for value in [gen_type] + triad_csv(imp_supply_source) + \
                    triad_csv(exp_supply_source):
                if isinstance(value, Datetime):
                    yield ',"' + hh_format(value) + '"'
                else:
                    yield ',"' + str(value) + '"'
            yield '\n'
    except Exception:
        # A bare 'except:' here would also catch GeneratorExit when the
        # consumer closes this generator, and yielding from that handler
        # raises RuntimeError — so catch only ordinary exceptions.
        yield traceback.format_exc()
def content(end_year, end_month, months, site_id, sess):
    """Stream a CSV of monthly displaced-energy virtual bills for one
    site, over the `months`-long period ending with end_year/end_month.

    For each month and each site group, works out how much generated
    energy was displaced (generation minus export), attributes it to
    generator types, and renders the supplier's displaced virtual bill.
    Yields CSV text chunks; on failure the traceback is yielded last.
    """
    caches = {}
    try:
        finish_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) + \
            relativedelta(months=1) - HH
        start_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)
        forecast_date = chellow.computer.forecast_date()
        site = Site.get_by_id(sess, site_id)
        month_start = start_date
        month_finish = month_start + relativedelta(months=1) - HH
        while not month_finish > finish_date:
            for group in site.groups(sess, month_start, month_finish, True):
                # Clip the group to the month being processed.
                if group.start_date > month_start:
                    chunk_start = group.start_date
                else:
                    chunk_start = month_start
                if group.finish_date > month_finish:
                    chunk_finish = month_finish
                else:
                    chunk_finish = group.finish_date
                displaced_era = chellow.computer.displaced_era(
                    sess, group, chunk_start, chunk_finish)
                if displaced_era is None:
                    continue
                supplier_contract = displaced_era.imp_supplier_contract
                linked_sites = ','.join(
                    a_site.code for a_site in group.sites
                    if not a_site == site)
                generator_types = ' '.join(
                    sorted(
                        [
                            supply.generator_type.code
                            for supply in group.supplies
                            if supply.generator_type is not None]))
                total_gen_breakdown = {}
                # Raw half-hourly readings for every supply in the
                # group, excluding import-net channels.
                results = iter(
                    sess.execute(
                        "select supply.id, hh_datum.value, "
                        "hh_datum.start_date, channel.imp_related, "
                        "source.code, generator_type.code as gen_type_code "
                        "from hh_datum, channel, source, era, supply left "
                        "outer join generator_type on "
                        "supply.generator_type_id = generator_type.id where "
                        "hh_datum.channel_id = channel.id and "
                        "channel.era_id = era.id and era.supply_id = "
                        "supply.id and supply.source_id = source.id and "
                        "channel.channel_type = 'ACTIVE' and not "
                        "(source.code = 'net' and channel.imp_related is "
                        "true) and hh_datum.start_date >= :chunk_start and "
                        "hh_datum.start_date <= :chunk_finish and "
                        "supply.id = any(:supply_ids) order by "
                        "hh_datum.start_date, supply.id",
                        params={
                            'chunk_start': chunk_start,
                            'chunk_finish': chunk_finish,
                            'supply_ids': [
                                s.id for s in group.supplies]}))
                try:
                    res = next(results)
                    hhChannelValue = res.value
                    hhChannelStartDate = res.start_date
                    imp_related = res.imp_related
                    source_code = res.code
                    gen_type = res.gen_type_code
                    hh_date = chunk_start
                    # NOTE(review): this walks to the overall report
                    # finish_date rather than chunk_finish; the extra
                    # iterations find no rows so totals are unaffected,
                    # but chunk_finish looks like the intended bound —
                    # confirm before changing.
                    while hh_date <= finish_date:
                        gen_breakdown = {}
                        exported = 0
                        # Consume every reading for this half-hour.
                        while hhChannelStartDate == hh_date:
                            if not imp_related and source_code in (
                                    'net', 'gen-net'):
                                exported += hhChannelValue
                            if (imp_related and source_code == 'gen') or (
                                    not imp_related and
                                    source_code == 'gen-net'):
                                gen_breakdown[gen_type] = \
                                    gen_breakdown.setdefault(
                                        gen_type, 0) + hhChannelValue
                            if (not imp_related and
                                    source_code == 'gen') or (
                                    imp_related and
                                    source_code == 'gen-net'):
                                gen_breakdown[gen_type] = \
                                    gen_breakdown.setdefault(
                                        gen_type, 0) - hhChannelValue
                            try:
                                res = next(results)
                                source_code = res.code
                                hhChannelValue = res.value
                                hhChannelStartDate = res.start_date
                                imp_related = res.imp_related
                                gen_type = res.gen_type_code
                            except StopIteration:
                                hhChannelStartDate = None
                        displaced = sum(gen_breakdown.values()) - exported
                        # Attribute displaced kWh to generator types in
                        # key order until the displaced total is used up.
                        added_so_far = 0
                        for key in sorted(gen_breakdown.keys()):
                            kwh = gen_breakdown[key]
                            if kwh + added_so_far > displaced:
                                total_gen_breakdown[key] = \
                                    total_gen_breakdown.get(key, 0) + \
                                    displaced - added_so_far
                                break
                            else:
                                total_gen_breakdown[key] = \
                                    total_gen_breakdown.get(key, 0) + kwh
                                added_so_far += kwh
                        hh_date += HH
                except StopIteration:
                    pass
                site_ds = chellow.computer.SiteSource(
                    sess, site, chunk_start, chunk_finish, forecast_date,
                    None, caches, displaced_era)
                disp_func = chellow.computer.contract_func(
                    caches, supplier_contract, 'displaced_virtual_bill',
                    None)
                disp_func(site_ds)
                bill = site_ds.supplier_bill
                bill_titles = chellow.computer.contract_func(
                    caches, supplier_contract,
                    'displaced_virtual_bill_titles', None)()
                yield ','.join(
                    [
                        'Site Code', 'Site Name', 'Associated Site Ids',
                        'From', 'To', 'Gen Types', 'CHP kWh', 'LM kWh',
                        'Turbine kWh', 'PV kWh'] + bill_titles) + '\n'
                yield ','.join(
                    '"' + str(value) + '"' for value in [
                        site.code, site.name, linked_sites,
                        hh_format(chunk_start), hh_format(chunk_finish),
                        generator_types] + [
                        total_gen_breakdown.get(t, '') for t in [
                            'chp', 'lm', 'turb', 'pv']])
                for title in bill_titles:
                    if title in bill:
                        v = bill[title]
                        if isinstance(v, Datetime):
                            val = hh_format(v)
                        else:
                            val = str(v)
                        del bill[title]
                    else:
                        val = ''
                    yield ',"' + val + '"'
                # Any leftover bill keys are appended as name/value
                # pairs.
                for k in sorted(bill.keys()):
                    v = bill[k]
                    if isinstance(v, Datetime):
                        val = hh_format(v)
                    else:
                        val = str(v)
                    yield ',"' + k + '","' + val + '"'
                yield '\n'
            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except Exception:
        # Catch Exception rather than a bare 'except:' so that closing
        # the generator (GeneratorExit) isn't swallowed and answered
        # with another yield, which would raise RuntimeError.
        yield traceback.format_exc()
def content(
        start_date, finish_date, supply_id, mpan_cores, is_zipped, user):
    """Write half-hourly channel data, one CSV row per half-hour per
    supply, between start_date and finish_date.

    If is_zipped, each supply's rows go into their own member of a zip
    archive; otherwise everything is appended to a single CSV file.
    supply_id and mpan_cores each optionally restrict the supplies. The
    file is written under a 'running' name and renamed when complete.
    """
    if is_zipped:
        file_extension = ".zip"
    else:
        file_extension = ".csv"
    base_name = "hh_data_row_" + start_date.strftime("%Y%m%d%H%M") + \
        file_extension
    titles = ','.join(
        '"' + v + '"' for v in (
            "Site Code", "Imp MPAN Core", "Exp Mpan Core", "Start Date",
            "Import ACTIVE", "Import ACTIVE Status", "Import REACTIVE_IMP",
            "Import REACTIVE_IMP Status", "Import REACTIVE_EXP",
            "Import REACTIVE_EXP Status", "Export ACTIVE",
            "Export ACTIVE Status", "Export REACTIVE_IMP",
            "Export REACTIVE_IMP Status", "Export REACTIVE_EXP",
            "Export REACTIVE_EXP Status")) + "\n"
    running_name, finished_name = chellow.dloads.make_names(base_name, user)
    if is_zipped:
        zf = zipfile.ZipFile(running_name, 'w')
    else:
        tmp_file = open(running_name, "w")
    sess = None
    try:
        sess = Session()
        supplies = sess.query(Supply).join(Era).filter(
            Era.start_date <= finish_date,
            or_(
                Era.finish_date == null(), Era.finish_date >= start_date),
        ).order_by(Era.supply_id, Era.start_date).distinct()
        if supply_id is not None:
            sup = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Era.supply == sup)
        if mpan_cores is not None:
            supplies = supplies.filter(
                or_(
                    Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
        if not is_zipped:
            tmp_file.write(titles)
        for supply in supplies:
            site, era = sess.query(Site, Era).join(Era.site_eras).filter(
                Era.supply == supply, Era.start_date <= finish_date,
                SiteEra.site_id == Site.id,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date),
                SiteEra.is_physical == true()).order_by(Era.id).first()
            outs = []
            # Each self-join picks out one channel type's value/status
            # for the half-hour. The final three select columns
            # previously read the imp_* / exp_reactive_imp aliases by
            # mistake (and exp_reactive_exp was joined but never
            # selected), so the export reactive columns were wrong —
            # fixed to read exp_reactive_imp.status and
            # exp_reactive_exp.value/status.
            for hh_start_date, imp_active, imp_active_status, \
                    imp_reactive_imp, imp_reactive_imp_status, \
                    imp_reactive_exp, imp_reactive_exp_status, \
                    exp_active, exp_active_status, exp_reactive_imp, \
                    exp_reactive_imp_status, exp_reactive_exp, \
                    exp_reactive_exp_status in sess.execute("""
select hh_base.start_date,
    max(imp_active.value), max(imp_active.status),
    max(imp_reactive_imp.value), max(imp_reactive_imp.status),
    max(imp_reactive_exp.value), max(imp_reactive_exp.status),
    max(exp_active.value), max(exp_active.status),
    max(exp_reactive_imp.value), max(exp_reactive_imp.status),
    max(exp_reactive_exp.value), max(exp_reactive_exp.status)
from hh_datum hh_base
    join channel on hh_base.channel_id = channel.id
    join era on channel.era_id = era.id
    left join hh_datum imp_active
        on (imp_active.id = hh_base.id
            and channel.imp_related is true
            and channel.channel_type = 'ACTIVE')
    left join hh_datum imp_reactive_imp
        on (imp_reactive_imp.id = hh_base.id
            and channel.imp_related is true
            and channel.channel_type = 'REACTIVE_IMP')
    left join hh_datum imp_reactive_exp
        on (imp_reactive_exp.id = hh_base.id
            and channel.imp_related is true
            and channel.channel_type = 'REACTIVE_EXP')
    left join hh_datum exp_active
        on (exp_active.id = hh_base.id
            and channel.imp_related is false
            and channel.channel_type = 'ACTIVE')
    left join hh_datum exp_reactive_imp
        on (exp_reactive_imp.id = hh_base.id
            and channel.imp_related is false
            and channel.channel_type = 'REACTIVE_IMP')
    left join hh_datum exp_reactive_exp
        on (exp_reactive_exp.id = hh_base.id
            and channel.imp_related is false
            and channel.channel_type = 'REACTIVE_EXP')
where supply_id = :supply_id
    and hh_base.start_date between :start_date and :finish_date
group by hh_base.start_date
order by hh_base.start_date
""", params={
                    'supply_id': supply.id,
                    'start_date': start_date,
                    'finish_date': finish_date}):
                outs.append(','.join(
                    '"' + ('' if v is None else str(v)) + '"' for v in (
                        site.code, era.imp_mpan_core, era.exp_mpan_core,
                        hh_format(hh_start_date), imp_active,
                        imp_active_status, imp_reactive_imp,
                        imp_reactive_imp_status, imp_reactive_exp,
                        imp_reactive_exp_status, exp_active,
                        exp_active_status, exp_reactive_imp,
                        exp_reactive_imp_status, exp_reactive_exp,
                        exp_reactive_exp_status)) + '\n')
            if is_zipped:
                zf.writestr(
                    (
                        "hh_data_row_" + str(era.id) + "_" +
                        str(era.imp_mpan_core) + "_" +
                        str(era.exp_mpan_core)).replace(' ', '') + '.csv',
                    titles + ''.join(outs))
            else:
                tmp_file.write(''.join(outs))
    except BaseException:
        # Best-effort error reporting into the download itself.
        msg = "Problem " + traceback.format_exc()
        if is_zipped:
            zf.writestr('error.txt', msg)
        else:
            tmp_file.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if is_zipped:
            zf.close()
        else:
            tmp_file.close()
        os.rename(running_name, finished_name)
def datum_beginning_22(ds, hh):
    """DUoS virtual bill for a single half-hour under the DNO 22 tariff
    structure.

    Accumulates day/night unit charges into ds.supplier_bill, attaches
    the loss adjustment factor (LAF) for the half-hour's time slot, and
    on the last half-hour of a UTC month settles the availability and
    reactive-power charges for the month.
    """
    bill = ds.supplier_bill
    tariff = ds.hh_rate(
        ds.dno_contract.id, hh['start-date'], 'tariffs')[ds.llfc_code]
    lafs = ds.hh_rate(
        ds.dno_contract.id, hh['start-date'],
        'lafs')[VL_LOOKUP[ds.voltage_level_code][ds.is_substation]]
    if ds.is_import:
        try:
            day_rate = tariff['day-gbp-per-kwh']
        except KeyError:
            raise BadRequest(
                "For the DNO " + ds.dno_contract.name +
                " and the rate script at date " +
                hh_format(hh['start-date']) +
                " and the rate 'tariffs' with the LLFC code " +
                ds.llfc_code +
                " the key 'day-gbp-per-kwh' can't be found.")
        night_rate = tariff['night-gbp-per-kwh']
        # Day units are half-hours starting after 06:00 up to and
        # including 23:00 clock time; everything else is night rate.
        if 6 < hh['ct-decimal-hour'] <= 23:
            bill['duos-day-kwh'] += hh['msp-kwh']
            bill['duos-day-gbp'] += hh['msp-kwh'] * day_rate
        else:
            bill['duos-night-kwh'] += hh['msp-kwh']
            bill['duos-night-gbp'] += hh['msp-kwh'] * night_rate
    # LAF time slot. The night window wraps midnight, so it needs two
    # comparisons joined with 'or' (as in the DNO 14/22 branch of the
    # virtual-bill code): the previous chained form
    # 23 < h <= 6 could never be true, so 'night' was unreachable.
    if 23 < hh['ct-decimal-hour'] or hh['ct-decimal-hour'] <= 6:
        slot_name = 'night'
    elif hh['ct-day-of-week'] < 5 and (
            hh['ct-month'] > 10 or hh['ct-month'] < 3):
        if 15.5 < hh['ct-decimal-hour'] < 18:
            slot_name = 'winter-weekday-peak'
        elif 6 < hh['ct-decimal-hour'] < 15:
            slot_name = 'winter-weekday-day'
        else:
            slot_name = 'other'
    else:
        slot_name = 'other'
    hh['laf'] = lafs[slot_name]
    hh['gsp-kwh'] = hh['laf'] * hh['msp-kwh']
    hh['gsp-kw'] = hh['gsp-kwh'] * 2  # kWh per half-hour -> kW
    if hh['utc-is-month-end']:
        tariff = ds.hh_rate(
            ds.dno_contract.id, hh['start-date'], 'tariffs')[ds.llfc_code]
        reactive_rate = tariff['reactive-gbp-per-kvarh']
        bill['duos-reactive-rate'] = reactive_rate
        days_in_month = hh['utc-day']
        if not ds.is_displaced:
            # Maximum demand (kVA) over the month's half-hours.
            md_kva = max(
                (datum['msp-kw'] ** 2 + datum['imp-msp-kvar'] ** 2) ** 0.5
                for datum in ds.hh_data)
            bill['duos-availability-kva'] = ds.sc
            bill['duos-excess-availability-kva'] = max(md_kva - ds.sc, 0)
            for prefix in ['', 'excess-']:
                tariff_key = prefix + 'gbp-per-kva-per-day'
                if tariff_key in tariff:
                    rate_key = 'duos-' + prefix + 'availability-rate'
                    bill[rate_key] = tariff[tariff_key]
                    bill['duos-' + prefix + 'availability-days'] = \
                        days_in_month
                    bill['duos-' + prefix + 'availability-gbp'] = \
                        bill[rate_key] * \
                        bill['duos-' + prefix + 'availability-kva'] * \
                        bill['duos-' + prefix + 'availability-days']
        month_imp_kvarh = sum(h['imp-msp-kvarh'] for h in ds.hh_data)
        month_kwh = sum(h['msp-kwh'] for h in ds.hh_data)
        if month_kwh is None:
            month_kwh = 0
        # Reactive units above half the month's kWh are chargeable.
        bill['duos-reactive-gbp'] += max(
            0, month_imp_kvarh - month_kwh / 2) * reactive_rate
def content(year, site_id, user):
    """Write a CSV of displaced-triad figures for generating sites, for
    the triad season ending 31 March of `year`.

    year -- calendar year containing the end of the triad period.
    site_id -- restrict to one site, or None for all generating sites.
    user -- used to name the download files.
    """
    caches = {}
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'output.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        # Header previously read "Displaced TRIAD LAF" for triad 1,
        # inconsistent with the TRIAD 2/3 LAF columns.
        writer.writerow(
            (
                "Site Code", "Site Name", "Displaced TRIAD 1 Date",
                "Displaced TRIAD 1 MSP kW", "Displaced TRIAD 1 LAF",
                "Displaced TRIAD 1 GSP kW", "Displaced TRIAD 2 Date",
                "Displaced TRIAD 2 MSP kW", "Displaced TRIAD 2 LAF",
                "Displaced TRIAD 2 GSP kW", "Displaced TRIAD 3 Date",
                "Displaced TRIAD 3 MSP kW", "Displaced TRIAD 3 LAF",
                "Displaced TRIAD 3 GSP kW", "Displaced GSP kW",
                "Displaced Rate GBP / kW", "GBP"))
        march_finish = datetime.datetime(year, 4, 1, tzinfo=pytz.utc) - HH
        march_start = datetime.datetime(year, 3, 1, tzinfo=pytz.utc)
        forecast_date = chellow.computer.forecast_date()
        if site_id is None:
            # All sites with a generation supply current during March.
            sites = sess.query(Site).join(SiteEra).join(Era).join(
                Supply).join(Source).filter(
                Source.code.in_(('gen', 'gen-net')),
                Era.start_date <= march_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= march_start)).distinct()
        else:
            site = Site.get_by_id(sess, site_id)
            sites = sess.query(Site).filter(Site.id == site.id)
        for site in sites.order_by(Site.code):
            displaced_era = chellow.computer.displaced_era(
                sess, caches, site, march_start, march_finish,
                forecast_date)
            if displaced_era is None:
                continue
            site_ds = chellow.computer.SiteSource(
                sess, site, march_start, march_finish, forecast_date,
                caches, displaced_era)
            chellow.duos.duos_vb(site_ds)
            chellow.triad.hh(site_ds)
            chellow.triad.bill(site_ds)
            bill = site_ds.supplier_bill
            # Collapse single-member rate sets into scalar bill values.
            for rname, rset in site_ds.supplier_rate_sets.items():
                if len(rset) == 1:
                    bill[rname] = rset.pop()
            values = [site.code, site.name]
            for i in range(1, 4):
                triad_prefix = 'triad-actual-' + str(i)
                values.append(hh_format(bill[triad_prefix + '-date']))
                for suffix in ['-msp-kw', '-laf', '-gsp-kw']:
                    values.append(bill[triad_prefix + suffix])
            values += [
                str(bill['triad-actual-' + suf]) for suf in [
                    'gsp-kw', 'rate', 'gbp']]
            writer.writerow(values)
    except BadRequest as e:
        writer.writerow([e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(g_supply_id, file_name, start_date, finish_date, user):
    """Write the virtual-bill CSV for a single gas supply over
    [start_date, finish_date], one row per g_era chunk.

    file_name is accepted for interface compatibility; it isn't used
    here.
    """
    caches = {}
    # Initialise everything the except/finally clauses touch, so a
    # failure before the output file is opened can't raise NameError.
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'g_supply_virtual_bill_' + str(g_supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        g_supply = GSupply.get_by_id(sess, g_supply_id)
        forecast_dt = forecast_date()
        prev_titles = None
        for g_era in sess.query(GEra).filter(
                GEra.g_supply == g_supply, GEra.start_date < finish_date,
                or_(
                    GEra.finish_date == null(),
                    GEra.finish_date > start_date)).order_by(
                GEra.start_date):
            # Clip the era to the requested period.
            chunk_start = hh_max(g_era.start_date, start_date)
            chunk_finish = hh_min(g_era.finish_date, finish_date)
            site = sess.query(Site).join(SiteGEra).filter(
                SiteGEra.g_era == g_era,
                SiteGEra.is_physical == true()).one()
            ds = GDataSource(
                sess, chunk_start, chunk_finish, forecast_dt, g_era,
                caches, None)
            titles = [
                'MPRN', 'Site Code', 'Site Name', 'Account', 'From', 'To',
                '']
            output_line = [
                g_supply.mprn, site.code, site.name, ds.account,
                hh_format(ds.start_date), hh_format(ds.finish_date), '']
            contract_titles = g_contract_func(
                caches, g_era.g_contract, 'virtual_bill_titles')()
            titles.extend(contract_titles)
            g_contract_func(caches, g_era.g_contract, 'virtual_bill')(ds)
            bill = ds.bill
            for title in contract_titles:
                if title in bill:
                    output_line.append(csv_make_val(bill[title]))
                    del bill[title]
                else:
                    output_line.append('')
            # Keys the contract didn't declare are appended as
            # name/value pairs.
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            # Only repeat the header row when the titles change.
            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            writer.writerow(output_line)
    except BadRequest as e:
        if writer is not None:
            writer.writerow(["Problem: " + e.description])
    except BaseException:
        if writer is not None:
            writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(start_date, finish_date, site_id, user):
    """Write a CSV summarising, for each site (or one site), energy
    totals over [start_date, finish_date], split by stream.

    The file is written under a 'running' download name and renamed to
    the finished name when complete.
    """
    # Initialise everything the except/finally clauses touch, so a
    # failure before the output file is opened can't raise NameError.
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'sites_duration.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            (
                "Site Id", "Site Name", "Associated Site Ids", "Sources",
                "Generator Types", "From", "To", "Imported kWh",
                "Displaced kWh", "Exported kWh", "Used kWh",
                "Parasitic kWh", "Generated kWh", "Meter Type"))
        streams = (
            'imp_net', 'displaced', 'exp_net', 'used', 'exp_gen',
            'imp_gen')
        sites = sess.query(Site).order_by(Site.code)
        if site_id is not None:
            sites = sites.filter(Site.id == site_id)
        start_date_str = hh_format(start_date)
        finish_date_str = hh_format(finish_date)
        for site in sites:
            # Keep the linked-site codes as a list: joining them into a
            # single string first made the later sorted() iterate over
            # characters rather than site codes.
            assoc = [
                s.code for s in site.find_linked_sites(
                    sess, start_date, finish_date)]
            totals = dict((stream, 0) for stream in streams)
            metering_type = ''
            source_codes = set()
            gen_types = set()
            for era in sess.query(Era).join(SiteEra).filter(
                    SiteEra.is_physical == true(), SiteEra.site == site,
                    Era.start_date <= finish_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date)).distinct(
                    ).options(
                    joinedload(Era.supply).joinedload(Supply.source),
                    joinedload(Era.supply).joinedload(
                        Supply.generator_type)):
                supply = era.supply
                source_codes.add(supply.source.code)
                gen_type = supply.generator_type
                if gen_type is not None:
                    gen_types.add(gen_type.code)
                era_meter_type = era.make_meter_category()
                # Keep the highest-precedence metering category seen
                # (lower METER_ORDER value wins).
                if METER_ORDER[era_meter_type] < METER_ORDER[
                        metering_type]:
                    metering_type = era_meter_type
            assoc_str = ','.join(sorted(assoc))
            sources_str = ','.join(sorted(source_codes))
            generators_str = ','.join(sorted(gen_types))
            for hh in site.hh_data(sess, start_date, finish_date):
                for stream in streams:
                    totals[stream] += hh[stream]
            writer.writerow(
                (
                    site.code, site.name, assoc_str, sources_str,
                    generators_str, start_date_str, finish_date_str,
                    totals['imp_net'], totals['displaced'],
                    totals['exp_net'], totals['used'],
                    totals['exp_gen'], totals['imp_gen'],
                    metering_type))
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def run(self):
    """Thread loop: daily, fetch the bank-holidays calendar and fold it
    into the 'bank_holidays' non-core contract, one rate script per
    calendar year.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: if another run holds the lock, skip
        # this round entirely and just wait for the next one.
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check bank holidays")
                contract = Contract.get_non_core_by_name(
                    sess, 'bank_holidays')
                contract_props = contract.make_properties()
                if contract_props.get('enabled', False):
                    url_str = contract_props['url']
                    self.log("Downloading from " + url_str + ".")
                    res = requests.get(url_str)
                    self.log(
                        ' '.join(
                            (
                                "Received", str(res.status_code),
                                res.reason)))
                    # The feed is iCalendar text; each holiday is a
                    # date-valued DTSTART line whose last 8 characters
                    # are the YYYYMMDD date.
                    PREFIX = 'DTSTART;VALUE=DATE:'
                    hols = collections.defaultdict(list)
                    for line in res.text.splitlines():
                        if line.startswith(PREFIX):
                            dt = utc_datetime_parse(line[-8:], "%Y%m%d")
                            hols[dt.year].append(dt)
                    for year in sorted(hols.keys()):
                        year_start = utc_datetime(year, 1, 1)
                        year_finish = year_start + \
                            relativedelta(years=1) - HH
                        rs = sess.query(RateScript).filter(
                            RateScript.contract == contract,
                            RateScript.start_date == year_start).first()
                        if rs is None:
                            # No script for this year yet: shorten the
                            # latest script to end at the year boundary,
                            # then insert a fresh one for the new year.
                            self.log(
                                "Adding a new rate script starting at " +
                                hh_format(year_start) + ".")
                            latest_rs = sess.query(RateScript).filter(
                                RateScript.contract == contract).\
                                order_by(RateScript.start_date.desc()). \
                                first()
                            contract.update_rate_script(
                                sess, latest_rs, latest_rs.start_date,
                                year_finish, latest_rs.script)
                            rs = contract.insert_rate_script(
                                sess, year_start, '')
                        script = {
                            'bank_holidays': [
                                v.strftime("%Y-%m-%d")
                                for v in hols[year]]}
                        self.log(
                            "Updating rate script starting at " +
                            hh_format(year_start) + ".")
                        contract.update_rate_script(
                            sess, rs, rs.start_date, rs.finish_date,
                            json.dumps(
                                script, indent='  ', sort_keys=True))
                        # Commit per year to keep transactions short.
                        sess.commit()
                else:
                    self.log(
                        "The automatic importer is disabled. To "
                        "enable it, edit the contract properties to "
                        "set 'enabled' to True.")
            except:
                # NOTE(review): bare except also traps SystemExit /
                # KeyboardInterrupt; failures are logged and rolled
                # back so the importer thread keeps running.
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking bank holidays.")
        # Sleep for a day (or until explicitly woken via the event),
        # then clear the event for the next wait.
        self.going.wait(24 * 60 * 60)
        self.going.clear()
def _process_supply(
    sess,
    caches,
    supply_id,
    bill_map,
    forecast_date,
    contract,
    vbf,
    virtual_bill_titles,
    writer,
    titles,
    report_run,
):
    """Reconcile every bill of one supply against its virtual bill.

    For each bill id in bill_map[supply_id], gathers the set of
    'covered' bills of the same market role that overlap it, sums their
    breakdowns, computes the corresponding virtual bill with `vbf`, and
    writes a covered-vs-virtual comparison row to `writer` and to
    `report_run`. Afterwards, 'gap' rows are emitted for charge
    elements that appear virtually but are never covered by any bill.
    """
    # gaps: element name -> half-hour start -> coverage info, kept up
    # to date by add_gap().
    gaps = {}
    # Cache of SupplySource objects keyed by chunk/era/polarity/bill so
    # the virtual bill function runs once per distinct chunk.
    data_sources = {}
    market_role_code = contract.market_role.code
    bill_ids = bill_map[supply_id]
    while len(bill_ids) > 0:
        # Process pending bills in ascending id order; bills that turn
        # out to be covered by this pass are discarded from the set.
        bill_id = list(sorted(bill_ids))[0]
        bill_ids.remove(bill_id)
        bill = (sess.query(Bill).filter(Bill.id == bill_id).options(
            joinedload(Bill.batch),
            joinedload(Bill.bill_type),
            joinedload(Bill.reads),
            joinedload(Bill.supply),
            joinedload(Bill.reads).joinedload(RegisterRead.present_type),
            joinedload(Bill.reads).joinedload(RegisterRead.previous_type),
        ).one())
        virtual_bill = {"problem": ""}
        supply = bill.supply
        # Sanity-check register reads: the meter serial number of each
        # read should match an era on the read's day, and reads taken
        # at the same moment on the same meter should agree on type.
        read_dict = {}
        for read in bill.reads:
            gen_start = read.present_date.replace(hour=0).replace(minute=0)
            gen_finish = gen_start + relativedelta(days=1) - HH
            msn_match = False
            read_msn = read.msn
            for read_era in supply.find_eras(sess, gen_start, gen_finish):
                if read_msn == read_era.msn:
                    msn_match = True
                    break
            if not msn_match:
                virtual_bill["problem"] += (
                    "The MSN " + read_msn + " of the register read " +
                    str(read.id) +
                    " doesn't match the MSN of the era.")
            for dt, typ in [
                (read.present_date, read.present_type),
                (read.previous_date, read.previous_type),
            ]:
                key = str(dt) + "-" + read.msn
                try:
                    if typ != read_dict[key]:
                        virtual_bill["problem"] += (
                            " Reads taken " + "on " + str(dt) +
                            " have differing read types.")
                except KeyError:
                    read_dict[key] = typ
        bill_start = bill.start_date
        bill_finish = bill.finish_date
        covered_start = bill_start
        covered_finish = bill_start
        covered_bdown = {"sum-msp-kwh": 0, "net-gbp": 0, "vat-gbp": 0}
        vb_elems = set()
        # Repeatedly widen [covered_start, covered_finish] until no
        # overlapping bill with shared charge elements extends it.
        enlarged = True
        while enlarged:
            enlarged = False
            covered_elems = find_elements(bill)
            covered_bills = OrderedDict((b.id, b) for b in sess.query(
                Bill).join(Batch).join(Contract).join(MarketRole).filter(
                    Bill.supply == supply,
                    Bill.start_date <= covered_finish,
                    Bill.finish_date >= covered_start,
                    MarketRole.code == market_role_code,
                ).order_by(Bill.start_date, Bill.issue_date))
            # Drop pairs of bills that exactly cancel each other
            # (a bill and its reversal).
            while True:
                to_del = None
                for a, b in combinations(covered_bills.values(), 2):
                    if all((
                        a.start_date == b.start_date,
                        a.finish_date == b.finish_date,
                        a.kwh == -1 * b.kwh,
                        a.net == -1 * b.net,
                        a.vat == -1 * b.vat,
                        a.gross == -1 * b.gross,
                    )):
                        to_del = (a.id, b.id)
                        break
                if to_del is None:
                    break
                else:
                    for k in to_del:
                        del covered_bills[k]
                        bill_ids.discard(k)
            for k, covered_bill in tuple(covered_bills.items()):
                elems = find_elements(covered_bill)
                if elems.isdisjoint(covered_elems):
                    # No shared charge elements: unrelated bill, unless
                    # it's the bill under examination itself.
                    if k != bill.id:
                        del covered_bills[k]
                        continue
                else:
                    covered_elems.update(elems)
                if covered_bill.start_date < covered_start:
                    covered_start = covered_bill.start_date
                    enlarged = True
                    break
                if covered_bill.finish_date > covered_finish:
                    covered_finish = covered_bill.finish_date
                    enlarged = True
                    break
        if len(covered_bills) == 0:
            continue
        # Accumulate the covered bills' breakdowns; the 'primary'
        # covered bill is the one spanning the longest period.
        primary_covered_bill = None
        for covered_bill in covered_bills.values():
            bill_ids.discard(covered_bill.id)
            covered_bdown["net-gbp"] += float(covered_bill.net)
            covered_bdown["vat-gbp"] += float(covered_bill.vat)
            covered_bdown["sum-msp-kwh"] += float(covered_bill.kwh)
            covered_rates = defaultdict(set)
            for k, v in loads(covered_bill.breakdown).items():
                if k in ("raw_lines", "raw-lines"):
                    continue
                if isinstance(v, list):
                    covered_rates[k].update(set(v))
                else:
                    if isinstance(v, Decimal):
                        v = float(v)
                    try:
                        covered_bdown[k] += v
                    except KeyError:
                        covered_bdown[k] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(k) + " in " + str(
                                [b.id for b in covered_bills.values()]) +
                            " the value " + str(v) +
                            " can't be added to the existing value " +
                            str(covered_bdown[k]) + ". " + str(detail))
                    if k.endswith("-gbp"):
                        elem = k[:-4]
                        covered_elems.add(elem)
                        add_gap(
                            caches,
                            gaps,
                            elem,
                            covered_bill.start_date,
                            covered_bill.finish_date,
                            False,
                            v,
                        )
            # A rate is only kept if all covered bills agree on it.
            for k, v in covered_rates.items():
                covered_bdown[k] = v.pop() if len(v) == 1 else None
            if primary_covered_bill is None or (
                    (covered_bill.finish_date - covered_bill.start_date) >
                    (primary_covered_bill.finish_date -
                        primary_covered_bill.start_date)):
                primary_covered_bill = covered_bill
        # Build the virtual bill era by era over the covered period.
        metered_kwh = 0
        for era in (sess.query(Era).filter(
                Era.supply == supply,
                Era.start_date <= covered_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= covered_start),
        ).distinct().options(
                joinedload(Era.channels),
                joinedload(Era.cop),
                joinedload(Era.dc_contract),
                joinedload(Era.exp_llfc),
                joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
                joinedload(Era.exp_supplier_contract),
                joinedload(Era.imp_llfc),
                joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
                joinedload(Era.imp_supplier_contract),
                joinedload(Era.mop_contract),
                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                joinedload(Era.pc),
                joinedload(Era.supply).joinedload(Supply.dno),
                joinedload(Era.supply).joinedload(Supply.gsp_group),
                joinedload(Era.supply).joinedload(Supply.source),
        )):
            chunk_start = hh_max(covered_start, era.start_date)
            chunk_finish = hh_min(covered_finish, era.finish_date)
            if contract not in (
                era.mop_contract,
                era.dc_contract,
                era.imp_supplier_contract,
                era.exp_supplier_contract,
            ):
                virtual_bill["problem"] += "".join((
                    "From ",
                    hh_format(chunk_start),
                    " to ",
                    hh_format(chunk_finish),
                    " the contract of ",
                    "the era doesn't match the contract of the ",
                    "bill.",
                ))
                continue
            # polarity True = import side, False = export side.
            if contract.market_role.code == "X":
                polarity = contract != era.exp_supplier_contract
            else:
                polarity = era.imp_supplier_contract is not None
            try:
                ds_key = (
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era.id,
                    polarity,
                    primary_covered_bill.id,
                )
                data_source = data_sources[ds_key]
            except KeyError:
                data_source = data_sources[
                    ds_key] = chellow.computer.SupplySource(
                    sess,
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era,
                    polarity,
                    caches,
                    primary_covered_bill,
                )
                # Run the virtual bill function only for fresh sources.
                vbf(data_source)
            if data_source.measurement_type == "hh":
                metered_kwh += sum(
                    h["msp-kwh"] for h in data_source.hh_data)
            else:
                # Non-HH: recompute without the bill to get metered kWh.
                ds = chellow.computer.SupplySource(
                    sess,
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era,
                    polarity,
                    caches,
                )
                metered_kwh += sum(h["msp-kwh"] for h in ds.hh_data)
            if market_role_code == "X":
                vb = data_source.supplier_bill
                vb_hhs = data_source.supplier_bill_hhs
            elif market_role_code == "C":
                vb = data_source.dc_bill
                vb_hhs = data_source.dc_bill_hhs
            elif market_role_code == "M":
                vb = data_source.mop_bill
                vb_hhs = data_source.mop_bill_hhs
            else:
                raise BadRequest("Odd market role.")
            # Merge this chunk's virtual bill into the running total.
            for k, v in vb.items():
                try:
                    if isinstance(v, set):
                        virtual_bill[k].update(v)
                    else:
                        virtual_bill[k] += v
                except KeyError:
                    virtual_bill[k] = v
                except TypeError as detail:
                    raise BadRequest(
                        "For key " + str(k) + " and value " + str(v) +
                        ". " + str(detail))
            # Record virtual charges per half-hour for gap detection.
            for dt, bl in vb_hhs.items():
                for k, v in bl.items():
                    if all((k.endswith("-gbp"), k != "net-gbp", v != 0)):
                        add_gap(caches, gaps, k[:-4], dt, dt, True, v)
        for k in virtual_bill.keys():
            if k.endswith("-gbp"):
                vb_elems.add(k[:-4])
        # Map each charge element to all its virtual-bill keys (longest
        # element names first so prefixes don't steal keys).
        long_map = {}
        vb_keys = set(virtual_bill.keys())
        for elem in sorted(vb_elems, key=len, reverse=True):
            els = long_map[elem] = set()
            for k in tuple(vb_keys):
                if k.startswith(elem + "-"):
                    els.add(k)
                    vb_keys.remove(k)
        # Drop virtual elements with no covered counterpart, then
        # recompute net-gbp from the remaining -gbp keys.
        for elem in vb_elems.difference(covered_elems):
            for k in long_map[elem]:
                del virtual_bill[k]
        try:
            del virtual_bill["net-gbp"]
        except KeyError:
            pass
        virtual_bill["net-gbp"] = sum(
            v for k, v in virtual_bill.items() if k.endswith("-gbp"))
        era = supply.find_era_at(sess, bill_finish)
        if era is None:
            imp_mpan_core = exp_mpan_core = None
            site_code = site_name = None
            virtual_bill[
                "problem"] += \
                "This bill finishes before or after the supply. "
        else:
            imp_mpan_core = era.imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.is_physical == true(), SiteEra.era == era).one())
            site_code = site.code
            site_name = site.name
        # Find bill to use for header data
        if bill.id not in covered_bills:
            for cbill in covered_bills.values():
                if bill.batch == cbill.batch:
                    bill = cbill
        values = [
            bill.batch.reference,
            bill.reference,
            bill.bill_type.code,
            bill.kwh,
            bill.net,
            bill.vat,
            bill_start,
            bill_finish,
            imp_mpan_core,
            exp_mpan_core,
            site_code,
            site_name,
            covered_start,
            covered_finish,
            " | ".join(sorted([str(k) for k in covered_bills.keys()])),
            metered_kwh,
        ]
        # Per declared title: covered value, virtual value, and (for
        # -gbp titles) the covered-minus-virtual difference.
        for title in virtual_bill_titles:
            try:
                cov_val = covered_bdown[title]
                values.append(cov_val)
                del covered_bdown[title]
            except KeyError:
                cov_val = None
                values.append("")
            try:
                virt_val = virtual_bill[title]
                values.append(virt_val)
                del virtual_bill[title]
            except KeyError:
                virt_val = 0
                values.append("")
            if title.endswith("-gbp"):
                if isinstance(virt_val, (int, float, Decimal)):
                    if isinstance(cov_val, (int, float, Decimal)):
                        values.append(float(cov_val) - float(virt_val))
                    else:
                        values.append(0 - float(virt_val))
                else:
                    values.append(0)
        # Remaining (undeclared) keys become extra virtual-/covered-
        # columns in the report-run row.
        report_run_values = {}
        report_run_titles = list(titles)
        for title in sorted(virtual_bill.keys()):
            virt_val = virtual_bill[title]
            virt_title = "virtual-" + title
            values += [virt_title, virt_val]
            report_run_values[virt_title] = virt_val
            report_run_titles.append(virt_title)
            if title in covered_bdown:
                cov_title = "covered-" + title
                cov_val = covered_bdown[title]
                report_run_values[cov_title] = cov_val
                report_run_titles.append(cov_title)
                if title.endswith("-gbp"):
                    if isinstance(virt_val, (int, float, Decimal)):
                        if isinstance(cov_val, (int, float, Decimal)):
                            diff_val = float(cov_val) - float(virt_val)
                        else:
                            diff_val = 0 - float(virt_val)
                    else:
                        diff_val = 0
                    report_run_values[f"difference-{title}"] = diff_val
                    # Roll all per-TPR differences into one total.
                    t = "difference-tpr-gbp"
                    try:
                        report_run_values[t] += diff_val
                    except KeyError:
                        report_run_values[t] = diff_val
                        report_run_titles.append(t)
            else:
                cov_title, cov_val = "", ""
                values += [cov_title, cov_val]
        writer.writerow([csv_make_val(v) for v in values])
        report_run_values.update(dict(zip(titles, values)))
        report_run_values["bill_id"] = bill.id
        report_run_values["batch_id"] = bill.batch.id
        report_run_values["supply_id"] = supply.id
        report_run_values["site_id"] = None if site_code is None else site.id
        report_run.insert_row(sess, "", report_run_titles, report_run_values)
        # Mark every half-hour covered by any overlapping bill, of any
        # market role, so gap detection sees full coverage.
        for bill in sess.query(Bill).filter(
                Bill.supply == supply,
                Bill.start_date <= covered_finish,
                Bill.finish_date >= covered_start,
        ):
            for k, v in loads(bill.breakdown).items():
                if k.endswith("-gbp"):
                    add_gap(
                        caches,
                        gaps,
                        k[:-4],
                        bill.start_date,
                        bill.finish_date,
                        False,
                        v,
                    )
        # Avoid long-running transactions
        sess.commit()
    # Coalesce adjacent uncovered half-hours of the same element into
    # contiguous 'clumps'.
    clumps = []
    for element, elgap in sorted(gaps.items()):
        for start_date, hhgap in sorted(elgap.items()):
            if hhgap["has_virtual"] and not hhgap["has_covered"]:
                if len(clumps) == 0 or not all((
                    clumps[-1]["element"] == element,
                    clumps[-1]["finish_date"] + HH == start_date,
                )):
                    clumps.append({
                        "element": element,
                        "start_date": start_date,
                        "finish_date": start_date,
                        "gbp": hhgap["gbp"],
                    })
                else:
                    clumps[-1]["finish_date"] = start_date
    # Emit one 'missing bill' row per clump.
    for i, clump in enumerate(clumps):
        vals = {}
        for title in titles:
            if title.startswith("difference-") and title.endswith("-gbp"):
                vals[title] = 0
            else:
                vals[title] = None
        vals["covered-problem"] = "_".join((
            "missing",
            clump["element"],
            "supplyid",
            str(supply.id),
            "from",
            hh_format(clump["start_date"]),
        ))
        vals["imp-mpan-core"] = imp_mpan_core
        vals["exp-mpan-core"] = exp_mpan_core
        vals["batch"] = "missing_bill"
        vals["bill-start-date"] = hh_format(clump["start_date"])
        vals["bill-finish-date"] = hh_format(clump["finish_date"])
        vals["difference-net-gbp"] = clump["gbp"]
        writer.writerow(csv_make_val(vals[title]) for title in titles)
        vals["bill_id"] = None
        vals["batch_id"] = None
        vals["supply_id"] = supply.id
        vals["site_id"] = None if site_code is None else site.id
        report_run.insert_row(sess, "", titles, vals)
    # Avoid long-running transactions
    sess.commit()
def process_site(
        sess, site, month_start, month_finish, forecast_date, tmp_file,
        start_date, finish_date, caches):
    """Build one result row summarising a site's energy and cost for the
    month [month_start, month_finish].

    Walks the site's era groups twice: first (non-physical groups) to
    collect associate site codes, supply sources, generator types and the
    best metering type; then (physical groups) to accumulate kWh / GBP per
    stream into ``month_data``, pro-rata billed totals, and any displaced
    virtual bill.

    :param sess: SQLAlchemy session.
    :param site: the Site being reported on.
    :param month_start: first HH of the month (inclusive).
    :param month_finish: last HH of the month (inclusive).
    :param forecast_date: date used by the rating engine for forecasting.
    :param tmp_file: file-like object for progress / debug output.
    :param start_date: overall report start; chunks are clipped to this.
    :param finish_date: overall report finish; chunks are clipped to this.
    :param caches: shared computation caches passed to chellow.computer.
    :return: list of values forming one CSV row.
    """
    site_code = site.code
    associates = []
    sources = set()
    generator_types = set()
    metering_type = 'no-supply'
    problem = ''

    # Zero an accumulator pair (kwh / gbp) for every stream we report on.
    month_data = {}
    for stream_name in [
            'import-net', 'export-net', 'import-gen', 'export-gen',
            'import-3rd-party', 'export-3rd-party', 'msp', 'used',
            'used-3rd-party']:
        month_data[stream_name + '-kwh'] = 0
        month_data[stream_name + '-gbp'] = 0
    billed_gbp = 0
    billed_kwh = 0

    # First pass: site metadata (associates, sources, generator types and
    # the 'best' metering type seen across all eras in the month).
    for group in site.groups(sess, month_start, month_finish, False):
        for cand_site in group.sites:
            cand_site_code = cand_site.code
            if cand_site_code != site_code and \
                    cand_site_code not in associates:
                associates.append(cand_site_code)
        for cand_supply in group.supplies:
            sources.add(cand_supply.source.code)
            if cand_supply.generator_type is not None:
                generator_types.add(cand_supply.generator_type.code)
            for cand_era in cand_supply.find_eras(
                    sess, group.start_date, group.finish_date):
                # Priority order: hh > amr > nhh > unmetered. Once a higher
                # priority type is found it is never downgraded.
                if metering_type != 'hh':
                    if cand_era.pc.code == '00':
                        metering_type = 'hh'
                    elif metering_type != 'amr':
                        if len(cand_era.channels) > 0:
                            metering_type = 'amr'
                        elif metering_type != 'nhh':
                            if cand_era.mtc.meter_type.code not in [
                                    'UM', 'PH']:
                                metering_type = 'nhh'
                            else:
                                metering_type = 'unmetered'

    # Second pass: physical groups; accumulate energy and cost.
    for group in site.groups(sess, month_start, month_finish, True):
        # Clip the group span to the report window.
        if group.start_date > start_date:
            chunk_start = group.start_date
        else:
            chunk_start = start_date
        if group.finish_date > finish_date:
            chunk_finish = finish_date
        else:
            chunk_finish = group.finish_date

        for supply in group.supplies:
            source_code = supply.source.code
            for era in sess.query(Era).filter(
                    Era.supply == supply, Era.start_date <= chunk_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= chunk_start)).options(
                    joinedload(Era.mop_contract),
                    joinedload(Era.hhdc_contract),
                    joinedload(Era.imp_supplier_contract),
                    joinedload(Era.exp_supplier_contract)):
                tmp_file.write(' ')  # progress marker

                # GBP: clip the era span to the chunk.
                if era.start_date > chunk_start:
                    bill_start = era.start_date
                else:
                    bill_start = chunk_start
                if hh_after(era.finish_date, chunk_finish):
                    bill_finish = chunk_finish
                else:
                    bill_finish = era.finish_date

                supply_source = None
                supplier_contract = era.imp_supplier_contract
                if supplier_contract is not None:
                    supply_source = chellow.computer.SupplySource(
                        sess, bill_start, bill_finish, forecast_date, era,
                        True, tmp_file, caches)
                    if supply_source.measurement_type not in ['hh', 'amr']:
                        # Non-half-hourly: estimate kWh from the data source.
                        kwh = sum(
                            hh['msp-kwh'] for hh in supply_source.hh_data)
                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-kwh'] += kwh
                        elif source_code in (
                                '3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-kwh'] += kwh

                    import_vb_function = chellow.computer.contract_func(
                        caches, supplier_contract, 'virtual_bill', tmp_file)
                    if import_vb_function is None:
                        problem += "Can't find the virtual_bill function " \
                            "in the supplier contract. "
                    else:
                        import_vb_function(supply_source)
                        v_bill = supply_source.supplier_bill
                        if 'problem' in v_bill and \
                                len(v_bill['problem']) > 0:
                            problem += 'Supplier Problem: ' + \
                                v_bill['problem']
                        try:
                            gbp = v_bill['net-gbp']
                        except KeyError:
                            # Bug fix: gbp was left unbound here, so the
                            # '+= gbp' below raised NameError and hid the
                            # reported problem. Treat a missing key as zero.
                            gbp = 0
                            problem += 'For the supply ' + \
                                supply_source.mpan_core + \
                                ' the virtual bill ' + str(v_bill) + \
                                ' from the contract ' + \
                                supplier_contract.getName() + \
                                ' does not contain the net-gbp key.'
                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                        elif source_code in (
                                '3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp

                # DC and MOP virtual bills need a supply source even for
                # export-only eras.
                if supply_source is None:
                    supply_source = chellow.computer.SupplySource(
                        sess, bill_start, bill_finish, forecast_date, era,
                        False, tmp_file, caches)
                dc_contract = era.hhdc_contract
                supply_source.contract_func(
                    dc_contract, 'virtual_bill')(supply_source)
                dc_bill = supply_source.dc_bill
                dc_gbp = dc_bill['net-gbp']
                if 'problem' in dc_bill and len(dc_bill['problem']) > 0:
                    problem += 'DC Problem: ' + dc_bill['problem']

                mop_contract = era.mop_contract
                mop_bill_function = supply_source.contract_func(
                    mop_contract, 'virtual_bill')
                mop_bill_function(supply_source)
                mop_bill = supply_source.mop_bill
                mop_gbp = mop_bill['net-gbp']
                if 'problem' in mop_bill and len(mop_bill['problem']) > 0:
                    problem += 'MOP Problem: ' + mop_bill['problem']

                if source_code in ('3rd-party', '3rd-party-reverse'):
                    month_data['import-3rd-party-gbp'] += dc_gbp + mop_gbp
                else:
                    month_data['import-net-gbp'] += dc_gbp + mop_gbp

            # Actual (as opposed to virtual) bills, pro-rated by overlap
            # with the chunk. Durations are inclusive, hence the extra HH
            # (30 * 60 seconds).
            for bill in sess.query(Bill).filter(
                    Bill.supply == supply, Bill.start_date <= chunk_finish,
                    Bill.finish_date >= chunk_start):
                bill_start = bill.start_date
                bill_finish = bill.finish_date
                bill_duration = (
                    bill_finish - bill_start).total_seconds() + (30 * 60)
                overlap_duration = (
                    min(bill_finish, chunk_finish) -
                    max(bill_start, chunk_start)).total_seconds() + (30 * 60)
                overlap_proportion = float(overlap_duration) / bill_duration
                billed_gbp += overlap_proportion * float(bill.net)
                billed_kwh += overlap_proportion * float(bill.kwh)

        # Displaced generation for the whole site over the chunk.
        displaced_era = chellow.computer.displaced_era(
            sess, group, chunk_start, chunk_finish)
        site_ds = chellow.computer.SiteSource(
            sess, site, chunk_start, chunk_finish, forecast_date, tmp_file,
            caches, displaced_era)
        if displaced_era is not None:
            chellow.computer.contract_func(
                caches, displaced_era.imp_supplier_contract,
                'displaced_virtual_bill', tmp_file)(site_ds)
            month_data['msp-gbp'] += site_ds.supplier_bill['net-gbp']

        for stream_name in (
                'import-3rd-party', 'export-3rd-party', 'import-net',
                'export-net', 'import-gen', 'export-gen', 'msp'):
            name = stream_name + '-kwh'
            month_data[name] += sum(hh[name] for hh in site_ds.hh_data)

        # Derived 'used' figures. NOTE(review): '=' (not '+=') for the
        # 3rd-party figures means only the last group's values survive if a
        # month has several groups — preserved as-is, but worth confirming.
        month_data['used-3rd-party-kwh'] = \
            month_data['import-3rd-party-kwh'] - \
            month_data['export-3rd-party-kwh']
        month_data['used-3rd-party-gbp'] = month_data['import-3rd-party-gbp']
        month_data['used-gbp'] += \
            month_data['import-net-gbp'] + month_data['msp-gbp'] + \
            month_data['used-3rd-party-gbp']
        month_data['used-kwh'] += month_data['msp-kwh'] + \
            month_data['used-3rd-party-kwh'] + month_data['import-net-kwh']

    # NOTE(review): generator types are joined with '.' while the other
    # joins use ',' — looks like a typo but preserved to keep output stable.
    result = [
        site.code, site.name, ','.join(associates),
        ','.join(sorted(list(sources))),
        '.'.join(sorted(list(generator_types))),
        hh_format(month_finish), month_data['import-net-kwh'],
        month_data['msp-kwh'], month_data['export-net-kwh'],
        month_data['used-kwh'], month_data['export-gen-kwh'],
        month_data['import-gen-kwh'], month_data['import-3rd-party-kwh'],
        month_data['export-3rd-party-kwh'], month_data['import-net-gbp'],
        month_data['msp-gbp'], 0, month_data['used-gbp'],
        month_data['used-3rd-party-gbp'], billed_kwh, billed_gbp,
        metering_type, problem]
    return result
def content(start_date, finish_date, contract_id, user):
    """Write a CSV of monthly virtual bills for every era attached to the
    given supplier contract, one row per era/polarity per month, between
    start_date and finish_date.

    The file is written under a 'running' name and renamed to its finished
    name on completion (the chellow.dloads convention). Any error is
    written into the CSV itself rather than raised.
    """
    caches = {}
    # Initialise everything the finally/except blocks touch, so a failure
    # at any point (e.g. Session() or open() raising) can't trigger a
    # NameError/AttributeError that masks the original exception.
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        contract = Contract.get_supplier_by_id(sess, contract_id)
        forecast_date = chellow.computer.forecast_date()
        month_start = Datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)
        month_finish = month_start + relativedelta(months=1) - HH
        bill_titles = contract_func(
            caches, contract, 'virtual_bill_titles', None)()
        writer.writerow(
            ['MPAN Core', 'Site Code', 'Site Name', 'Account', 'From',
             'To'] + bill_titles)
        while not month_start > finish_date:
            # Clip the month to the requested report window.
            period_start = start_date \
                if month_start < start_date else month_start
            if month_finish > finish_date:
                period_finish = finish_date
            else:
                period_finish = month_finish
            for era in sess.query(Era).distinct().filter(
                    or_(
                        Era.imp_supplier_contract_id == contract.id,
                        Era.exp_supplier_contract_id == contract.id),
                    Era.start_date <= period_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= period_start)):
                # Clip the era to the period.
                era_start = era.start_date
                if period_start < era_start:
                    chunk_start = era_start
                else:
                    chunk_start = period_start
                era_finish = era.finish_date
                if hh_after(period_finish, era_finish):
                    chunk_finish = era_finish
                else:
                    chunk_finish = period_finish
                # An era can be attached to the contract on import, export
                # or both; emit a row for each applicable polarity.
                polarities = []
                if era.imp_supplier_contract == contract:
                    polarities.append(True)
                if era.exp_supplier_contract == contract:
                    polarities.append(False)
                for polarity in polarities:
                    data_source = SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date, era,
                        polarity, None, caches)
                    site = sess.query(Site).join(SiteEra).filter(
                        SiteEra.era == era,
                        SiteEra.is_physical == true()).one()
                    vals = [
                        data_source.mpan_core, site.code, site.name,
                        data_source.supplier_account,
                        hh_format(data_source.start_date),
                        hh_format(data_source.finish_date)]
                    contract_func(
                        caches, contract, 'virtual_bill', None)(data_source)
                    bill = data_source.supplier_bill
                    # Known titles in order, then any leftover bill keys as
                    # name/value pairs.
                    for title in bill_titles:
                        if title in bill:
                            val = str(bill[title])
                            del bill[title]
                        else:
                            val = ''
                        vals.append(val)
                    for k in sorted(bill.keys()):
                        vals.append(k)
                        vals.append(str(bill[k]))
                    writer.writerow(vals)
            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except BadRequest as e:
        if writer is not None:
            writer.writerow([e.description])
    except BaseException:
        # Bug fix: was a bare 'except:'; also guard against 'writer' being
        # unbound when the failure happened before csv.writer() was built.
        if writer is not None:
            writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        # Bug fix: f.close()/os.rename() used to run unconditionally and
        # blew up when open() never succeeded; rename only makes sense once
        # the file exists (matches the other content() in this file).
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def test_general_import_g_bill_reads(mocker):
    """Check that importing a gas bill with read columns ends up calling
    insert_g_read with the parsed values."""
    # --- patch the model classes the importer resolves at run time ---
    g_contract_class = mocker.patch(
        "chellow.general_import.GContract", autospec=True)
    g_unit_class = mocker.patch(
        "chellow.general_import.GUnit", autospec=True)
    g_read_type_class = mocker.patch(
        "chellow.general_import.GReadType", autospec=True)
    mocker.patch("chellow.models.GBatch", autospec=True)
    mocker.patch("chellow.models.GBill", autospec=True)

    # --- wire up the object graph the importer will walk ---
    contract = chellow.general_import.GContract("CH4U", "{}", "{}")
    g_contract_class.get_by_name.return_value = contract

    batch = chellow.models.GBatch("CH4U", "{}", "{}", 4)
    contract.get_g_batch_by_reference.return_value = batch

    bill = chellow.models.GBill(*[mocker.Mock() for _ in range(14)])
    batch.insert_g_bill.return_value = bill

    g_unit_code = "M3"
    g_unit = chellow.general_import.GUnit(g_unit_code, "", 1)
    g_unit_class.get_by_code.return_value = g_unit

    prev_type_code, pres_type_code = "E", "A"
    prev_type = chellow.general_import.GReadType(prev_type_code, "")
    pres_type = chellow.general_import.GReadType(pres_type_code, "")
    g_read_type_class.get_by_code.side_effect = [prev_type, pres_type]

    # --- the read values we expect to see passed through ---
    sess = mocker.Mock()
    msn = "88hgkdshjf"
    correction_factor = Decimal("1")
    calorific_value = Decimal("39")
    prev_date = utc_datetime(2019, 10, 1)
    prev_value = Decimal("988")
    pres_date = utc_datetime(2019, 10, 31, 23, 30)
    pres_value = Decimal("1200")

    vals = [
        "CH4U",
        "batch 8883",
        "759288812",
        "2019-09-08 01:00",
        "2019-10-01 01:00",
        "2019-10-31 23:30",
        "0.00",
        "0.00",
        "0.00",
        "77hwgtlll",
        "7876hrwlju",
        "N",
        "{}",
        "0",
        msn,
        g_unit_code,
        str(correction_factor),
        str(calorific_value),
        hh_format(prev_date),
        str(prev_value),
        prev_type_code,
        hh_format(pres_date),
        str(pres_value),
        pres_type_code,
    ]

    chellow.general_import.general_import_g_bill(sess, "insert", vals, [])

    bill.insert_g_read.assert_called_with(
        sess,
        msn,
        g_unit,
        correction_factor,
        calorific_value,
        prev_value,
        prev_date,
        prev_type,
        pres_value,
        pres_date,
        pres_type,
    )