def triad_csv(supply_source):
    """Build one CSV row of triad values for *supply_source*.

    Returns 19 empty strings when there is no source, or when the MPAN
    core starts with "99" (a pseudo core). Otherwise runs the DUoS and
    triad virtual-bill calculations, folds the half-hourly data into the
    supplier bill half-hours, reduces them to a single bill and flattens
    the triad keys into CSV-ready values.
    """
    if supply_source is None or supply_source.mpan_core.startswith("99"):
        return [""] * 19

    chellow.duos.duos_vb(supply_source)
    chellow.triad.hh(supply_source)

    # Copy scalar values straight across; wrap rate values in a set so
    # that reduce_bill_hhs can union them across half-hours.
    for datum in supply_source.hh_data:
        bill_hh = supply_source.supplier_bill_hhs[datum["start-date"]]
        for key in scalar_names & datum.keys():
            bill_hh[key] = datum[key]
        for key in rate_names & datum.keys():
            bill_hh[key] = {datum[key]}

    bill = reduce_bill_hhs(supply_source.supplier_bill_hhs)

    row = [supply_source.mpan_core]
    # Three actual triads, five fields each, then the three totals:
    # 1 + 3 * 5 + 3 == 19 columns, matching the empty-row width above.
    for n in (1, 2, 3):
        prefix = "triad-actual-" + str(n)
        for tail in ("-date", "-msp-kw", "-status", "-laf", "-gsp-kw"):
            row.append(csv_make_val(bill[prefix + tail]))
    for tail in ("gsp-kw", "rate", "gbp"):
        row.append(csv_make_val(bill["triad-actual-" + tail]))
    return row
def content(g_batch_id, g_bill_id, user):
    """Write the g_bill_check CSV: compare each gas bill in a batch (or a
    single bill) against the virtual bill computed by the contract's
    virtual_bill function.

    Fix over the original: the two bare ``except:`` clauses in the cleanup
    path are narrowed to ``except BaseException:`` so they no longer mask
    e.g. SystemExit silently and match the style of the sibling report.
    """
    forecast_date = to_utc(Datetime.max)
    report_context = {}
    sess = tmp_file = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'g_bill_check.csv', user)
        tmp_file = open(running_name, "w")
        csv_writer = csv.writer(tmp_file)
        # Either a whole batch, or a single bill (whose batch we derive).
        if g_batch_id is not None:
            g_batch = GBatch.get_by_id(sess, g_batch_id)
            g_bills = sess.query(GBill).filter(
                GBill.g_batch == g_batch).order_by(GBill.reference)
        elif g_bill_id is not None:
            g_bill = GBill.get_by_id(sess, g_bill_id)
            g_bills = sess.query(GBill).filter(GBill.id == g_bill.id)
            g_batch = g_bill.g_batch
        g_contract = g_batch.g_contract
        vbf = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + g_contract.name +
                " doesn't have a function virtual_bill.")
        header_titles = [
            'batch', 'bill_reference', 'bill_type', 'bill_start_date',
            'bill_finish_date', 'mprn', 'supply_name', 'site_code',
            'site_name', 'covered_start', 'covered_finish',
            'covered_bill_ids']
        bill_titles = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill_titles')()
        titles = header_titles[:]
        # Each bill title appears as covered_/virtual_ pair, plus a
        # difference_ column for monetary (_gbp) titles.
        for title in bill_titles:
            for prefix in ('covered_', 'virtual_'):
                titles.append(prefix + title)
            if title.endswith('_gbp'):
                titles.append('difference_' + title)
        csv_writer.writerow(titles)
        for g_bill in g_bills:
            problem = ''
            g_supply = g_bill.g_supply
            read_dict = collections.defaultdict(set)
            # Sanity-check register reads against the eras they span.
            for g_read in g_bill.g_reads:
                if not all(
                        g_read.msn == era.msn
                        for era in g_supply.find_g_eras(
                            sess, g_read.prev_date, g_read.pres_date)):
                    problem += "The MSN " + g_read.msn + \
                        " of the register read " + str(g_read.id) + \
                        " doesn't match the MSN of all the relevant eras."
                for dt, typ in [
                        (g_read.pres_date, g_read.pres_type),
                        (g_read.prev_date, g_read.prev_type)]:
                    typ_set = read_dict[str(dt) + "-" + g_read.msn]
                    typ_set.add(typ)
                    if len(typ_set) > 1:
                        problem += " Reads taken on " + str(dt) + \
                            " have differing read types."
            g_era = g_supply.find_g_era_at(sess, g_bill.finish_date)
            if g_era is None:
                csv_writer.writerow(
                    ["Extraordinary! There isn't a era for this bill!"])
                continue
            vals = {
                'batch': g_batch.reference,
                'bill_reference': g_bill.reference,
                'bill_type': g_bill.bill_type.code,
                'bill_start_date': g_bill.start_date,
                'bill_finish_date': g_bill.finish_date,
                'mprn': g_supply.mprn,
                'covered_vat_gbp': Decimal('0.00'),
                'covered_net_gbp': Decimal('0.00'),
                'covered_gross_gbp': Decimal('0.00'),
                'covered_kwh': Decimal(0),
                'covered_start': g_bill.start_date,
                'covered_finish': g_bill.finish_date,
                'covered_bill_ids': []}
            covered_primary_bill = None
            # Grow the covered period until no overlapping bill extends it.
            enlarged = True
            while enlarged:
                enlarged = False
                for covered_bill in sess.query(GBill).filter(
                        GBill.g_supply_id == g_supply.id,
                        GBill.start_date <= vals['covered_finish'],
                        GBill.finish_date >= vals['covered_start']).order_by(
                        GBill.issue_date.desc(), GBill.start_date):
                    if covered_primary_bill is None and \
                            len(covered_bill.g_reads) > 0:
                        covered_primary_bill = covered_bill
                    if covered_bill.start_date < vals['covered_start']:
                        vals['covered_start'] = covered_bill.start_date
                        enlarged = True
                        break
                    if covered_bill.finish_date > vals['covered_finish']:
                        vals['covered_finish'] = covered_bill.finish_date
                        enlarged = True
                        break
            # Accumulate the covered bills' figures into vals.
            for covered_bill in sess.query(GBill).filter(
                    GBill.g_supply == g_supply,
                    GBill.start_date <= vals['covered_finish'],
                    GBill.finish_date >= vals['covered_start']).order_by(
                    GBill.issue_date.desc(), GBill.start_date):
                vals['covered_bill_ids'].append(covered_bill.id)
                bdown = covered_bill.make_breakdown()
                vals['covered_kwh'] += covered_bill.kwh
                vals['covered_net_gbp'] += covered_bill.net
                vals['covered_vat_gbp'] += covered_bill.vat
                vals['covered_gross_gbp'] += covered_bill.gross
                for title in bill_titles:
                    k = 'covered_' + title
                    v = bdown.get(title)
                    if v is not None:
                        if title.endswith('_rate') or title in (
                                'correction_factor', 'calorific_value',
                                'units'):
                            # Rates etc. are collected as sets of values.
                            if k not in vals:
                                vals[k] = set()
                            vals[k].add(v)
                        else:
                            try:
                                vals[k] += v
                            except KeyError:
                                vals[k] = v
                            except TypeError:
                                raise BadRequest(
                                    "Problem with key " + str(k) +
                                    " and value " + str(v) +
                                    " for existing " + str(vals[k]))
                    if title in (
                            'correction_factor', 'calorific_value',
                            'units_code', 'units_factor'):
                        if k not in vals:
                            vals[k] = set()
                        for g_read in covered_bill.g_reads:
                            if title in ('units_code', 'units_factor'):
                                g_units = g_read.g_units
                                if title == 'units_code':
                                    v = g_units.code
                                else:
                                    v = g_units.factor
                            else:
                                v = getattr(g_read, title)
                            vals[k].add(v)
            # Compute the virtual bill over every era the period touches.
            for g_era in sess.query(GEra).filter(
                    GEra.g_supply == g_supply,
                    GEra.start_date <= vals['covered_finish'],
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >= vals['covered_start'])).distinct():
                site = sess.query(Site).join(SiteGEra).filter(
                    SiteGEra.is_physical == true(),
                    SiteGEra.g_era == g_era).one()
                if vals['covered_start'] > g_era.start_date:
                    chunk_start = vals['covered_start']
                else:
                    chunk_start = g_era.start_date
                if hh_before(vals['covered_finish'], g_era.finish_date):
                    chunk_finish = vals['covered_finish']
                else:
                    chunk_finish = g_era.finish_date
                data_source = chellow.g_engine.GDataSource(
                    sess, chunk_start, chunk_finish, forecast_date, g_era,
                    report_context, covered_primary_bill)
                vbf(data_source)
                for k, v in data_source.bill.items():
                    vk = 'virtual_' + k
                    try:
                        if isinstance(v, set):
                            vals[vk].update(v)
                        else:
                            vals[vk] += v
                    except KeyError:
                        vals[vk] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(vk) + " and value " + str(v) +
                            ". " + str(detail))
            # NOTE(review): `site` is only bound inside the era loop above;
            # if that query yields no eras this raises NameError — confirm
            # an era always exists here.
            vals['supply_name'] = g_supply.name
            vals['site_code'] = site.code
            vals['site_name'] = site.name
            for k, v in vals.items():
                vals[k] = csv_make_val(v)
            # difference_X relies on the column order covered_X, virtual_X,
            # difference_X built when `titles` was assembled.
            for i, title in enumerate(titles):
                if title.startswith('difference_'):
                    try:
                        covered_val = float(vals[titles[i - 2]])
                        virtual_val = float(vals[titles[i - 1]])
                        vals[title] = covered_val - virtual_val
                    except KeyError:
                        vals[title] = None
            csv_writer.writerow(
                [(vals.get(k) if vals.get(k) is not None else '')
                 for k in titles])
    except BadRequest as e:
        tmp_file.write("Problem: " + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        try:
            if sess is not None:
                sess.close()
        except BaseException:
            tmp_file.write("\nProblem closing session.")
        finally:
            tmp_file.close()
            os.rename(running_name, finished_name)
def content(contract_id, end_year, end_month, months, user):
    """Write the dc_virtual_bills CSV: one row per era under the DC
    contract, containing the contract's virtual bill for the requested
    span of months.

    Fix over the original: the ``finally`` block called ``f.close()`` and
    ``os.rename`` unconditionally, so a failure before the file was opened
    raised AttributeError/NameError from cleanup; both are now guarded.
    """
    caches = {}
    sess = f = supply_source = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)
        month_list = list(
            c_months_u(
                finish_year=end_year, finish_month=end_month, months=months))
        start_date, finish_date = month_list[0][0], month_list[-1][-1]
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            "dc_virtual_bills.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        bill_titles = chellow.computer.contract_func(
            caches, contract, "virtual_bill_titles")()
        header_titles = [
            "Import MPAN Core", "Export MPAN Core", "Start Date",
            "Finish Date",
        ]
        vb_func = chellow.computer.contract_func(
            caches, contract, "virtual_bill")
        writer.writerow(header_titles + bill_titles)
        for era in (sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract,
                ).options(joinedload(Era.channels)).order_by(Era.supply_id)):
            # An era with no import MPAN core is treated as export-only.
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ""
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = "" if exp_mpan_core is None else exp_mpan_core
            # Clamp the era to the requested window.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            vals = [
                imp_mpan_core_str, exp_mpan_core_str,
                hh_format(chunk_start), hh_format(chunk_finish),
            ]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            # Known titles first, then any leftover keys as name/value pairs.
            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))
            writer.writerow(vals)
            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += (
                "with supply " + supply_source.mpan_core + " starting at " +
                hh_format(supply_source.start_date) + " ")
        msg += str(e)
        writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + "\n"
        f.write(msg)
    finally:
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
def content(g_supply_id, start_date, finish_date, user):
    """Write the half-hourly virtual-bill CSV for one gas supply: a row
    per half-hour between *start_date* and *finish_date*.

    Fix over the original: ``sess`` and ``f`` were first assigned inside
    the ``try``, so the ``finally`` block raised NameError whenever
    ``Session()`` (or anything before ``open``) failed; both are now
    initialised to None, and the rename only happens once the file exists.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        g_supply = GSupply.get_by_id(sess, g_supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        running_name, finished_name = chellow.dloads.make_names(
            'g_supply_virtual_bills_hh_' + str(g_supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        for hh_start in hh_range(caches, start_date, finish_date):
            g_era = sess.query(GEra).filter(
                GEra.g_supply == g_supply, GEra.start_date <= hh_start,
                or_(
                    GEra.finish_date == null(),
                    GEra.finish_date >= hh_start)).one()
            site = sess.query(Site).join(SiteGEra).filter(
                SiteGEra.g_era == g_era,
                SiteGEra.is_physical == true()).one()
            ds = GDataSource(
                sess, hh_start, hh_start, forecast_date, g_era, caches, None)
            titles = [
                'MPRN', 'Site Code', 'Site Name', 'Account', 'HH Start', ''
            ]
            output_line = [
                ds.mprn, site.code, site.name, ds.account,
                hh_format(ds.start_date), ''
            ]
            contract = g_era.g_contract
            output_line.append('')
            contract_titles = contract_func(
                caches, contract, 'virtual_bill_titles')()
            titles.append('')
            titles.extend(contract_titles)
            contract_func(caches, contract, 'virtual_bill')(ds)
            bill = ds.bill
            # Known titles first, then any leftover keys as name/value pairs.
            for title in contract_titles:
                output_line.append(csv_make_val(bill.get(title, '')))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, csv_make_val(bill[k])])
            # Re-emit the header row only when the titles change.
            if titles != prev_titles:
                prev_titles = titles
                w.writerow(titles)
            w.writerow(output_line)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        w.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(supply_id, start_date, finish_date, user):
    """Write the half-hourly virtual-bill CSV for one electricity supply:
    a row per half-hour with MOP, DC and supplier virtual bills side by
    side.

    Fix over the original: ``sess`` and ``f`` were only assigned inside
    the ``try``, so the ``finally`` block raised NameError whenever
    ``Session()`` (or anything before ``open``) failed; both are now
    initialised to None, and the rename only happens once the file exists.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        running_name, finished_name = chellow.dloads.make_names(
            "supply_virtual_bills_hh_" + str(supply_id) + ".csv", user)
        f = open(running_name, mode="w", newline="")
        w = csv.writer(f, lineterminator="\n")
        for hh_start in hh_range(caches, start_date, finish_date):
            era = (sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= hh_start,
                or_(Era.finish_date == null(), Era.finish_date >= hh_start),
            ).one())
            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.era == era, SiteEra.is_physical == true()).one())
            ds = chellow.computer.SupplySource(
                sess, hh_start, hh_start, forecast_date, era, True, caches)
            titles = [
                "MPAN Core", "Site Code", "Site Name", "Account", "HH Start",
                ""
            ]
            output_line = [
                ds.mpan_core, site.code, site.name, ds.supplier_account,
                hh_format(ds.start_date), "",
            ]
            # MOP virtual bill.
            mop_titles = ds.contract_func(
                era.mop_contract, "virtual_bill_titles")()
            titles.extend(["mop-" + t for t in mop_titles])
            ds.contract_func(era.mop_contract, "virtual_bill")(ds)
            bill = ds.mop_bill
            for title in mop_titles:
                output_line.append(csv_make_val(bill.get(title, "")))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, csv_make_val(bill[k])])
            # DC virtual bill.
            output_line.append("")
            dc_titles = ds.contract_func(
                era.dc_contract, "virtual_bill_titles")()
            titles.append("")
            titles.extend(["dc-" + t for t in dc_titles])
            ds.contract_func(era.dc_contract, "virtual_bill")(ds)
            bill = ds.dc_bill
            for title in dc_titles:
                output_line.append(csv_make_val(bill.get(title, "")))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, csv_make_val(bill[k])])
            # Import supplier virtual bill, if there is one.
            if era.imp_supplier_contract is not None:
                contract = era.imp_supplier_contract
                output_line.append("")
                supplier_titles = ds.contract_func(
                    contract, "virtual_bill_titles")()
                titles.append("")
                titles.extend(["imp-supplier-" + t for t in supplier_titles])
                ds.contract_func(contract, "virtual_bill")(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(csv_make_val(bill.get(title, "")))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, csv_make_val(bill[k])])
            # Export supplier virtual bill needs an export-oriented source.
            if era.exp_supplier_contract is not None:
                contract = era.exp_supplier_contract
                ds = chellow.computer.SupplySource(
                    sess, hh_start, hh_start, forecast_date, era, False,
                    caches)
                output_line.append("")
                supplier_titles = ds.contract_func(
                    contract, "virtual_bill_titles")()
                titles.append("")
                titles.extend(["exp-supplier-" + t for t in supplier_titles])
                ds.contract_func(contract, "virtual_bill")(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(csv_make_val(bill.get(title, "")))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, csv_make_val(bill[k])])
            # Re-emit the header row only when the titles change.
            if titles != prev_titles:
                prev_titles = titles
                w.writerow(titles)
            w.writerow(output_line)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        w.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(g_batch_id, g_bill_id, user):
    """Write the g_bill_check CSV: compare each gas bill in a batch (or a
    single bill) against the virtual bill computed by the contract's
    virtual_bill function. Bills for the same supply are grouped, and
    pairs of mutually-cancelling bills are dropped before comparison.
    """
    forecast_date = to_utc(Datetime.max)
    report_context = {}
    sess = tmp_file = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'g_bill_check.csv', user)
        tmp_file = open(running_name, "w")
        csv_writer = csv.writer(tmp_file)
        # Either a whole batch, or a single bill (whose batch we derive).
        if g_batch_id is not None:
            g_batch = GBatch.get_by_id(sess, g_batch_id)
            g_bills = sess.query(GBill).filter(
                GBill.g_batch == g_batch).order_by(GBill.reference)
        elif g_bill_id is not None:
            g_bill = GBill.get_by_id(sess, g_bill_id)
            g_bills = sess.query(GBill).filter(GBill.id == g_bill.id)
            g_batch = g_bill.g_batch
        g_contract = g_batch.g_contract
        vbf = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + g_contract.name +
                " doesn't have a function virtual_bill.")
        header_titles = [
            'batch', 'bill_reference', 'bill_type', 'bill_start_date',
            'bill_finish_date', 'mprn', 'supply_name', 'site_code',
            'site_name', 'covered_start', 'covered_finish',
            'covered_bill_ids'
        ]
        bill_titles = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill_titles')()
        titles = header_titles[:]
        # Each bill title appears as a covered_/virtual_ pair, plus a
        # difference_ column for monetary (_gbp) titles.
        for title in bill_titles:
            for prefix in ('covered_', 'virtual_'):
                titles.append(prefix + title)
            if title.endswith('_gbp'):
                titles.append('difference_' + title)
        csv_writer.writerow(titles)
        # Group the bill ids by supply so overlapping bills of one supply
        # are handled together.
        g_bill_map = defaultdict(set, {})
        for b in g_bills:
            g_bill_map[b.g_supply.id].add(b.id)
        for g_supply_id, g_bill_ids in g_bill_map.items():
            while len(g_bill_ids) > 0:
                g_bill_id = list(sorted(g_bill_ids))[0]
                g_bill_ids.remove(g_bill_id)
                g_bill = sess.query(GBill).filter(
                    GBill.id == g_bill_id).one()
                problem = ''
                g_supply = g_bill.g_supply
                read_dict = defaultdict(set)
                # Sanity-check register reads against the eras they span.
                for g_read in g_bill.g_reads:
                    if not all(
                            g_read.msn == era.msn
                            for era in g_supply.find_g_eras(
                                sess, g_read.prev_date, g_read.pres_date)):
                        problem += "The MSN " + g_read.msn + \
                            " of the register read " + str(g_read.id) + \
                            " doesn't match the MSN of all the relevant eras."
                    for dt, typ in [
                            (g_read.pres_date, g_read.pres_type),
                            (g_read.prev_date, g_read.prev_type)]:
                        typ_set = read_dict[str(dt) + "-" + g_read.msn]
                        typ_set.add(typ)
                        if len(typ_set) > 1:
                            problem += " Reads taken on " + str(dt) + \
                                " have differing read types."
                g_era = g_supply.find_g_era_at(sess, g_bill.finish_date)
                if g_era is None:
                    csv_writer.writerow(
                        ["Extraordinary! There isn't a era for this bill!"])
                    continue
                vals = {
                    'covered_vat_gbp': Decimal('0.00'),
                    'covered_net_gbp': Decimal('0.00'),
                    'covered_gross_gbp': Decimal('0.00'),
                    'covered_kwh': Decimal(0),
                    'covered_start': g_bill.start_date,
                    'covered_finish': g_bill.finish_date,
                    'covered_bill_ids': []
                }
                covered_primary_bill = None
                # Grow the covered period until no overlapping bill extends
                # it, discarding pairs of bills that exactly cancel.
                enlarged = True
                while enlarged:
                    enlarged = False
                    covered_bills = OrderedDict(
                        (b.id, b) for b in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= vals['covered_finish'],
                            GBill.finish_date >= vals['covered_start']).
                        order_by(GBill.issue_date.desc(), GBill.start_date))
                    # Repeatedly remove pairs of bills that negate each
                    # other (same period, opposite kwh/net/vat/gross).
                    num_covered = None
                    while num_covered != len(covered_bills):
                        num_covered = len(covered_bills)
                        for a, b in combinations(
                                tuple(covered_bills.values()), 2):
                            if all(
                                    (a.start_date == b.start_date,
                                     a.finish_date == b.finish_date,
                                     a.kwh == -1 * b.kwh,
                                     a.net == -1 * b.net,
                                     a.vat == -1 * b.vat,
                                     a.gross == -1 * b.gross)):
                                for gb_id in a.id, b.id:
                                    del covered_bills[gb_id]
                                    if gb_id in g_bill_ids:
                                        g_bill_ids.remove(gb_id)
                                break
                    for covered_bill in covered_bills.values():
                        if covered_primary_bill is None and \
                                len(covered_bill.g_reads) > 0:
                            covered_primary_bill = covered_bill
                        if covered_bill.start_date < vals['covered_start']:
                            vals['covered_start'] = covered_bill.start_date
                            enlarged = True
                            break
                        if covered_bill.finish_date > vals['covered_finish']:
                            vals['covered_finish'] = covered_bill.finish_date
                            enlarged = True
                            break
                # Everything cancelled out: nothing to compare.
                if len(covered_bills) == 0:
                    continue
                # Accumulate the covered bills' figures into vals.
                for covered_bill in covered_bills.values():
                    if covered_bill.id in g_bill_ids:
                        g_bill_ids.remove(covered_bill.id)
                    vals['covered_bill_ids'].append(covered_bill.id)
                    bdown = covered_bill.make_breakdown()
                    vals['covered_kwh'] += covered_bill.kwh
                    vals['covered_net_gbp'] += covered_bill.net
                    vals['covered_vat_gbp'] += covered_bill.vat
                    vals['covered_gross_gbp'] += covered_bill.gross
                    for title in bill_titles:
                        k = 'covered_' + title
                        v = bdown.get(title)
                        if v is not None:
                            if isinstance(v, list):
                                # Lists are collected as sets of values.
                                if k not in vals:
                                    vals[k] = set()
                                vals[k].update(set(v))
                            else:
                                try:
                                    vals[k] += v
                                except KeyError:
                                    vals[k] = v
                                except TypeError:
                                    raise BadRequest(
                                        "Problem with bill " +
                                        str(g_bill.id) + " and key " +
                                        str(k) + " and value " + str(v) +
                                        " for existing " + str(vals[k]))
                        if title in (
                                'correction_factor', 'calorific_value',
                                'unit_code', 'unit_factor'):
                            if k not in vals:
                                vals[k] = set()
                            for g_read in covered_bill.g_reads:
                                if title in ('unit_code', 'unit_factor'):
                                    g_unit = g_read.g_unit
                                    if title == 'unit_code':
                                        v = g_unit.code
                                    else:
                                        v = g_unit.factor
                                else:
                                    v = getattr(g_read, title)
                                vals[k].add(v)
                # Compute the virtual bill over every era the covered
                # period touches.
                for g_era in sess.query(GEra).filter(
                        GEra.g_supply == g_supply,
                        GEra.start_date <= vals['covered_finish'],
                        or_(
                            GEra.finish_date == null(),
                            GEra.finish_date >=
                            vals['covered_start'])).distinct():
                    site = sess.query(Site).join(SiteGEra).filter(
                        SiteGEra.is_physical == true(),
                        SiteGEra.g_era == g_era).one()
                    chunk_start = hh_max(
                        vals['covered_start'], g_era.start_date)
                    chunk_finish = hh_min(
                        vals['covered_finish'], g_era.finish_date)
                    data_source = chellow.g_engine.GDataSource(
                        sess, chunk_start, chunk_finish, forecast_date,
                        g_era, report_context, covered_primary_bill)
                    vbf(data_source)
                    for k, v in data_source.bill.items():
                        vk = 'virtual_' + k
                        try:
                            if isinstance(v, set):
                                vals[vk].update(v)
                            else:
                                vals[vk] += v
                        except KeyError:
                            vals[vk] = v
                        except TypeError as detail:
                            raise BadRequest(
                                "For key " + str(vk) + " and value " +
                                str(v) + ". " + str(detail))
                # If the starting bill was cancelled away, describe the
                # group using the earliest surviving covered bill.
                if g_bill.id not in covered_bills.keys():
                    g_bill = covered_bills[sorted(covered_bills.keys())[0]]
                vals['batch'] = g_bill.g_batch.reference
                vals['bill_reference'] = g_bill.reference
                vals['bill_type'] = g_bill.bill_type.code
                vals['bill_start_date'] = g_bill.start_date
                vals['bill_finish_date'] = g_bill.finish_date
                vals['mprn'] = g_supply.mprn
                vals['supply_name'] = g_supply.name
                # NOTE(review): `site` is only bound inside the era loop
                # above; if that query yields no eras this raises NameError
                # — confirm an era always exists here.
                vals['site_code'] = site.code
                vals['site_name'] = site.name
                for k, v in vals.items():
                    vals[k] = csv_make_val(v)
                # difference_X relies on the column order covered_X,
                # virtual_X, difference_X built when `titles` was assembled.
                for i, title in enumerate(titles):
                    if title.startswith('difference_'):
                        try:
                            covered_val = float(vals[titles[i - 2]])
                            virtual_val = float(vals[titles[i - 1]])
                            vals[title] = covered_val - virtual_val
                        except KeyError:
                            vals[title] = None
                csv_writer.writerow([
                    (vals.get(k) if vals.get(k) is not None else '')
                    for k in titles
                ])
    except BadRequest as e:
        tmp_file.write("Problem: " + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        try:
            if sess is not None:
                sess.close()
        except BaseException:
            tmp_file.write("\nProblem closing session.")
        finally:
            # NOTE(review): tmp_file may still be None if Session() or
            # make_names failed — confirm whether cleanup should guard it.
            tmp_file.close()
            os.rename(running_name, finished_name)
def content(g_supply_id, file_name, start_date, finish_date, user):
    """Write the virtual-bill CSV for one gas supply: a row per era
    overlapping the requested period.

    Fix over the original: ``f`` was first assigned inside the ``try``,
    so the ``finally`` block's ``if f is not None`` raised NameError
    whenever ``make_names``/``open`` failed; ``f`` is now initialised to
    None alongside ``sess``.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'g_supply_virtual_bill_' + str(g_supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        g_supply = GSupply.get_by_id(sess, g_supply_id)
        forecast_dt = forecast_date()
        prev_titles = None
        for g_era in sess.query(GEra).filter(
                GEra.g_supply == g_supply, GEra.start_date < finish_date,
                or_(
                    GEra.finish_date == null(),
                    GEra.finish_date > start_date)).order_by(
                GEra.start_date):
            # Clamp the era to the requested window.
            chunk_start = hh_max(g_era.start_date, start_date)
            chunk_finish = hh_min(g_era.finish_date, finish_date)
            site = sess.query(Site).join(SiteGEra).filter(
                SiteGEra.g_era == g_era,
                SiteGEra.is_physical == true()).one()
            ds = GDataSource(
                sess, chunk_start, chunk_finish, forecast_dt, g_era, caches,
                None)
            titles = [
                'MPRN', 'Site Code', 'Site Name', 'Account', 'From', 'To', ''
            ]
            output_line = [
                g_supply.mprn, site.code, site.name, ds.account,
                hh_format(ds.start_date), hh_format(ds.finish_date), ''
            ]
            contract_titles = g_contract_func(
                caches, g_era.g_contract, 'virtual_bill_titles')()
            titles.extend(contract_titles)
            g_contract_func(caches, g_era.g_contract, 'virtual_bill')(ds)
            bill = ds.bill
            # Known titles first, then any leftover keys as name/value pairs.
            for title in contract_titles:
                if title in bill:
                    output_line.append(csv_make_val(bill[title]))
                    del bill[title]
                else:
                    output_line.append('')
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            # Re-emit the header row only when the titles change.
            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            writer.writerow(output_line)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, month, months, supply_id, user):
    """Write the register_reads CSV: every register read whose present or
    previous date falls within the requested span of months, optionally
    restricted to one supply.
    """
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "register_reads.csv", user)
        f = open(running_name, mode="w", newline="")
        w = csv.writer(f, lineterminator="\n")
        titles = (
            "Duration Start", "Duration Finish", "Supply Id",
            "Import MPAN Core", "Export MPAN Core", "Batch Reference",
            "Bill Id", "Bill Reference", "Bill Issue Date", "Bill Type",
            "Register Read Id", "TPR", "Coefficient", "Previous Read Date",
            "Previous Read Value", "Previous Read Type", "Present Read Date",
            "Present Read Value", "Present Read Type",
        )
        w.writerow(titles)
        month_pairs = list(
            c_months_u(finish_year=year, finish_month=month, months=months))
        start_date, finish_date = month_pairs[0][0], month_pairs[-1][-1]
        # Supplies that have at least one read touching the window.
        supplies = (sess.query(Supply).join(Bill).join(RegisterRead).filter(
            or_(
                and_(
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date,
                ),
                and_(
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date,
                ),
            )).order_by(Bill.supply_id).distinct())
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Bill.supply == supply)
        for supply in supplies:
            supply_id = supply.id
            for bill, batch, bill_type in (sess.query(
                    Bill, Batch, BillType).join(Batch).join(BillType).join(
                    RegisterRead).filter(
                    Bill.supply == supply,
                    or_(
                        and_(
                            RegisterRead.present_date >= start_date,
                            RegisterRead.present_date <= finish_date,
                        ),
                        and_(
                            RegisterRead.previous_date >= start_date,
                            RegisterRead.previous_date <= finish_date,
                        ),
                    ),
                    )):
                # Fall back to the nearest era when the bill starts
                # outside all eras of the supply.
                era = supply.find_era_at(sess, bill.start_date)
                if era is None:
                    eras = (sess.query(Era).filter(
                        Era.supply == supply).order_by(
                        Era.start_date).all())
                    if bill.start_date < eras[0].start_date:
                        era = eras[0]
                    else:
                        era = eras[-1]
                for read in (sess.query(RegisterRead).filter(
                        RegisterRead.bill == bill,
                        or_(
                            and_(
                                RegisterRead.present_date >= start_date,
                                RegisterRead.present_date <= finish_date,
                            ),
                            and_(
                                RegisterRead.previous_date >= start_date,
                                RegisterRead.previous_date <= finish_date,
                            ),
                        ),
                        ).options(
                        joinedload(RegisterRead.tpr),
                        joinedload(RegisterRead.previous_type),
                        joinedload(RegisterRead.present_type),
                        )):
                    vals = [
                        start_date, finish_date, supply_id,
                        era.imp_mpan_core, era.exp_mpan_core,
                        batch.reference, bill.id, bill.reference,
                        bill.issue_date, bill_type.code, read.id,
                        # A read with no TPR is reported as "md".
                        "md" if read.tpr is None else read.tpr.code,
                        read.coefficient, read.previous_date,
                        read.previous_value, read.previous_type.code,
                        read.present_date, read.present_value,
                        read.present_type.code,
                    ]
                    w.writerow(csv_make_val(v) for v in vals)
            # Avoid a long-running transaction
            sess.rollback()
    except BadRequest as e:
        w.writerow([e.description])
    except BaseException:
        msg = traceback.format_exc()
        f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(
        start_year, start_month, start_day, finish_year, finish_month,
        finish_day, is_import, supply_id, user):
    """Write the daily_supplier_virtual_bill CSV: one row per day per era
    for the supply, holding the supplier's virtual bill for that day.

    Fix over the original: ``f`` was first assigned inside the ``try``,
    so the ``finally`` block's ``if f is not None`` raised NameError
    whenever ``make_names``/``open`` failed; ``f`` is now initialised to
    None alongside ``sess``.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'daily_supplier_virtual_bill.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        start_date = Datetime(
            start_year, start_month, start_day, tzinfo=pytz.utc)
        # End of the finish day: start of the next day minus one half-hour.
        finish_date = Datetime(
            finish_year, finish_month, finish_day, tzinfo=pytz.utc) + \
            relativedelta(days=1) - HH
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        day_start = start_date
        header_titles = [
            'MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To',
            'Is Forecast?'
        ]
        bill_titles = []
        # Find titles
        for era in sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= finish_date,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)):
            if is_import:
                cont = era.imp_supplier_contract
            else:
                cont = era.exp_supplier_contract
            for title in chellow.computer.contract_func(
                    caches, cont, 'virtual_bill_titles')():
                if title not in bill_titles:
                    bill_titles.append(title)
            # Add per-TPR columns for any SSC measurement requirements.
            ssc = era.ssc
            if ssc is not None:
                for mr in ssc.measurement_requirements:
                    for suffix in ('-kwh', '-rate', '-gbp'):
                        title = mr.tpr.code + suffix
                        if title not in bill_titles:
                            bill_titles.append(title)
        writer.writerow(header_titles + bill_titles)
        while not day_start > finish_date:
            day_finish = day_start + relativedelta(days=1) - HH
            for era in supply.find_eras(sess, day_start, day_finish):
                # Clamp the era to the current day.
                chunk_start = hh_max(era.start_date, day_start)
                chunk_finish = hh_min(era.finish_date, day_finish)
                ss = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    is_import, caches)
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()
                row = [
                    ss.mpan_core, site.code, site.name, ss.supplier_account,
                    hh_format(ss.start_date), hh_format(ss.finish_date),
                    ss.years_back > 0]
                chellow.computer.contract_func(
                    caches, ss.supplier_contract, 'virtual_bill')(ss)
                bill = ss.supplier_bill
                # Known titles first, then leftovers as name/value pairs.
                for title in bill_titles:
                    if title in bill:
                        row.append(csv_make_val(bill[title]))
                        del bill[title]
                    else:
                        row.append('')
                for k in sorted(bill.keys()):
                    row.append(k)
                    row.append(csv_make_val(bill[k]))
                writer.writerow(row)
            day_start += relativedelta(days=1)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(supply_id, file_name, start_date, finish_date, user):
    """Write the supply_virtual_bills CSV: one row per era of the supply
    overlapping the requested period, with MOP, DC and supplier virtual
    bills side by side.

    Fix over the original: ``f`` was first assigned inside the ``try``,
    so the ``finally`` block's ``if f is not None`` raised NameError
    whenever ``make_names``/``open`` failed; ``f`` is now initialised to
    None alongside ``sess``.
    """
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "supply_virtual_bills_" + str(supply_id) + ".csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        for era in (sess.query(Era).filter(
                Era.supply == supply, Era.start_date < finish_date,
                or_(Era.finish_date == null(), Era.finish_date > start_date),
                ).order_by(Era.start_date)):
            # Clamp the era to the requested window.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.era == era, SiteEra.is_physical == true()).one())
            ds = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                era.imp_supplier_contract is not None, caches,
            )
            titles = [
                "Imp MPAN Core", "Exp MPAN Core", "Site Code", "Site Name",
                "Account", "From", "To", "",
            ]
            output_line = [
                era.imp_mpan_core, era.exp_mpan_core, site.code, site.name,
                ds.supplier_account, hh_format(ds.start_date),
                hh_format(ds.finish_date), "",
            ]
            # MOP virtual bill.
            mop_titles = ds.contract_func(
                era.mop_contract, "virtual_bill_titles")()
            titles.extend(["mop-" + t for t in mop_titles])
            ds.contract_func(era.mop_contract, "virtual_bill")(ds)
            bill = ds.mop_bill
            for title in mop_titles:
                if title in bill:
                    output_line.append(bill[title])
                    del bill[title]
                else:
                    output_line.append("")
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            # DC virtual bill.
            output_line.append("")
            dc_titles = ds.contract_func(
                era.dc_contract, "virtual_bill_titles")()
            titles.append("")
            titles.extend(["dc-" + t for t in dc_titles])
            ds.contract_func(era.dc_contract, "virtual_bill")(ds)
            bill = ds.dc_bill
            for title in dc_titles:
                output_line.append(bill.get(title, ""))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            # TPRs measured in the chunk, used to extend supplier titles.
            tpr_query = (sess.query(Tpr).join(MeasurementRequirement).join(
                Ssc).join(Era).filter(
                Era.start_date <= chunk_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= chunk_start),
                ).order_by(Tpr.code).distinct())
            # Import supplier virtual bill, if there is one.
            if era.imp_supplier_contract is not None:
                output_line.append("")
                supplier_titles = ds.contract_func(
                    era.imp_supplier_contract, "virtual_bill_titles")()
                for tpr in tpr_query.filter(
                        Era.imp_supplier_contract != null()):
                    for suffix in ("-kwh", "-rate", "-gbp"):
                        supplier_titles.append(tpr.code + suffix)
                titles.append("")
                titles.extend(["imp-supplier-" + t for t in supplier_titles])
                ds.contract_func(era.imp_supplier_contract, "virtual_bill")(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    if title in bill:
                        output_line.append(bill[title])
                        del bill[title]
                    else:
                        output_line.append("")
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
            # Export supplier bill needs an export-oriented source.
            if era.exp_supplier_contract is not None:
                ds = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    False, caches)
                output_line.append("")
                supplier_titles = ds.contract_func(
                    era.exp_supplier_contract, "virtual_bill_titles")()
                for tpr in tpr_query.filter(
                        Era.exp_supplier_contract != null()):
                    for suffix in ("-kwh", "-rate", "-gbp"):
                        supplier_titles.append(tpr.code + suffix)
                titles.append("")
                titles.extend(["exp-supplier-" + t for t in supplier_titles])
                ds.contract_func(era.exp_supplier_contract, "virtual_bill")(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(bill.get(title, ""))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
            # Re-emit the header row only when the titles change.
            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            for i, val in enumerate(output_line):
                output_line[i] = csv_make_val(val)
            writer.writerow(output_line)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(batch_id, bill_id, contract_id, start_date, finish_date, user):
    """Write the 'bill_check.csv' report comparing real bills against
    virtual (recalculated) bills.

    Selects bills by batch, by single bill, or by contract + date range
    (exactly one of batch_id / bill_id / contract_id is expected to be
    non-None; NOTE(review): if all three are None, ``contract`` is never
    bound and a NameError follows — confirm callers always pass one).

    For each bill it grows a "covered" window of related bills sharing
    charge elements, recalculates the charges with the contract's
    virtual_bill function, and writes one CSV row per bill with
    covered/virtual/difference columns.  Presumably run in a background
    download thread; the file is renamed from its 'running' to its
    'finished' name on completion.
    """
    caches = {}
    tmp_file = sess = bill = None
    forecast_date = to_utc(Datetime.max)
    # NOTE(review): redundant — sess was already set to None above.
    sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')
        # Base query with eager-loading to avoid per-bill lazy loads.
        bills = sess.query(Bill).order_by(
            Bill.supply_id, Bill.reference).options(
            joinedload(Bill.supply),
            subqueryload(Bill.reads).joinedload(RegisterRead.present_type),
            subqueryload(Bill.reads).joinedload(RegisterRead.previous_type),
            joinedload(Bill.batch))
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = bills.filter(Bill.batch == batch)
            contract = batch.contract
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = bills.filter(Bill.id == bill.id)
            contract = bill.batch.contract
        elif contract_id is not None:
            contract = Contract.get_by_id(sess, contract_id)
            bills = bills.join(Batch).filter(
                Batch.contract == contract,
                Bill.start_date <= finish_date,
                Bill.finish_date >= start_date)
        market_role_code = contract.market_role.code
        vbf = chellow.computer.contract_func(caches, contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")
        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()
        # Fixed header columns, then covered/virtual (and difference for
        # money columns) for every virtual-bill title.
        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh',
            'bill-net-gbp', 'bill-vat-gbp', 'bill-start-date',
            'bill-finish-date', 'imp-mpan-core', 'exp-mpan-core',
            'site-code', 'site-name', 'covered-from', 'covered-to',
            'covered-bills', 'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)
        writer.writerow(titles)
        # Group bill ids by supply so coverage is worked out per supply.
        bill_map = defaultdict(set, {})
        for bill in bills:
            bill_map[bill.supply.id].add(bill.id)
        for supply_id, bill_ids in bill_map.items():
            gaps = {}
            data_sources = {}
            while len(bill_ids) > 0:
                # Process bills in id order; covered bills found later are
                # removed from bill_ids so each is reported once.
                bill_id = list(sorted(bill_ids))[0]
                bill_ids.remove(bill_id)
                bill = sess.query(Bill).filter(Bill.id == bill_id).options(
                    joinedload(Bill.batch), joinedload(Bill.bill_type),
                    joinedload(Bill.reads), joinedload(Bill.supply),
                    joinedload(Bill.reads).joinedload(
                        RegisterRead.present_type),
                    joinedload(Bill.reads).joinedload(
                        RegisterRead.previous_type)).one()
                virtual_bill = {'problem': ''}
                supply = bill.supply
                # Sanity-check register reads: MSN must match the era, and
                # reads taken at the same instant must share a read type.
                read_dict = {}
                for read in bill.reads:
                    gen_start = read.present_date.replace(hour=0).replace(
                        minute=0)
                    gen_finish = gen_start + relativedelta(days=1) - HH
                    msn_match = False
                    read_msn = read.msn
                    for read_era in supply.find_eras(
                            sess, gen_start, gen_finish):
                        if read_msn == read_era.msn:
                            msn_match = True
                            break
                    if not msn_match:
                        virtual_bill['problem'] += "The MSN " + read_msn + \
                            " of the register read " + str(read.id) + \
                            " doesn't match the MSN of the era."
                    for dt, typ in [
                            (read.present_date, read.present_type),
                            (read.previous_date, read.previous_type)]:
                        key = str(dt) + "-" + read.msn
                        try:
                            if typ != read_dict[key]:
                                virtual_bill['problem'] += " Reads taken " + \
                                    "on " + str(dt) + \
                                    " have differing read types."
                        except KeyError:
                            read_dict[key] = typ
                bill_start = bill.start_date
                bill_finish = bill.finish_date
                covered_start = bill_start
                covered_finish = bill_finish
                covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}
                vb_elems = set()
                # Grow the covered window until no overlapping bill (with a
                # shared charge element) extends it further.
                enlarged = True
                while enlarged:
                    enlarged = False
                    covered_elems = find_elements(bill)
                    covered_bills = OrderedDict(
                        (b.id, b) for b in sess.query(Bill).join(Batch).
                        join(Contract).join(MarketRole).filter(
                            Bill.supply == supply,
                            Bill.start_date <= covered_finish,
                            Bill.finish_date >= covered_start,
                            MarketRole.code == market_role_code).order_by(
                            Bill.start_date, Bill.issue_date))
                    # Drop pairs of bills that exactly cancel each other
                    # (a bill and its reversal).
                    while True:
                        to_del = None
                        for a, b in combinations(covered_bills.values(), 2):
                            if all(
                                    (
                                        a.start_date == b.start_date,
                                        a.finish_date == b.finish_date,
                                        a.kwh == -1 * b.kwh,
                                        a.net == -1 * b.net,
                                        a.vat == -1 * b.vat,
                                        a.gross == -1 * b.gross)):
                                to_del = (a.id, b.id)
                                break
                        if to_del is None:
                            break
                        else:
                            for k in to_del:
                                del covered_bills[k]
                    for k, covered_bill in tuple(covered_bills.items()):
                        elems = find_elements(covered_bill)
                        if elems.isdisjoint(covered_elems):
                            # Unrelated charge elements: not really covering
                            # this bill (unless it's the bill itself).
                            if k != bill.id:
                                del covered_bills[k]
                            continue
                        else:
                            covered_elems.update(elems)
                        if covered_bill.start_date < covered_start:
                            covered_start = covered_bill.start_date
                            enlarged = True
                            break
                        if covered_bill.finish_date > covered_finish:
                            covered_finish = covered_bill.finish_date
                            enlarged = True
                            break
                if len(covered_bills) == 0:
                    continue
                # Accumulate the covered bills' breakdowns; the primary
                # covered bill is the one spanning the longest period.
                primary_covered_bill = None
                for covered_bill in covered_bills.values():
                    if covered_bill.id in bill_ids:
                        bill_ids.remove(covered_bill.id)
                    covered_bdown['net-gbp'] += float(covered_bill.net)
                    covered_bdown['vat-gbp'] += float(covered_bill.vat)
                    covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                    covered_rates = defaultdict(set)
                    for k, v in loads(covered_bill.breakdown).items():
                        if k in ('raw_lines', 'raw-lines'):
                            continue
                        if isinstance(v, list):
                            covered_rates[k].update(set(v))
                        else:
                            if isinstance(v, Decimal):
                                v = float(v)
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " in " + str(
                                        [
                                            b.id
                                            for b in covered_bills.values()
                                        ]) + " the value " + str(v) +
                                    " can't be added to the existing value " +
                                    str(covered_bdown[k]) + ". " + str(detail))
                            if k.endswith('-gbp'):
                                elem = k[:-4]
                                covered_elems.add(elem)
                                add_gap(
                                    caches, gaps, elem,
                                    covered_bill.start_date,
                                    covered_bill.finish_date, False, v)
                    for k, v in covered_rates.items():
                        # A rate is only meaningful if all bills agree on it.
                        covered_bdown[k] = v.pop() if len(v) == 1 else None
                    if primary_covered_bill is None or (
                            (
                                covered_bill.finish_date -
                                covered_bill.start_date) > (
                                primary_covered_bill.finish_date -
                                primary_covered_bill.start_date)):
                        primary_covered_bill = covered_bill
                # Recalculate charges era by era across the covered window.
                metered_kwh = 0
                for era in sess.query(Era).filter(
                        Era.supply == supply,
                        Era.start_date <= covered_finish,
                        or_(
                            Era.finish_date == null(),
                            Era.finish_date >= covered_start)
                        ).distinct().options(
                        joinedload(Era.channels),
                        joinedload(Era.cop),
                        joinedload(Era.dc_contract),
                        joinedload(Era.exp_llfc),
                        joinedload(Era.exp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.exp_supplier_contract),
                        joinedload(Era.imp_llfc),
                        joinedload(Era.imp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.imp_supplier_contract),
                        joinedload(Era.mop_contract),
                        joinedload(Era.mtc).joinedload(Mtc.meter_type),
                        joinedload(Era.pc),
                        joinedload(Era.supply).joinedload(Supply.dno),
                        joinedload(Era.supply).joinedload(Supply.gsp_group),
                        joinedload(Era.supply).joinedload(Supply.source)):
                    chunk_start = hh_max(covered_start, era.start_date)
                    chunk_finish = hh_min(covered_finish, era.finish_date)
                    if contract not in (
                            era.mop_contract, era.dc_contract,
                            era.imp_supplier_contract,
                            era.exp_supplier_contract):
                        virtual_bill['problem'] += ''.join(
                            (
                                "From ", hh_format(chunk_start), " to ",
                                hh_format(chunk_finish), " the contract of ",
                                "the era doesn't match the contract of the ",
                                "bill."))
                        continue
                    # polarity True => treat as import side of the supply.
                    if contract.market_role.code == 'X':
                        polarity = contract != era.exp_supplier_contract
                    else:
                        polarity = era.imp_supplier_contract is not None
                    '''
                    pairs = []
                    last_finish = chunk_start - HH
                    for hd in chellow.computer.datum_range(
                            sess, caches, 0, chunk_start, chunk_finish):
                        if hd['utc-is-month-end'] or hd['ct-is-month-end']:
                            end_date = hd['start-date']
                            pairs.append((last_finish + HH, end_date))
                            last_finish = end_date
                    if hd['start-date'] > last_finish:
                        pairs.append((last_finish + HH, hd['start-date']))
                    for ss_start, ss_finish in pairs:
                    '''
                    # Cache SupplySources per (chunk, era, polarity, primary
                    # bill); the virtual-bill function runs once on creation.
                    try:
                        ds_key = (
                            chunk_start, chunk_finish, forecast_date, era.id,
                            polarity, primary_covered_bill.id)
                        data_source = data_sources[ds_key]
                    except KeyError:
                        data_source = data_sources[ds_key] = \
                            chellow.computer.SupplySource(
                                sess, chunk_start, chunk_finish,
                                forecast_date, era, polarity, caches,
                                primary_covered_bill)
                        vbf(data_source)
                    if data_source.measurement_type == 'hh':
                        metered_kwh += sum(
                            h['msp-kwh'] for h in data_source.hh_data)
                    else:
                        # Non-hh: build a bill-independent source to get the
                        # actual metered energy.
                        ds = chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, polarity, caches)
                        metered_kwh += sum(
                            h['msp-kwh'] for h in ds.hh_data)
                    if market_role_code == 'X':
                        vb = data_source.supplier_bill
                    elif market_role_code == 'C':
                        vb = data_source.dc_bill
                    elif market_role_code == 'M':
                        vb = data_source.mop_bill
                    else:
                        raise BadRequest("Odd market role.")
                    # Merge this era's virtual bill into the running totals;
                    # sets are unioned, numbers are summed.
                    for k, v in vb.items():
                        try:
                            if isinstance(v, set):
                                virtual_bill[k].update(v)
                            else:
                                virtual_bill[k] += v
                        except KeyError:
                            virtual_bill[k] = v
                        except TypeError as detail:
                            raise BadRequest(
                                "For key " + str(k) + " and value " +
                                str(v) + ". " + str(detail))
                        if all(
                                (
                                    k.endswith('-gbp'), k != 'net-gbp',
                                    v != 0)):
                            add_gap(
                                caches, gaps, k[:-4], chunk_start,
                                chunk_finish, True, v)
                # Remove virtual-bill elements that no covered bill charges,
                # then recompute net-gbp from the remaining money keys.
                for k in virtual_bill.keys():
                    if k.endswith('-gbp'):
                        vb_elems.add(k[:-4])
                long_map = {}
                vb_keys = set(virtual_bill.keys())
                for elem in sorted(vb_elems, key=len, reverse=True):
                    els = long_map[elem] = set()
                    for k in tuple(vb_keys):
                        if k.startswith(elem + '-'):
                            els.add(k)
                            vb_keys.remove(k)
                for elem in vb_elems.difference(covered_elems):
                    for k in long_map[elem]:
                        del virtual_bill[k]
                try:
                    del virtual_bill['net-gbp']
                except KeyError:
                    pass
                virtual_bill['net-gbp'] = sum(
                    v for k, v in virtual_bill.items() if k.endswith('-gbp'))
                era = supply.find_era_at(sess, bill_finish)
                if era is None:
                    imp_mpan_core = exp_mpan_core = None
                    site_code = site_name = None
                    virtual_bill['problem'] += \
                        "This bill finishes before or after the supply. "
                else:
                    imp_mpan_core = era.imp_mpan_core
                    exp_mpan_core = era.exp_mpan_core
                    site = sess.query(Site).join(SiteEra).filter(
                        SiteEra.is_physical == true(),
                        SiteEra.era == era).one()
                    site_code = site.code
                    site_name = site.name
                # Find bill to use for header data
                if bill.id not in covered_bills:
                    for cbill in covered_bills.values():
                        if bill.batch == cbill.batch:
                            bill = cbill
                values = [
                    bill.batch.reference, bill.reference,
                    bill.bill_type.code, bill.kwh, bill.net, bill.vat,
                    hh_format(bill_start), hh_format(bill_finish),
                    imp_mpan_core, exp_mpan_core, site_code, site_name,
                    hh_format(covered_start), hh_format(covered_finish),
                    ':'.join(
                        str(i).replace(',', '')
                        for i in covered_bills.keys()),
                    metered_kwh]
                # covered / virtual / difference columns, consuming the dicts
                # so leftovers can be appended afterwards.
                for title in virtual_bill_titles:
                    try:
                        cov_val = covered_bdown[title]
                        values.append(cov_val)
                        del covered_bdown[title]
                    except KeyError:
                        cov_val = None
                        values.append('')
                    try:
                        virt_val = csv_make_val(virtual_bill[title])
                        values.append(virt_val)
                        del virtual_bill[title]
                    except KeyError:
                        virt_val = 0
                        values.append('')
                    if title.endswith('-gbp'):
                        if isinstance(virt_val, (int, float, Decimal)):
                            if isinstance(
                                    cov_val, (int, float, Decimal)):
                                values.append(
                                    float(cov_val) - float(virt_val))
                            else:
                                values.append(0 - float(virt_val))
                        else:
                            values.append('')
                # Any keys not in the standard titles are appended as
                # name/value pairs.
                for title in sorted(virtual_bill.keys()):
                    virt_val = csv_make_val(virtual_bill[title])
                    values += ['virtual-' + title, virt_val]
                    if title in covered_bdown:
                        values += ['covered-' + title, covered_bdown[title]]
                    else:
                        values += ['', '']
                writer.writerow(values)
                # Record money covered by any overlapping real bill, for the
                # gap analysis below.
                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start):
                    for k, v in loads(bill.breakdown).items():
                        if k.endswith('-gbp'):
                            add_gap(
                                caches, gaps, k[:-4], bill.start_date,
                                bill.finish_date, False, v)
                # Avoid long-running transactions
                sess.rollback()
            # Coalesce contiguous half-hours that were virtually charged but
            # not covered by any real bill into 'missing_bill' rows.
            clumps = []
            for element, elgap in sorted(gaps.items()):
                for start_date, hhgap in sorted(elgap.items()):
                    if hhgap['has_virtual'] and not hhgap['has_covered']:
                        if len(clumps) == 0 or not all(
                                (
                                    clumps[-1]['element'] == element,
                                    clumps[-1]['finish_date'] + HH ==
                                    start_date)):
                            clumps.append(
                                {
                                    'element': element,
                                    'start_date': start_date,
                                    'finish_date': start_date,
                                    'gbp': hhgap['gbp']})
                        else:
                            clumps[-1]['finish_date'] = start_date
            for i, clump in enumerate(clumps):
                vals = dict((title, '') for title in titles)
                vals['covered-problem'] = '_'.join(
                    (
                        'missing', clump['element'], 'supplyid',
                        str(supply.id), 'from',
                        hh_format(clump['start_date'])))
                # NOTE(review): imp_mpan_core/exp_mpan_core are left over
                # from the last bill processed above — confirm that's
                # intended for missing-bill rows.
                vals['imp-mpan-core'] = imp_mpan_core
                vals['exp-mpan-core'] = exp_mpan_core
                vals['batch'] = 'missing_bill'
                vals['bill-start-date'] = hh_format(clump['start_date'])
                vals['bill-finish-date'] = hh_format(clump['finish_date'])
                vals['difference-net-gbp'] = clump['gbp']
                writer.writerow(vals[title] for title in titles)
            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        if bill is None:
            prefix = "Problem: "
        else:
            prefix = "Problem with bill " + str(bill.id) + ':'
        tmp_file.write(prefix + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        # NOTE(review): tmp_file may still be None here if set-up failed
        # before the file was opened — confirm and guard if so.
        tmp_file.close()
        os.rename(running_name, finished_name)
def create_csv(f, sess, start_date, finish_date, contract_id):
    """Write a virtual-bill CSV for a supplier contract to the file *f*.

    For each month in the range and each era attached to the contract
    (import or export side), the contract's 'virtual_bill' function is
    run and one row is written.  A BadRequest from an era is re-raised
    with a link to that era's edit page.
    """
    cache = {}
    out = csv.writer(f, lineterminator="\n")
    contract = Contract.get_supplier_by_id(sess, contract_id)
    forecast_date = chellow.computer.forecast_date()

    ct_start, ct_finish = to_ct(start_date), to_ct(finish_date)
    months = c_months_u(
        start_year=ct_start.year,
        start_month=ct_start.month,
        finish_year=ct_finish.year,
        finish_month=ct_finish.month,
    )

    # Contract-supplied titles, plus kwh/rate/gbp columns for every TPR
    # that any era of this contract requires.
    titles = contract_func(cache, contract, "virtual_bill_titles")()
    tpr_q = (
        sess.query(Tpr)
        .join(MeasurementRequirement)
        .join(Ssc)
        .join(Era)
        .filter(
            Era.start_date <= finish_date,
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            or_(
                Era.imp_supplier_contract == contract,
                Era.exp_supplier_contract == contract,
            ),
        )
        .order_by(Tpr.code)
        .distinct()
    )
    for tpr in tpr_q:
        titles.extend(tpr.code + s for s in ("-kwh", "-rate", "-gbp"))

    out.writerow(
        ["MPAN Core", "Site Code", "Site Name", "Account", "From", "To"]
        + titles
    )

    virtual_bill = contract_func(cache, contract, "virtual_bill")
    for month_start, month_finish in months:
        # Clamp each month to the requested overall range.
        chunk_start = hh_max(start_date, month_start)
        chunk_finish = hh_min(finish_date, month_finish)
        era_q = (
            sess.query(Era)
            .filter(
                or_(
                    Era.imp_supplier_contract == contract,
                    Era.exp_supplier_contract == contract,
                ),
                Era.start_date <= chunk_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= chunk_start,
                ),
            )
            .order_by(Era.imp_mpan_core)
        )
        for era in era_q:
            try:
                row = _process_era(
                    sess,
                    cache,
                    virtual_bill,
                    forecast_date,
                    titles,
                    contract,
                    chunk_start,
                    chunk_finish,
                    era,
                )
                out.writerow(csv_make_val(v) for v in row)
            except BadRequest as e:
                raise BadRequest(
                    f"Problem with {chellow.utils.url_root}eras/{era.id}/edit "
                    f"{e.description}"
                )
def content(supply_id, file_name, start_date, finish_date, user):
    """Write a CSV of virtual bills (MOP, DC, import and export supplier)
    for every era of one supply over a date range.

    Runs as a background download: output goes to a 'running' file which
    is renamed to its 'finished' name when done.  The header row is
    re-emitted whenever an era produces a different set of titles.

    ``file_name`` is currently unused; it is kept for interface
    compatibility with callers.
    """
    caches = {}
    # Initialise everything referenced in the except/finally clauses so an
    # early failure (e.g. Session() raising) can't cause a NameError that
    # masks the real error.
    sess = f = writer = None
    running_name = finished_name = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supply_virtual_bills_' + str(supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        for era in sess.query(Era).filter(
                Era.supply == supply, Era.start_date < finish_date,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date > start_date)).order_by(Era.start_date):
            # Clamp the era to the requested range.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            site = sess.query(Site).join(SiteEra).filter(
                SiteEra.era == era, SiteEra.is_physical == true()).one()
            ds = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, True,
                caches)
            titles = [
                'Imp MPAN Core', 'Exp MPAN Core', 'Site Code', 'Site Name',
                'Account', 'From', 'To', '']
            output_line = [
                era.imp_mpan_core, era.exp_mpan_core, site.code, site.name,
                ds.supplier_account, hh_format(ds.start_date),
                hh_format(ds.finish_date), '']
            # MOP virtual bill: known titles first, then any leftover keys
            # as name/value pairs.
            mop_titles = ds.contract_func(
                era.mop_contract, 'virtual_bill_titles')()
            titles.extend(['mop-' + t for t in mop_titles])
            ds.contract_func(era.mop_contract, 'virtual_bill')(ds)
            bill = ds.mop_bill
            for title in mop_titles:
                if title in bill:
                    output_line.append(bill[title])
                    del bill[title]
                else:
                    output_line.append('')
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            # DC virtual bill, same pattern.
            output_line.append('')
            dc_titles = ds.contract_func(
                era.hhdc_contract, 'virtual_bill_titles')()
            titles.append('')
            titles.extend(['dc-' + t for t in dc_titles])
            ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds)
            bill = ds.dc_bill
            for title in dc_titles:
                output_line.append(bill.get(title, ''))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            # TPRs measured in this period; used to extend supplier titles.
            tpr_query = sess.query(Tpr).join(MeasurementRequirement).\
                join(Ssc).join(Era).filter(
                    Era.start_date <= chunk_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= chunk_start)
                ).order_by(Tpr.code).distinct()
            if era.imp_supplier_contract is not None:
                output_line.append('')
                supplier_titles = ds.contract_func(
                    era.imp_supplier_contract, 'virtual_bill_titles')()
                for tpr in tpr_query.filter(
                        Era.imp_supplier_contract != null()):
                    for suffix in ('-kwh', '-rate', '-gbp'):
                        supplier_titles.append(tpr.code + suffix)
                titles.append('')
                titles.extend(
                    ['imp-supplier-' + t for t in supplier_titles])
                ds.contract_func(
                    era.imp_supplier_contract, 'virtual_bill')(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    if title in bill:
                        output_line.append(bill[title])
                        del bill[title]
                    else:
                        output_line.append('')
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
            if era.exp_supplier_contract is not None:
                # Export side needs its own SupplySource (is_import=False).
                ds = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    False, caches)
                output_line.append('')
                supplier_titles = ds.contract_func(
                    era.exp_supplier_contract, 'virtual_bill_titles')()
                for tpr in tpr_query.filter(
                        Era.exp_supplier_contract != null()):
                    for suffix in ('-kwh', '-rate', '-gbp'):
                        supplier_titles.append(tpr.code + suffix)
                titles.append('')
                titles.extend(
                    ['exp-supplier-' + t for t in supplier_titles])
                ds.contract_func(
                    era.exp_supplier_contract, 'virtual_bill')(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(bill.get(title, ''))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
            # Only write a header row when the titles change between eras.
            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            for i, val in enumerate(output_line):
                output_line[i] = csv_make_val(val)
            writer.writerow(output_line)
    except BadRequest as e:
        if writer is not None:
            writer.writerow(["Problem: " + e.description])
    except BaseException:
        # Was a bare 'except:'; BaseException keeps the same coverage while
        # making the intent explicit.  Best-effort: record the traceback in
        # the CSV if the file got as far as being opened.
        if writer is not None:
            writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
def content(user):
    """Write 'batches.csv': one row per electricity batch and per gas
    batch, with bill counts and summed net/vat/gross/kwh totals.

    Runs as a background download: output goes to a 'running' file which
    is renamed to its 'finished' name when done.
    """
    sess = f = writer = None
    running_name = finished_name = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "batches.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "utility",
            "chellow_id",
            "reference",
            "description",
            "contract_name",
            "num_bills",
            "net_gbp",
            "vat_gbp",
            "gross_gbp",
            "kwh",
            "min_start_date",
        )
        writer.writerow(titles)
        # Electricity batches; totals aggregated in the database.
        for batch, contract in (sess.query(Batch, Contract).join(
                Contract).order_by(Batch.contract_id, Batch.reference)):
            (
                num_bills,
                sum_net_gbp,
                sum_vat_gbp,
                sum_gross_gbp,
                sum_kwh,
                min_start_date,
            ) = (sess.query(
                func.count(Bill.id),
                func.sum(Bill.net),
                func.sum(Bill.vat),
                func.sum(Bill.gross),
                func.sum(Bill.kwh),
                func.min(Bill.start_date),
            ).filter(Bill.batch == batch).one())
            if sum_net_gbp is None:
                # Batch has no bills: report zeros rather than NULLs.
                sum_net_gbp = sum_vat_gbp = sum_gross_gbp = sum_kwh = 0
            vals = {
                "utility": "electricity",
                "chellow_id": batch.id,
                "reference": batch.reference,
                "description": batch.description,
                "contract_name": contract.name,
                "num_bills": num_bills,
                "net_gbp": sum_net_gbp,
                "vat_gbp": sum_vat_gbp,
                "gross_gbp": sum_gross_gbp,
                "kwh": sum_kwh,
                "min_start_date": min_start_date,
            }
            writer.writerow(csv_make_val(vals[t]) for t in titles)
            # Avoid a long-running transaction
            sess.rollback()
        # Gas batches, same row shape.
        for g_batch, g_contract in (sess.query(
                GBatch, GContract).join(GContract).order_by(
                GBatch.g_contract_id, GBatch.reference)):
            (
                num_bills,
                sum_net_gbp,
                sum_vat_gbp,
                sum_gross_gbp,
                sum_kwh,
                min_start_date,
            ) = (sess.query(
                func.count(GBill.id),
                func.sum(GBill.net),
                func.sum(GBill.vat),
                func.sum(GBill.gross),
                func.sum(GBill.kwh),
                func.min(GBill.start_date),
            ).filter(GBill.g_batch == g_batch).one())
            if sum_net_gbp is None:
                sum_net_gbp = sum_vat_gbp = sum_gross_gbp = sum_kwh = 0
            vals = {
                "utility": "gas",
                "chellow_id": g_batch.id,
                "reference": g_batch.reference,
                "description": g_batch.description,
                "contract_name": g_contract.name,
                "num_bills": num_bills,
                "net_gbp": sum_net_gbp,
                "vat_gbp": sum_vat_gbp,
                "gross_gbp": sum_gross_gbp,
                "kwh": sum_kwh,
                "min_start_date": min_start_date,
            }
            writer.writerow(csv_make_val(vals[t]) for t in titles)
            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        # writer may still be None if the failure happened while setting
        # up; guard so the original traceback isn't masked.
        if writer is not None:
            writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
def _write_sites(sess, caches, writer, year, site_id):
    """Write one CSV row per site showing its displaced TRIAD charges for
    the TRIAD year ending 31 March of *year*.

    Each row has the three TRIAD dates with MSP kW, LAF and GSP kW, then
    the overall displaced GSP kW, rate and GBP.
    """
    titles = (
        "Site Code",
        "Site Name",
        "Displaced TRIAD 1 Date",
        "Displaced TRIAD 1 MSP kW",
        # NOTE(review): header says "TRIAD LAF" where the other two say
        # "TRIAD 2 LAF" / "TRIAD 3 LAF" — presumably a typo for
        # "Displaced TRIAD 1 LAF"; confirm before changing, as consumers
        # may match on the existing header text.
        "Displaced TRIAD LAF",
        "Displaced TRIAD 1 GSP kW",
        "Displaced TRIAD 2 Date",
        "Displaced TRIAD 2 MSP kW",
        "Displaced TRIAD 2 LAF",
        "Displaced TRIAD 2 GSP kW",
        "Displaced TRIAD 3 Date",
        "Displaced TRIAD 3 MSP kW",
        "Displaced TRIAD 3 LAF",
        "Displaced TRIAD 3 GSP kW",
        "Displaced GSP kW",
        "Displaced Rate GBP / kW",
        "GBP",
    )
    writer.writerow(titles)
    # TRIADs fall in March; the TRIAD year runs from 1 April (year - 1).
    march_finish_ct = ct_datetime(year, 4, 1) - HH
    march_finish_utc = to_utc(march_finish_ct)
    march_start_ct = ct_datetime(year, 3, 1)
    march_start_utc = to_utc(march_start_ct)
    year_start = to_utc(ct_datetime(year - 1, 4, 1))
    forecast_date = chellow.computer.forecast_date()
    sites = _make_sites(
        sess, year_start, march_finish_utc, site_id, ("gen", "gen-net"))
    # Per-hh keys copied straight into the bill hh (scalars) versus keys
    # wrapped in a set so reduce_bill_hhs can detect differing values.
    scalar_names = {"triad-actual-gsp-kw", "triad-actual-gbp"}
    rate_names = {"triad-actual-rate", "triad-estimate-rate"}
    for i in range(1, 4):
        pref = "triad-actual-" + str(i) + "-"
        for suf in ("msp-kw", "gsp-kw"):
            scalar_names.add(pref + suf)
        for suf in ("date", "status", "laf"):
            rate_names.add(pref + suf)
    for site in sites:
        # Find the most recent month in the TRIAD year with a displaced era.
        displaced_era = None
        for month_start, month_finish in sorted(
                c_months_u(start_year=year - 1, start_month=4, months=12),
                reverse=True):
            displaced_era = chellow.computer.displaced_era(
                sess, caches, site, month_start, month_finish, forecast_date)
            if displaced_era is not None:
                break
        if displaced_era is None:
            # NOTE(review): this 'break' abandons ALL remaining sites when
            # one site has no displaced era — a 'continue' (skip just this
            # site) looks more plausible; confirm intended behaviour.
            break
        site_ds = chellow.computer.SiteSource(
            sess,
            site,
            march_start_utc,
            march_finish_utc,
            forecast_date,
            caches,
            displaced_era,
        )
        chellow.duos.duos_vb(site_ds)
        chellow.triad.hh(site_ds)
        # Copy the triad figures from the hh data into the bill hhs before
        # reducing them to a single bill.
        for hh in site_ds.hh_data:
            bill_hh = site_ds.supplier_bill_hhs[hh["start-date"]]
            for k in scalar_names & hh.keys():
                bill_hh[k] = hh[k]
            for k in rate_names & hh.keys():
                bill_hh[k] = {hh[k]}
        bill = reduce_bill_hhs(site_ds.supplier_bill_hhs)
        values = [site.code, site.name]
        for i in range(1, 4):
            triad_prefix = "triad-actual-" + str(i) + "-"
            for suffix in ("date", "msp-kw", "laf", "gsp-kw"):
                values.append(csv_make_val(bill[triad_prefix + suffix]))
        for suffix in ("gsp-kw", "rate", "gbp"):
            values.append(csv_make_val(bill["triad-actual-" + suffix]))
        writer.writerow(values)
        # Avoid long-running transaction
        sess.rollback()
def content(running_name, finished_name, date, supply_id, mpan_cores):
    """Write a snapshot CSV of every era active on *date*: one row per
    era with site, metering, contract, read and capacity details.

    Optionally restricted to one supply (``supply_id``) and/or a list of
    MPAN cores.  Writes to ``running_name`` and renames the file to
    ``finished_name`` on completion.
    """
    sess = None
    try:
        sess = Session()
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "Date", "Import MPAN Core", "Export MPAN Core",
            "Physical Site Id", "Physical Site Name", "Other Site Ids",
            "Other Site Names", "Supply Id", "Source", "Generator Type",
            "GSP Group", "DNO Name", "Voltage Level", "Is Substations",
            "Metering Type", "Mandatory HH", "PC", "MTC", "CoP", "SSC Code",
            "SSC Description", "Energisation Status", "Number Of Registers",
            "MOP Contract", "Mop Account", "DC Contract", "DC Account",
            "Meter Serial Number", "Meter Installation Date",
            "Latest Normal Meter Read Date", "Latest Normal Meter Read Type",
            "Latest DC Bill Date", "Latest MOP Bill Date",
            "Supply Start Date", "Supply Finish Date", "Properties",
            "Import ACTIVE?", "Import REACTIVE_IMPORT?",
            "Import REACTIVE_EXPORT?", "Export ACTIVE?",
            "Export REACTIVE_IMPORT?", "Export REACTIVE_EXPORT?",
            "Import Agreed Supply Capacity (kVA)", "Import LLFC Code",
            "Import LLFC Description", "Import Supplier Contract",
            "Import Supplier Account", "Import Mandatory kW",
            "Latest Import Supplier Bill Date",
            "Export Agreed Supply Capacity (kVA)", "Export LLFC Code",
            "Export LLFC Description", "Export Supplier Contract",
            "Export Supplier Account", "Export Mandatory kW",
            "Latest Export Supplier Bill Date",
        )
        writer.writerow(titles)
        # Read types counting as a 'normal' (actual) read.
        NORMAL_READ_TYPES = ("N", "C", "N3")
        year_start = date + HH - relativedelta(years=1)
        # First pass: just the era ids, to keep memory use down; each era
        # is then loaded individually with eager loading.
        era_ids = (sess.query(Era.id).filter(
            Era.start_date <= date,
            or_(Era.finish_date == null(), Era.finish_date >= date),
        ).order_by(Era.supply_id))
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            era_ids = era_ids.filter(Era.supply == supply)
        if mpan_cores is not None:
            era_ids = era_ids.filter(
                or_(
                    Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
        for (era_id, ) in era_ids:
            era, supply, generator_type = (sess.query(
                Era, Supply, GeneratorType).join(
                Supply, Era.supply_id == Supply.id).outerjoin(
                GeneratorType,
                Supply.generator_type_id == GeneratorType.id).filter(
                Era.id == era_id).options(
                joinedload(Era.channels),
                joinedload(Era.cop),
                joinedload(Era.dc_contract),
                joinedload(Era.exp_llfc),
                joinedload(Era.exp_supplier_contract),
                joinedload(Era.imp_llfc),
                joinedload(Era.imp_supplier_contract),
                joinedload(Era.mop_contract),
                joinedload(Era.mtc),
                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                joinedload(Era.pc),
                joinedload(Era.site_eras).joinedload(SiteEra.site),
                joinedload(Era.ssc),
                joinedload(Era.energisation_status),
                joinedload(Era.supply).joinedload(Supply.source),
                joinedload(Era.supply).joinedload(Supply.gsp_group),
                joinedload(Era.supply).joinedload(Supply.dno),
            ).one())
            # Split the era's sites into the physical one and the rest.
            site_codes = []
            site_names = []
            for site_era in era.site_eras:
                if site_era.is_physical:
                    physical_site = site_era.site
                else:
                    site = site_era.site
                    site_codes.append(site.code)
                    site_names.append(site.name)
            sup_eras = (sess.query(Era).filter(
                Era.supply == supply).order_by(Era.start_date).all())
            supply_start_date = sup_eras[0].start_date
            supply_finish_date = sup_eras[-1].finish_date
            # Voltage/substation details come from whichever side (import
            # or export) the era actually has.
            if era.imp_mpan_core is None:
                voltage_level_code = era.exp_llfc.voltage_level.code
                is_substation = era.exp_llfc.is_substation
            else:
                voltage_level_code = era.imp_llfc.voltage_level.code
                is_substation = era.imp_llfc.is_substation
            if generator_type is None:
                generator_type_str = ""
            else:
                generator_type_str = generator_type.code
            metering_type = era.meter_category
            if metering_type in ("nhh", "amr"):
                # Latest 'normal' register read, looking at both the
                # previous and present sides of reads.
                latest_prev_normal_read = (
                    sess.query(RegisterRead).join(Bill).join(
                        RegisterRead.previous_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.previous_date <= date,
                        Bill.supply_id == supply.id,
                    ).order_by(RegisterRead.previous_date.desc()).options(
                        joinedload(RegisterRead.previous_type)).first())
                latest_pres_normal_read = (
                    sess.query(RegisterRead).join(Bill).join(
                        RegisterRead.present_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.present_date <= date,
                        Bill.supply == supply,
                    ).order_by(RegisterRead.present_date.desc()).options(
                        joinedload(RegisterRead.present_type)).first())
                if latest_prev_normal_read is None and \
                        latest_pres_normal_read is None:
                    latest_normal_read_date = None
                    latest_normal_read_type = None
                elif (latest_pres_normal_read is not None and
                        latest_prev_normal_read is None):
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                elif (latest_pres_normal_read is None and
                        latest_prev_normal_read is not None):
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                elif (latest_pres_normal_read.present_date >
                        latest_prev_normal_read.previous_date):
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                else:
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                if latest_normal_read_date is not None:
                    latest_normal_read_date = hh_format(
                        latest_normal_read_date)
            else:
                # hh metering: the column shows the metering type instead
                # of a read date.
                latest_normal_read_date = metering_type
                latest_normal_read_type = None
            mop_contract = era.mop_contract
            mop_contract_name = mop_contract.name
            mop_account = era.mop_account
            latest_mop_bill_date = (sess.query(
                Bill.finish_date).join(Batch).filter(
                Bill.start_date <= date,
                Bill.supply == supply,
                Batch.contract == mop_contract,
            ).order_by(Bill.finish_date.desc()).first())
            if latest_mop_bill_date is not None:
                latest_mop_bill_date = hh_format(latest_mop_bill_date[0])
            dc_contract = era.dc_contract
            dc_contract_name = dc_contract.name
            dc_account = era.dc_account
            latest_dc_bill_date = (sess.query(
                Bill.finish_date).join(Batch).filter(
                Bill.start_date <= date,
                Bill.supply == supply,
                Batch.contract == dc_contract,
            ).order_by(Bill.finish_date.desc()).first())
            if latest_dc_bill_date is not None:
                latest_dc_bill_date = hh_format(latest_dc_bill_date[0])
            # Presence flags for each channel type, import then export.
            channel_values = []
            for imp_related in [True, False]:
                for channel_type in CHANNEL_TYPES:
                    if era.find_channel(
                            sess, imp_related, channel_type) is None:
                        channel_values.append("false")
                    else:
                        channel_values.append("true")
            # 'Mandatory kW': twice the mean of the three highest monthly
            # maximum half-hourly demands over the last year.
            imp_avg_months = None
            exp_avg_months = None
            for is_import in [True, False]:
                if metering_type == "nhh":
                    continue
                params = {
                    "supply_id": supply.id,
                    "year_start": year_start,
                    "year_finish": date,
                    "is_import": is_import,
                }
                month_mds = tuple(md[0] * 2 for md in sess.execute(
                    """
select max(hh_datum.value) as md
from hh_datum join channel on (hh_datum.channel_id = channel.id)
    join era on (channel.era_id = era.id)
where era.supply_id = :supply_id
    and hh_datum.start_date >= :year_start
    and hh_datum.start_date <= :year_finish
    and channel.channel_type = 'ACTIVE'
    and channel.imp_related = :is_import
group by extract(month from (hh_datum.start_date at time zone 'utc'))
order by md desc
limit 3
""",
                    params=params,
                ))
                avg_months = sum(month_mds)
                if len(month_mds) > 0:
                    avg_months /= len(month_mds)
                if is_import:
                    imp_avg_months = avg_months
                else:
                    exp_avg_months = avg_months
            # 100 kW appears to be the mandatory-HH threshold — TODO
            # confirm against settlement rules.
            if (imp_avg_months is not None and imp_avg_months > 100) or (
                    exp_avg_months is not None and exp_avg_months > 100):
                mandatory_hh = "yes"
            else:
                mandatory_hh = "no"
            # Latest supplier bill, walking back through eras until one
            # with the relevant MPAN side and a bill is found.
            imp_latest_supplier_bill_date = None
            exp_latest_supplier_bill_date = None
            for is_import in (True, False):
                for er in (sess.query(Era).filter(
                        Era.supply == era.supply,
                        Era.start_date <= date).order_by(
                        Era.start_date.desc())):
                    if is_import:
                        if er.imp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.imp_supplier_contract
                    else:
                        if er.exp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.exp_supplier_contract
                    latest_bill_date = (sess.query(
                        Bill.finish_date).join(Batch).filter(
                        Bill.finish_date >= er.start_date,
                        Bill.finish_date <= hh_min(er.finish_date, date),
                        Bill.supply == supply,
                        Batch.contract == supplier_contract,
                    ).order_by(Bill.finish_date.desc()).first())
                    if latest_bill_date is not None:
                        latest_bill_date = hh_format(latest_bill_date[0])
                        if is_import:
                            imp_latest_supplier_bill_date = latest_bill_date
                        else:
                            exp_latest_supplier_bill_date = latest_bill_date
                        break
            # Installation date: earliest era with this meter serial number.
            meter_installation_date = (sess.query(func.min(
                Era.start_date)).filter(
                Era.supply == era.supply, Era.msn == era.msn).one()[0])
            ssc = era.ssc
            if ssc is None:
                ssc_code = ssc_description = num_registers = None
            else:
                ssc_code, ssc_description = ssc.code, ssc.description
                num_registers = (sess.query(MeasurementRequirement).filter(
                    MeasurementRequirement.ssc == ssc).count())
            vals = ([
                date, era.imp_mpan_core, era.exp_mpan_core,
                physical_site.code, physical_site.name,
                ", ".join(site_codes), ", ".join(site_names), supply.id,
                supply.source.code, generator_type_str,
                supply.gsp_group.code, supply.dno.dno_code,
                voltage_level_code, is_substation, metering_type,
                mandatory_hh, era.pc.code, era.mtc.code, era.cop.code,
                ssc_code, ssc_description, era.energisation_status.code,
                num_registers, mop_contract_name, mop_account,
                dc_contract_name, dc_account, era.msn,
                meter_installation_date, latest_normal_read_date,
                latest_normal_read_type, latest_dc_bill_date,
                latest_mop_bill_date, supply_start_date,
                supply_finish_date, era.properties,
            ] + channel_values + [
                era.imp_sc,
                None if era.imp_llfc is None else era.imp_llfc.code,
                None if era.imp_llfc is None else era.imp_llfc.description,
                None if era.imp_supplier_contract is None else
                era.imp_supplier_contract.name,
                era.imp_supplier_account, imp_avg_months,
                imp_latest_supplier_bill_date,
            ] + [
                era.exp_sc,
                None if era.exp_llfc is None else era.exp_llfc.code,
                None if era.exp_llfc is None else era.exp_llfc.description,
                None if era.exp_supplier_contract is None else
                era.exp_supplier_contract.name,
                era.exp_supplier_account, exp_avg_months,
                exp_latest_supplier_bill_date,
            ])
            writer.writerow([csv_make_val(v) for v in vals])
            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # NOTE(review): writer is unbound if the failure happened before
        # the file was opened — a NameError here would mask the original
        # traceback.  Same for f below.  Confirm and guard if so.
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
        os.rename(running_name, finished_name)
def content(contract_id, end_year, end_month, months, user):
    """Write a CSV of DC (data-collector) virtual bills, one row per era
    attached to the given DC contract, covering the ``months``-long period
    that ends with the month ``end_year``/``end_month``.

    Each row holds the era's MPAN cores, the chunk of the era that falls
    inside the period, the contract's virtual-bill values in title order,
    and then any leftover bill keys as trailing name/value pairs.  Output
    goes to a download file registered via ``chellow.dloads.make_names``.
    """
    caches = {}
    # Initialise everything the error handlers / cleanup may touch, so a
    # failure before the file is opened reports the original error rather
    # than a NameError/AttributeError from the handlers themselves.
    sess = f = writer = supply_source = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)
        # Period is [first hh of start month, last hh of end month].
        finish_date = utc_datetime(end_year, end_month, 1) + MONTH - HH
        start_date = utc_datetime(end_year, end_month, 1) - relativedelta(
            months=months - 1)
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            'dc_virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        bill_titles = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')()
        header_titles = [
            'Import MPAN Core', 'Export MPAN Core', 'Start Date',
            'Finish Date'
        ]
        vb_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill')
        writer.writerow(header_titles + bill_titles)
        for era in sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract).options(
                joinedload(Era.channels)).order_by(Era.supply_id):
            imp_mpan_core = era.imp_mpan_core
            # An era with no import MPAN core is treated as export-only.
            if imp_mpan_core is None:
                imp_mpan_core_str = ''
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = '' if exp_mpan_core is None else exp_mpan_core
            # Clip the era to the reporting period.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            vals = [
                imp_mpan_core_str, exp_mpan_core_str, hh_format(chunk_start),
                hh_format(chunk_finish)
            ]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            # Known titles first (deleting as we go), then any extra keys
            # the contract script produced, as name/value pairs.
            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))
            writer.writerow(vals)
            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = 'Problem '
        if supply_source is not None:
            msg += "with supply " + supply_source.mpan_core + \
                " starting at " + hh_format(supply_source.start_date) + " "
        msg += str(e)
        if writer is None:
            # Failed before the CSV writer existed; fall back to stderr.
            sys.stderr.write(msg + '\n')
        else:
            writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + '\n'
        if f is None:
            sys.stderr.write(msg)
        else:
            f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        # Only close/rename if the file was actually opened; running_name
        # is guaranteed to exist whenever f is not None.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(contract_id, days_hidden, user):
    """Write a CSV of channel snags for the given DC contract.

    Only snags that started more than ``days_hidden`` days ago are
    included.  One row is written per (snag, channel, era, supply, site)
    combination, restricted to the supply's physical site.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "channel_snags.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "Hidden Days",
            "Chellow Id",
            "Imp MPAN Core",
            "Exp MPAN Core",
            "Site Code",
            "Site Name",
            "Snag Description",
            "Import Related?",
            "Channel Type",
            "Start Date",
            "Finish Date",
            "Is Ignored?",
            "Days Since Snag Finished",
            "Duration Of Snag (Days)",
        )
        writer.writerow(titles)
        contract = Contract.get_dc_by_id(sess, contract_id)
        now = utc_datetime_now()
        # Snags newer than this are "hidden" from the report.
        cutoff_date = now - relativedelta(days=days_hidden)
        for snag, channel, era, supply, site_era, site in (sess.query(
                Snag, Channel, Era, Supply, SiteEra, Site).join(
                    Channel, Snag.channel_id == Channel.id).join(
                    Era, Channel.era_id == Era.id).join(
                    Supply, Era.supply_id == Supply.id).join(
                    SiteEra, Era.site_eras).join(
                    Site, SiteEra.site_id == Site.id).filter(
                    SiteEra.is_physical == true(),
                    Era.dc_contract == contract,
                    Snag.start_date < cutoff_date,
                ).order_by(
                    Site.code,
                    Supply.id,
                    Channel.imp_related,
                    Channel.channel_type,
                    Snag.description,
                    Snag.start_date,
                    Snag.id,
                )):
            snag_start = snag.start_date
            snag_finish = snag.finish_date
            imp_mc = "" if era.imp_mpan_core is None else era.imp_mpan_core
            exp_mc = "" if era.exp_mpan_core is None else era.exp_mpan_core
            if snag_finish is None:
                # Ongoing snag: duration runs up to now, no "age" yet.
                duration = now - snag_start
                age_of_snag = None
            else:
                duration = snag_finish - snag_start
                if hh_before(cutoff_date, snag_finish):
                    # Finished within the hidden window, so no age shown.
                    age_of_snag = None
                else:
                    delta = now - snag_finish
                    age_of_snag = delta.days
            vals = {
                "Hidden Days": days_hidden,
                "Chellow Id": snag.id,
                "Imp MPAN Core": imp_mc,
                "Exp MPAN Core": exp_mc,
                "Site Code": site.code,
                "Site Name": site.name,
                "Snag Description": snag.description,
                "Import Related?": channel.imp_related,
                "Channel Type": channel.channel_type,
                "Start Date": snag_start,
                "Finish Date": snag_finish,
                "Is Ignored?": snag.is_ignored,
                "Days Since Snag Finished": age_of_snag,
                "Duration Of Snag (Days)": duration.days,
            }
            writer.writerow(csv_make_val(vals[t]) for t in titles)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # NOTE(review): assumes the failure happened after `writer` was
        # created; an earlier failure would raise from this handler.
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def _process_g_bill_ids(
    sess,
    report_context,
    g_bill_ids,
    forecast_date,
    bill_titles,
    vbf,
    titles,
    csv_writer,
):
    """Check one gas bill (the lowest id in ``g_bill_ids``) against its
    recomputed virtual bill and write a comparison row to ``csv_writer``.

    The bill's covered period is widened to include any overlapping bills
    for the same supply; mutually-cancelling bill pairs are dropped.  Every
    bill consumed this way is removed from ``g_bill_ids`` (mutated in
    place) so the caller does not re-process it.
    """
    # Process the lowest-id remaining bill and take it off the work list.
    g_bill_id = list(sorted(g_bill_ids))[0]
    g_bill_ids.remove(g_bill_id)
    g_bill = sess.query(GBill).filter(GBill.id == g_bill_id).one()
    problem = ""
    g_supply = g_bill.g_supply
    read_dict = defaultdict(set)
    for g_read in g_bill.g_reads:
        # Each read's meter serial number should match every era it spans.
        if not all(g_read.msn == era.msn for era in g_supply.find_g_eras(
                sess, g_read.prev_date, g_read.pres_date)):
            problem += (
                f"The MSN {g_read.msn} of the register read {g_read.id} doesn't match "
                f"the MSN of all the relevant eras.")
        # Reads taken at the same instant on the same meter should agree
        # on read type.
        for dt, typ in [
            (g_read.pres_date, g_read.pres_type),
            (g_read.prev_date, g_read.prev_type),
        ]:
            typ_set = read_dict[str(dt) + "-" + g_read.msn]
            typ_set.add(typ)
            if len(typ_set) > 1:
                problem += f" Reads taken on {dt} have differing read types."
    # Accumulators for the "covered" (actual billed) side of the comparison.
    vals = {
        "covered_vat_gbp": Decimal("0.00"),
        "covered_net_gbp": Decimal("0.00"),
        "covered_gross_gbp": Decimal("0.00"),
        "covered_kwh": Decimal(0),
        "covered_start": g_bill.start_date,
        "covered_finish": g_bill.finish_date,
        "covered_bill_ids": [],
    }
    covered_primary_bill = None
    # Repeatedly widen [covered_start, covered_finish] until no overlapping
    # bill extends beyond it.
    enlarged = True
    while enlarged:
        enlarged = False
        covered_bills = OrderedDict(
            (b.id, b) for b in sess.query(GBill).filter(
                GBill.g_supply == g_supply,
                GBill.start_date <= vals["covered_finish"],
                GBill.finish_date >= vals["covered_start"],
            ).order_by(GBill.issue_date.desc(), GBill.start_date))
        # Drop pairs of bills that exactly cancel each other (same period,
        # negated kwh/net/vat/gross), iterating until a fixed point.
        num_covered = None
        while num_covered != len(covered_bills):
            num_covered = len(covered_bills)
            for a, b in combinations(tuple(covered_bills.values()), 2):
                if all((
                    a.start_date == b.start_date,
                    a.finish_date == b.finish_date,
                    a.kwh == -1 * b.kwh,
                    a.net == -1 * b.net,
                    a.vat == -1 * b.vat,
                    a.gross == -1 * b.gross,
                )):
                    for gb_id in a.id, b.id:
                        del covered_bills[gb_id]
                        if gb_id in g_bill_ids:
                            g_bill_ids.remove(gb_id)
                    break
        for covered_bill in covered_bills.values():
            # The primary bill (used for the virtual-bill data source) is
            # the first covered bill that carries register reads.
            if covered_primary_bill is None and len(covered_bill.g_reads) > 0:
                covered_primary_bill = covered_bill
            if covered_bill.start_date < vals["covered_start"]:
                vals["covered_start"] = covered_bill.start_date
                enlarged = True
                break
            if covered_bill.finish_date > vals["covered_finish"]:
                vals["covered_finish"] = covered_bill.finish_date
                enlarged = True
                break
    # Everything cancelled out: nothing to report for this bill.
    if len(covered_bills) == 0:
        return
    for covered_bill in covered_bills.values():
        if covered_bill.id in g_bill_ids:
            g_bill_ids.remove(covered_bill.id)
        vals["covered_bill_ids"].append(covered_bill.id)
        bdown = covered_bill.make_breakdown()
        vals["covered_kwh"] += covered_bill.kwh
        vals["covered_net_gbp"] += covered_bill.net
        vals["covered_vat_gbp"] += covered_bill.vat
        vals["covered_gross_gbp"] += covered_bill.gross
        for title in bill_titles:
            k = "covered_" + title
            v = bdown.get(title)
            if v is not None:
                if isinstance(v, list):
                    # List values accumulate as a set of distinct entries.
                    if k not in vals:
                        vals[k] = set()
                    vals[k].update(set(v))
                else:
                    try:
                        vals[k] += v
                    except KeyError:
                        vals[k] = v
                    except TypeError:
                        raise BadRequest(
                            f"Problem with bill {g_bill.id} and key {k} and value {v} "
                            f"for existing {vals[k]}")
            # These titles are sourced from the reads rather than the
            # breakdown, collected as sets of distinct values.
            if title in (
                "correction_factor",
                "calorific_value",
                "unit_code",
                "unit_factor",
            ):
                if k not in vals:
                    vals[k] = set()
                for g_read in covered_bill.g_reads:
                    if title in ("unit_code", "unit_factor"):
                        g_unit = g_read.g_unit
                        if title == "unit_code":
                            v = g_unit.code
                        else:
                            v = g_unit.factor
                    else:
                        v = getattr(g_read, title)
                    vals[k].add(v)
    # Recompute the virtual bill over every era touching the covered period.
    for g_era in (sess.query(GEra).filter(
            GEra.g_supply == g_supply,
            GEra.start_date <= vals["covered_finish"],
            or_(GEra.finish_date == null(),
                GEra.finish_date >= vals["covered_start"]),
    ).distinct()):
        site = (sess.query(Site).join(SiteGEra).filter(
            SiteGEra.is_physical == true(), SiteGEra.g_era == g_era).one())
        chunk_start = hh_max(vals["covered_start"], g_era.start_date)
        chunk_finish = hh_min(vals["covered_finish"], g_era.finish_date)
        data_source = chellow.g_engine.GDataSource(
            sess,
            chunk_start,
            chunk_finish,
            forecast_date,
            g_era,
            report_context,
            covered_primary_bill,
        )
        vbf(data_source)
        for k, v in data_source.bill.items():
            vk = "virtual_" + k
            try:
                if isinstance(v, set):
                    vals[vk].update(v)
                else:
                    vals[vk] += v
            except KeyError:
                vals[vk] = v
            except TypeError as detail:
                raise BadRequest(f"For key {vk} and value {v}. {detail}")
    # If the target bill cancelled out, use the earliest remaining covered
    # bill for the header columns.
    if g_bill.id not in covered_bills.keys():
        g_bill = covered_bills[sorted(covered_bills.keys())[0]]
    vals["batch"] = g_bill.g_batch.reference
    vals["bill_reference"] = g_bill.reference
    vals["bill_type"] = g_bill.bill_type.code
    vals["bill_start_date"] = g_bill.start_date
    vals["bill_finish_date"] = g_bill.finish_date
    vals["mprn"] = g_supply.mprn
    vals["supply_name"] = g_supply.name
    # NOTE(review): `site` comes from the last iteration of the g_era loop
    # above — presumably at least one era always overlaps the covered
    # period; verify, otherwise this raises NameError.
    vals["site_code"] = site.code
    vals["site_name"] = site.name
    for k, v in vals.items():
        if k == "covered_bill_ids":
            vals[k] = " | ".join(str(b) for b in v)
        else:
            vals[k] = csv_make_val(v)
    # difference_* columns sit two after the covered_* column and one after
    # the virtual_* column for the same title (see the titles construction).
    for i, title in enumerate(titles):
        if title.startswith("difference_"):
            try:
                covered_val = float(vals[titles[i - 2]])
                virtual_val = float(vals[titles[i - 1]])
                vals[title] = covered_val - virtual_val
            except KeyError:
                vals[title] = None
    csv_writer.writerow(
        [(vals.get(k) if vals.get(k) is not None else "") for k in titles])
def content(supply_id, start_date, finish_date, user):
    """Write a CSV of half-hourly virtual bills for one supply.

    For each half-hour in [start_date, finish_date] the MOP, DC, import
    supplier and (where present) export supplier virtual bills are
    computed and written as one row.  Column titles are the union of the
    virtual-bill titles over every era overlapping the period.
    """
    caches = {}
    # Initialise names used by the except/finally clauses so an early
    # failure (Session(), make_names, open) reports the original error
    # instead of raising NameError from the cleanup.  The sibling report
    # functions in this module follow the same pattern.
    sess = f = w = None
    try:
        sess = Session()
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            f"supply_virtual_bills_hh_{supply_id}.csv", user
        )
        f = open(running_name, mode="w", newline="")
        w = csv.writer(f, lineterminator="\n")
        # First pass over the eras: collect the union of bill titles for
        # each role, preserving first-seen order.
        mop_titles = []
        dc_titles = []
        imp_supplier_titles = []
        exp_supplier_titles = []
        for era in sess.execute(
            select(Era).where(
                Era.supply == supply,
                Era.start_date <= finish_date,
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
            )
        ).scalars():
            ds = chellow.computer.SupplySource(
                sess, era.start_date, era.start_date, forecast_date, era,
                True, caches
            )
            for t in ds.contract_func(
                era.mop_contract, "virtual_bill_titles"
            )():
                if t not in mop_titles:
                    mop_titles.append(t)
            for t in ds.contract_func(
                era.dc_contract, "virtual_bill_titles"
            )():
                if t not in dc_titles:
                    dc_titles.append(t)
            if era.imp_supplier_contract is not None:
                for t in ds.contract_func(
                    era.imp_supplier_contract, "virtual_bill_titles"
                )():
                    if t not in imp_supplier_titles:
                        imp_supplier_titles.append(t)
            if era.exp_supplier_contract is not None:
                # Export titles need an export-polarity data source.
                ds = chellow.computer.SupplySource(
                    sess,
                    era.start_date,
                    era.start_date,
                    forecast_date,
                    era,
                    False,
                    caches,
                )
                for t in ds.contract_func(
                    era.exp_supplier_contract, "virtual_bill_titles"
                )():
                    if t not in exp_supplier_titles:
                        exp_supplier_titles.append(t)
        titles = [
            "mpan_core",
            "site_code",
            "site_name",
            "hh_start",
        ]
        # Each role's columns are prefixed and separated by a blank column.
        for pref, t in (
            ("mop", mop_titles),
            ("dc", dc_titles),
            ("imp_supplier", imp_supplier_titles),
            ("exp_supplier", exp_supplier_titles),
        ):
            titles.append("")
            titles.extend([f"{pref}_{n}" for n in t])
        w.writerow(titles)
        # Second pass: one row per half-hour.
        for hh_start in hh_range(caches, start_date, finish_date):
            era = sess.execute(
                select(Era).where(
                    Era.supply == supply,
                    Era.start_date <= hh_start,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= hh_start,
                    ),
                )
            ).scalar_one()
            site = sess.execute(
                select(Site)
                .join(SiteEra)
                .where(SiteEra.era == era, SiteEra.is_physical == true())
            ).scalar_one()
            ds = chellow.computer.SupplySource(
                sess, hh_start, hh_start, forecast_date, era, True, caches
            )
            vals = {
                "mpan_core": ds.mpan_core,
                "site_code": site.code,
                "site_name": site.name,
                "hh_start": hh_format(ds.start_date),
            }
            ds.contract_func(era.mop_contract, "virtual_bill")(ds)
            for k, v in ds.mop_bill.items():
                vals[f"mop_{k}"] = v
            ds.contract_func(era.dc_contract, "virtual_bill")(ds)
            for k, v in ds.dc_bill.items():
                vals[f"dc_{k}"] = v
            if era.imp_supplier_contract is not None:
                ds.contract_func(era.imp_supplier_contract, "virtual_bill")(
                    ds)
                for k, v in ds.supplier_bill.items():
                    vals[f"imp_supplier_{k}"] = v
            if era.exp_supplier_contract is not None:
                # Fresh export-polarity data source for the export bill.
                ds = chellow.computer.SupplySource(
                    sess, hh_start, hh_start, forecast_date, era, False,
                    caches
                )
                ds.contract_func(era.exp_supplier_contract, "virtual_bill")(
                    ds)
                for k, v in ds.supplier_bill.items():
                    vals[f"exp_supplier_{k}"] = v
            w.writerow([csv_make_val(vals.get(t)) for t in titles])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if w is not None:
            w.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        # running_name exists whenever f does.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def _process_supply(
    sess,
    caches,
    supply_id,
    bill_map,
    forecast_date,
    contract,
    vbf,
    virtual_bill_titles,
    writer,
    titles,
    report_run,
):
    """Check every bill of one supply (ids in ``bill_map[supply_id]``)
    against recomputed virtual bills for ``contract``.

    Each processed bill's covered period is widened to all overlapping
    bills of the same market role; cancelled pairs are dropped.  A
    comparison row is written per bill, plus (at the end) one row per
    contiguous "gap" where the virtual bill charged an element that no
    covered bill did.  Rows also go into ``report_run``.
    """
    # gaps: element -> {hh start -> {has_virtual, has_covered, gbp}}
    gaps = {}
    # Cache of SupplySource objects keyed by (chunk, era, polarity, bill)
    # so vbf is evaluated only once per distinct combination.
    data_sources = {}
    market_role_code = contract.market_role.code
    bill_ids = bill_map[supply_id]
    while len(bill_ids) > 0:
        # Take the lowest remaining bill id.
        bill_id = list(sorted(bill_ids))[0]
        bill_ids.remove(bill_id)
        bill = (sess.query(Bill).filter(Bill.id == bill_id).options(
            joinedload(Bill.batch),
            joinedload(Bill.bill_type),
            joinedload(Bill.reads),
            joinedload(Bill.supply),
            joinedload(Bill.reads).joinedload(RegisterRead.present_type),
            joinedload(Bill.reads).joinedload(RegisterRead.previous_type),
        ).one())
        virtual_bill = {"problem": ""}
        supply = bill.supply
        read_dict = {}
        for read in bill.reads:
            # Day containing the present read, in half-hour terms.
            gen_start = read.present_date.replace(hour=0).replace(minute=0)
            gen_finish = gen_start + relativedelta(days=1) - HH
            msn_match = False
            read_msn = read.msn
            for read_era in supply.find_eras(sess, gen_start, gen_finish):
                if read_msn == read_era.msn:
                    msn_match = True
                    break
            if not msn_match:
                virtual_bill["problem"] += (
                    "The MSN " + read_msn + " of the register read " +
                    str(read.id) + " doesn't match the MSN of the era.")
            # Reads at the same instant on the same meter should share a
            # read type.
            for dt, typ in [
                (read.present_date, read.present_type),
                (read.previous_date, read.previous_type),
            ]:
                key = str(dt) + "-" + read.msn
                try:
                    if typ != read_dict[key]:
                        virtual_bill["problem"] += (
                            " Reads taken " + "on " + str(dt) +
                            " have differing read types.")
                except KeyError:
                    read_dict[key] = typ
        bill_start = bill.start_date
        bill_finish = bill.finish_date
        # The covered window starts as a single instant and is widened
        # below; note it deliberately starts at bill_start, not
        # bill_finish.
        covered_start = bill_start
        covered_finish = bill_start
        covered_bdown = {"sum-msp-kwh": 0, "net-gbp": 0, "vat-gbp": 0}
        vb_elems = set()
        enlarged = True
        while enlarged:
            enlarged = False
            covered_elems = find_elements(bill)
            covered_bills = OrderedDict((b.id, b) for b in sess.query(
                Bill).join(Batch).join(Contract).join(MarketRole).filter(
                    Bill.supply == supply,
                    Bill.start_date <= covered_finish,
                    Bill.finish_date >= covered_start,
                    MarketRole.code == market_role_code,
                ).order_by(Bill.start_date, Bill.issue_date))
            # Remove pairs of bills that exactly cancel (same period,
            # negated kwh/net/vat/gross) until none remain.
            while True:
                to_del = None
                for a, b in combinations(covered_bills.values(), 2):
                    if all((
                        a.start_date == b.start_date,
                        a.finish_date == b.finish_date,
                        a.kwh == -1 * b.kwh,
                        a.net == -1 * b.net,
                        a.vat == -1 * b.vat,
                        a.gross == -1 * b.gross,
                    )):
                        to_del = (a.id, b.id)
                        break
                if to_del is None:
                    break
                else:
                    for k in to_del:
                        del covered_bills[k]
                        bill_ids.discard(k)
            for k, covered_bill in tuple(covered_bills.items()):
                elems = find_elements(covered_bill)
                if elems.isdisjoint(covered_elems):
                    # A bill sharing no charge elements is unrelated —
                    # drop it (unless it's the bill under test).
                    if k != bill.id:
                        del covered_bills[k]
                        continue
                else:
                    covered_elems.update(elems)
                if covered_bill.start_date < covered_start:
                    covered_start = covered_bill.start_date
                    enlarged = True
                    break
                if covered_bill.finish_date > covered_finish:
                    covered_finish = covered_bill.finish_date
                    enlarged = True
                    break
        if len(covered_bills) == 0:
            continue
        primary_covered_bill = None
        for covered_bill in covered_bills.values():
            bill_ids.discard(covered_bill.id)
            covered_bdown["net-gbp"] += float(covered_bill.net)
            covered_bdown["vat-gbp"] += float(covered_bill.vat)
            covered_bdown["sum-msp-kwh"] += float(covered_bill.kwh)
            covered_rates = defaultdict(set)
            for k, v in loads(covered_bill.breakdown).items():
                if k in ("raw_lines", "raw-lines"):
                    continue
                if isinstance(v, list):
                    # Rate-like list values are collected as sets and
                    # collapsed after the loop.
                    covered_rates[k].update(set(v))
                else:
                    if isinstance(v, Decimal):
                        v = float(v)
                    try:
                        covered_bdown[k] += v
                    except KeyError:
                        covered_bdown[k] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(k) + " in " +
                            str([b.id for b in covered_bills.values()]) +
                            " the value " + str(v) +
                            " can't be added to the existing value " +
                            str(covered_bdown[k]) + ". " + str(detail))
                    if k.endswith("-gbp"):
                        elem = k[:-4]
                        covered_elems.add(elem)
                        add_gap(
                            caches,
                            gaps,
                            elem,
                            covered_bill.start_date,
                            covered_bill.finish_date,
                            False,
                            v,
                        )
            for k, v in covered_rates.items():
                # A single consistent rate survives; conflicts become None.
                covered_bdown[k] = v.pop() if len(v) == 1 else None
            # Primary covered bill = the one spanning the longest period;
            # it seeds the SupplySource below.
            if primary_covered_bill is None or (
                    (covered_bill.finish_date - covered_bill.start_date) >
                    (primary_covered_bill.finish_date -
                     primary_covered_bill.start_date)):
                primary_covered_bill = covered_bill
        metered_kwh = 0
        for era in (sess.query(Era).filter(
                Era.supply == supply,
                Era.start_date <= covered_finish,
                or_(Era.finish_date == null(),
                    Era.finish_date >= covered_start),
        ).distinct().options(
                joinedload(Era.channels),
                joinedload(Era.cop),
                joinedload(Era.dc_contract),
                joinedload(Era.exp_llfc),
                joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
                joinedload(Era.exp_supplier_contract),
                joinedload(Era.imp_llfc),
                joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
                joinedload(Era.imp_supplier_contract),
                joinedload(Era.mop_contract),
                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                joinedload(Era.pc),
                joinedload(Era.supply).joinedload(Supply.dno),
                joinedload(Era.supply).joinedload(Supply.gsp_group),
                joinedload(Era.supply).joinedload(Supply.source),
        )):
            chunk_start = hh_max(covered_start, era.start_date)
            chunk_finish = hh_min(covered_finish, era.finish_date)
            if contract not in (
                era.mop_contract,
                era.dc_contract,
                era.imp_supplier_contract,
                era.exp_supplier_contract,
            ):
                virtual_bill["problem"] += "".join((
                    "From ",
                    hh_format(chunk_start),
                    " to ",
                    hh_format(chunk_finish),
                    " the contract of ",
                    "the era doesn't match the contract of the ",
                    "bill.",
                ))
                continue
            # polarity True = import-side data source.
            if contract.market_role.code == "X":
                polarity = contract != era.exp_supplier_contract
            else:
                polarity = era.imp_supplier_contract is not None
            try:
                ds_key = (
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era.id,
                    polarity,
                    primary_covered_bill.id,
                )
                data_source = data_sources[ds_key]
            except KeyError:
                # First time for this combination: build it and run the
                # virtual-bill function once.
                data_source = data_sources[
                    ds_key] = chellow.computer.SupplySource(
                        sess,
                        chunk_start,
                        chunk_finish,
                        forecast_date,
                        era,
                        polarity,
                        caches,
                        primary_covered_bill,
                    )
                vbf(data_source)
            if data_source.measurement_type == "hh":
                metered_kwh += sum(
                    h["msp-kwh"] for h in data_source.hh_data)
            else:
                # Non-hh supplies: re-derive kWh without the covered bill
                # so the metered figure is independent of it.
                ds = chellow.computer.SupplySource(
                    sess,
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era,
                    polarity,
                    caches,
                )
                metered_kwh += sum(h["msp-kwh"] for h in ds.hh_data)
            # Pick the virtual bill matching the contract's market role.
            if market_role_code == "X":
                vb = data_source.supplier_bill
                vb_hhs = data_source.supplier_bill_hhs
            elif market_role_code == "C":
                vb = data_source.dc_bill
                vb_hhs = data_source.dc_bill_hhs
            elif market_role_code == "M":
                vb = data_source.mop_bill
                vb_hhs = data_source.mop_bill_hhs
            else:
                raise BadRequest("Odd market role.")
            for k, v in vb.items():
                try:
                    if isinstance(v, set):
                        virtual_bill[k].update(v)
                    else:
                        virtual_bill[k] += v
                except KeyError:
                    virtual_bill[k] = v
                except TypeError as detail:
                    raise BadRequest(
                        "For key " + str(k) + " and value " + str(v) +
                        ". " + str(detail))
            # Record every non-zero virtual charge per half-hour for the
            # gap analysis.
            for dt, bl in vb_hhs.items():
                for k, v in bl.items():
                    if all((k.endswith("-gbp"), k != "net-gbp", v != 0)):
                        add_gap(caches, gaps, k[:-4], dt, dt, True, v)
        for k in virtual_bill.keys():
            if k.endswith("-gbp"):
                vb_elems.add(k[:-4])
        # Map each element to all virtual-bill keys it prefixes; longest
        # elements first so e.g. "duos-availability" claims its keys
        # before "duos" could.
        long_map = {}
        vb_keys = set(virtual_bill.keys())
        for elem in sorted(vb_elems, key=len, reverse=True):
            els = long_map[elem] = set()
            for k in tuple(vb_keys):
                if k.startswith(elem + "-"):
                    els.add(k)
                    vb_keys.remove(k)
        # Drop virtual elements the covered bills never charged.
        for elem in vb_elems.difference(covered_elems):
            for k in long_map[elem]:
                del virtual_bill[k]
        # Recompute net-gbp from the surviving -gbp keys.
        try:
            del virtual_bill["net-gbp"]
        except KeyError:
            pass
        virtual_bill["net-gbp"] = sum(
            v for k, v in virtual_bill.items() if k.endswith("-gbp"))
        era = supply.find_era_at(sess, bill_finish)
        if era is None:
            imp_mpan_core = exp_mpan_core = None
            site_code = site_name = None
            virtual_bill["problem"] += (
                "This bill finishes before or after the supply. ")
        else:
            imp_mpan_core = era.imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.is_physical == true(), SiteEra.era == era).one())
            site_code = site.code
            site_name = site.name
        # Find bill to use for header data
        if bill.id not in covered_bills:
            for cbill in covered_bills.values():
                if bill.batch == cbill.batch:
                    bill = cbill
        values = [
            bill.batch.reference,
            bill.reference,
            bill.bill_type.code,
            bill.kwh,
            bill.net,
            bill.vat,
            bill_start,
            bill_finish,
            imp_mpan_core,
            exp_mpan_core,
            site_code,
            site_name,
            covered_start,
            covered_finish,
            " | ".join(sorted([str(k) for k in covered_bills.keys()])),
            metered_kwh,
        ]
        # Fixed-title columns: covered value, virtual value, and (for
        # -gbp titles) their difference.  Consumed entries are deleted so
        # only ad-hoc extras remain afterwards.
        for title in virtual_bill_titles:
            try:
                cov_val = covered_bdown[title]
                values.append(cov_val)
                del covered_bdown[title]
            except KeyError:
                cov_val = None
                values.append("")
            try:
                virt_val = virtual_bill[title]
                values.append(virt_val)
                del virtual_bill[title]
            except KeyError:
                virt_val = 0
                values.append("")
            if title.endswith("-gbp"):
                if isinstance(virt_val, (int, float, Decimal)):
                    if isinstance(cov_val, (int, float, Decimal)):
                        values.append(float(cov_val) - float(virt_val))
                    else:
                        values.append(0 - float(virt_val))
                else:
                    values.append(0)
        # Remaining (untitled) virtual keys go on the end as name/value
        # pairs, and into the report-run row under virtual-/covered-
        # prefixed titles.
        report_run_values = {}
        report_run_titles = list(titles)
        for title in sorted(virtual_bill.keys()):
            virt_val = virtual_bill[title]
            virt_title = "virtual-" + title
            values += [virt_title, virt_val]
            report_run_values[virt_title] = virt_val
            report_run_titles.append(virt_title)
            if title in covered_bdown:
                cov_title = "covered-" + title
                cov_val = covered_bdown[title]
                report_run_values[cov_title] = cov_val
                report_run_titles.append(cov_title)
                if title.endswith("-gbp"):
                    if isinstance(virt_val, (int, float, Decimal)):
                        if isinstance(cov_val, (int, float, Decimal)):
                            diff_val = float(cov_val) - float(virt_val)
                        else:
                            diff_val = 0 - float(virt_val)
                    else:
                        diff_val = 0
                    report_run_values[f"difference-{title}"] = diff_val
                    # Accumulate an overall TPR difference across titles.
                    t = "difference-tpr-gbp"
                    try:
                        report_run_values[t] += diff_val
                    except KeyError:
                        report_run_values[t] = diff_val
                        report_run_titles.append(t)
            else:
                cov_title, cov_val = "", ""
            values += [cov_title, cov_val]
        writer.writerow([csv_make_val(v) for v in values])
        report_run_values.update(dict(zip(titles, values)))
        report_run_values["bill_id"] = bill.id
        report_run_values["batch_id"] = bill.batch.id
        report_run_values["supply_id"] = supply.id
        report_run_values["site_id"] = None if site_code is None else site.id
        report_run.insert_row(sess, "", report_run_titles, report_run_values)
        # Mark charges from *all* overlapping bills (any market role) as
        # covered, so they don't show up as gaps below.
        for bill in sess.query(Bill).filter(
                Bill.supply == supply,
                Bill.start_date <= covered_finish,
                Bill.finish_date >= covered_start,
        ):
            for k, v in loads(bill.breakdown).items():
                if k.endswith("-gbp"):
                    add_gap(
                        caches,
                        gaps,
                        k[:-4],
                        bill.start_date,
                        bill.finish_date,
                        False,
                        v,
                    )
        # Avoid long-running transactions
        sess.commit()
    # Coalesce consecutive half-hours where a virtual charge exists but no
    # covered charge does, into contiguous "clumps" per element.
    clumps = []
    for element, elgap in sorted(gaps.items()):
        for start_date, hhgap in sorted(elgap.items()):
            if hhgap["has_virtual"] and not hhgap["has_covered"]:
                if len(clumps) == 0 or not all((
                    clumps[-1]["element"] == element,
                    clumps[-1]["finish_date"] + HH == start_date,
                )):
                    clumps.append({
                        "element": element,
                        "start_date": start_date,
                        "finish_date": start_date,
                        "gbp": hhgap["gbp"],
                    })
                else:
                    clumps[-1]["finish_date"] = start_date
    # One synthetic "missing_bill" row per clump.
    for i, clump in enumerate(clumps):
        vals = {}
        for title in titles:
            if title.startswith("difference-") and title.endswith("-gbp"):
                vals[title] = 0
            else:
                vals[title] = None
        vals["covered-problem"] = "_".join((
            "missing",
            clump["element"],
            "supplyid",
            str(supply.id),
            "from",
            hh_format(clump["start_date"]),
        ))
        # NOTE(review): imp_mpan_core / exp_mpan_core / site_code here are
        # left over from the last processed bill — presumably intentional;
        # confirm.
        vals["imp-mpan-core"] = imp_mpan_core
        vals["exp-mpan-core"] = exp_mpan_core
        vals["batch"] = "missing_bill"
        vals["bill-start-date"] = hh_format(clump["start_date"])
        vals["bill-finish-date"] = hh_format(clump["finish_date"])
        vals["difference-net-gbp"] = clump["gbp"]
        writer.writerow(csv_make_val(vals[title]) for title in titles)
        vals["bill_id"] = None
        vals["batch_id"] = None
        vals["supply_id"] = supply.id
        vals["site_id"] = None if site_code is None else site.id
        report_run.insert_row(sess, "", titles, vals)
    # Avoid long-running transactions
    sess.commit()
def content(batch_id, bill_id, user):
    """Write a CSV comparing each bill in a batch (or one given bill)
    against a recomputed virtual bill for the batch's contract.

    Each bill's covered period is widened to all overlapping bills of the
    same market role (cancelled pairs dropped), the contract's
    ``virtual_bill`` function is re-run over the matching eras, and one
    row per bill is written with covered/virtual/difference columns.
    """
    caches = {}
    tmp_file = sess = bill = None
    forecast_date = Datetime.max.replace(tzinfo=pytz.utc)
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')
        bills = sess.query(Bill).options(
            joinedload(Bill.supply),
            subqueryload(Bill.reads).joinedload(RegisterRead.present_type),
            subqueryload(Bill.reads).joinedload(RegisterRead.previous_type))
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = bills.filter(Bill.batch == batch).order_by(Bill.reference)
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = bills.filter(Bill.id == bill.id)
            batch = bill.batch
        contract = batch.contract
        market_role_code = contract.market_role.code
        vbf = chellow.computer.contract_func(caches, contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")
        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()
        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh',
            'bill-net-gbp', 'bill-vat-gbp', 'bill-start-date',
            'bill-finish-date', 'bill-mpan-core', 'site-code', 'site-name',
            'covered-from', 'covered-to', 'covered-bills', 'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)
        writer.writerow(titles)
        for bill in bills:
            problem = ''
            supply = bill.supply
            read_dict = {}
            for read in bill.reads:
                # The day containing the present read, in half-hours.
                gen_start = read.present_date.replace(hour=0).replace(
                    minute=0)
                gen_finish = gen_start + relativedelta(days=1) - HH
                msn_match = False
                read_msn = read.msn
                for read_era in supply.find_eras(
                        sess, gen_start, gen_finish):
                    if read_msn == read_era.msn:
                        msn_match = True
                        break
                if not msn_match:
                    problem += "The MSN " + read_msn + \
                        " of the register read " + str(read.id) + \
                        " doesn't match the MSN of the era."
                # Reads at the same instant on the same meter should
                # share a read type.
                for dt, type in [
                        (read.present_date, read.present_type),
                        (read.previous_date, read.previous_type)]:
                    key = str(dt) + "-" + read.msn
                    try:
                        if type != read_dict[key]:
                            problem += " Reads taken on " + str(dt) + \
                                " have differing read types."
                    except KeyError:
                        read_dict[key] = type
            bill_start = bill.start_date
            bill_finish = bill.finish_date
            era = supply.find_era_at(sess, bill_finish)
            if era is None:
                raise BadRequest(
                    "Extraordinary! There isn't an era for the bill " +
                    str(bill.id) + ".")
            values = [
                batch.reference, bill.reference, bill.bill_type.code,
                bill.kwh, bill.net, bill.vat, hh_format(bill_start),
                hh_format(bill_finish), era.imp_mpan_core]
            covered_start = bill_start
            covered_finish = bill_finish
            covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}
            # Widen the covered period until no overlapping bill of the
            # same market role extends beyond it.
            enlarged = True
            while enlarged:
                enlarged = False
                covered_bills = OrderedDict(
                    (b.id, b) for b in sess.query(Bill).join(Batch).
                    join(Contract).join(MarketRole).filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start,
                        MarketRole.code == market_role_code).order_by(
                        Bill.start_date, Bill.issue_date))
                # Remove exactly-cancelling bill pairs.
                while True:
                    to_del = None
                    for a, b in combinations(covered_bills.values(), 2):
                        if all((
                                a.start_date == b.start_date,
                                a.finish_date == b.finish_date,
                                a.kwh == -1 * b.kwh,
                                a.net == -1 * b.net,
                                a.vat == -1 * b.vat,
                                a.gross == -1 * b.gross)):
                            to_del = (a.id, b.id)
                            break
                    if to_del is None:
                        break
                    else:
                        for k in to_del:
                            del covered_bills[k]
                for covered_bill in covered_bills.values():
                    if covered_bill.start_date < covered_start:
                        covered_start = covered_bill.start_date
                        enlarged = True
                        break
                    if covered_bill.finish_date > covered_finish:
                        covered_finish = covered_bill.finish_date
                        enlarged = True
                        break
            # The bill under test cancelled out: skip it.
            if bill.id not in covered_bills:
                continue
            primary_covered_bill = None
            for covered_bill in covered_bills.values():
                covered_bdown['net-gbp'] += float(covered_bill.net)
                covered_bdown['vat-gbp'] += float(covered_bill.vat)
                covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                if len(covered_bill.breakdown) > 0:
                    covered_rates = defaultdict(set)
                    # SECURITY NOTE: the breakdown is stored as Python
                    # source and executed with eval(); it must only ever
                    # come from trusted importers, never user input.
                    for k, v in eval(covered_bill.breakdown, {}).items():
                        if k.endswith('rate'):
                            covered_rates[k].add(v)
                        elif k != 'raw-lines':
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " the value " +
                                    str(v) +
                                    " can't be added to the existing "
                                    "value " + str(covered_bdown[k]) +
                                    ". " + str(detail))
                    for k, v in covered_rates.items():
                        # One consistent rate survives; conflicts -> None.
                        covered_bdown[k] = v.pop() if len(v) == 1 else None
                # Longest-spanning covered bill seeds the SupplySource.
                if primary_covered_bill is None or (
                        (covered_bill.finish_date -
                         covered_bill.start_date) >
                        (primary_covered_bill.finish_date -
                         primary_covered_bill.start_date)):
                    primary_covered_bill = covered_bill
            virtual_bill = {}
            metered_kwh = 0
            for era in sess.query(Era).filter(
                    Era.supply == supply, Era.imp_mpan_core != null(),
                    Era.start_date <= covered_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= covered_start),
                    or_(
                        Era.mop_contract == contract,
                        Era.hhdc_contract == contract,
                        Era.imp_supplier_contract == contract,
                        Era.exp_supplier_contract == contract)).distinct():
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era == era).one()
                chunk_start = hh_max(covered_start, era.start_date)
                chunk_finish = hh_min(covered_finish, era.finish_date)
                data_source = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    True, caches, primary_covered_bill)
                if data_source.measurement_type == 'hh':
                    metered_kwh += sum(
                        h['msp-kwh'] for h in data_source.hh_data)
                else:
                    # Non-hh: re-derive kWh without the covered bill so
                    # the metered figure is independent of it.
                    ds = chellow.computer.SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date,
                        era, True, caches)
                    metered_kwh += sum(h['msp-kwh'] for h in ds.hh_data)
                vbf(data_source)
                # Pick the virtual bill for the contract's market role.
                if market_role_code == 'X':
                    vb = data_source.supplier_bill
                elif market_role_code == 'C':
                    vb = data_source.dc_bill
                elif market_role_code == 'M':
                    vb = data_source.mop_bill
                else:
                    raise BadRequest("Odd market role.")
                for k, v in vb.items():
                    try:
                        if isinstance(v, set):
                            virtual_bill[k].update(v)
                        else:
                            virtual_bill[k] += v
                    except KeyError:
                        virtual_bill[k] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(k) + " and value " + str(v) +
                            ". " + str(detail))
            # NOTE(review): `site` comes from the last era iteration —
            # presumably at least one era always matches; verify,
            # otherwise this raises NameError.
            values += [
                site.code, site.name, hh_format(covered_start),
                hh_format(covered_finish),
                ':'.join(
                    str(i).replace(',', '')
                    for i in covered_bills.keys()),
                metered_kwh]
            for title in virtual_bill_titles:
                try:
                    cov_val = covered_bdown[title]
                    values.append(cov_val)
                    del covered_bdown[title]
                except KeyError:
                    cov_val = None
                    values.append('')
                try:
                    virt_val = csv_make_val(virtual_bill[title])
                    values.append(virt_val)
                    del virtual_bill[title]
                except KeyError:
                    virt_val = None
                    values.append('')
                if title.endswith('-gbp'):
                    if isinstance(virt_val, (int, float)):
                        if isinstance(cov_val, (int, float)):
                            values.append(cov_val - virt_val)
                        else:
                            values.append(0 - virt_val)
                    else:
                        values.append('')
            # Leftover virtual keys as trailing name/value pairs, paired
            # with the matching covered value where one exists.
            for title in sorted(virtual_bill.keys()):
                virt_val = csv_make_val(virtual_bill[title])
                values += ['virtual-' + title, virt_val]
                if title in covered_bdown:
                    values += ['covered-' + title, covered_bdown[title]]
                else:
                    values += ['', '']
            writer.writerow(values)
    except BadRequest as e:
        if bill is None:
            prefix = "Problem: "
        else:
            prefix = "Problem with bill " + str(bill.id) + ':'
        if tmp_file is None:
            sys.stderr.write(prefix + e.description + '\n')
        else:
            tmp_file.write(prefix + e.description)
    except BaseException:  # was a bare except: — same scope, explicit now
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        if tmp_file is not None:
            tmp_file.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        # Only close/rename if the file was opened; running_name exists
        # whenever tmp_file does.
        if tmp_file is not None:
            tmp_file.close()
            os.rename(running_name, finished_name)