def test_Supply_get_by_MPAN_core(sess):
    mpan_core = "22 1737 1873 221"
    with pytest.raises(
            BadRequest,
            match=f"The MPAN core {mpan_core} is not set up in Chellow."):
        Supply.get_by_mpan_core(sess, mpan_core)
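# Illustrative sketch only (not Chellow code): pytest.raises(match=...) treats
# the pattern as a regular expression searched against the exception message,
# so a plain-text message like the one in the test above matches literally.
# The exception class and lookup below are hypothetical stand-ins.
import pytest


class HypotheticalBadRequest(Exception):
    pass


def _get_by_mpan_core(known_cores, mpan_core):
    # Raise in the same style as Supply.get_by_mpan_core when the core is
    # unknown.
    if mpan_core not in known_cores:
        raise HypotheticalBadRequest(
            f"The MPAN core {mpan_core} is not set up in Chellow.")
    return known_cores[mpan_core]


def test_unknown_core_raises():
    with pytest.raises(HypotheticalBadRequest, match="is not set up"):
        _get_by_mpan_core({}, "22 1737 1873 221")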
def content(year, month, months, supply_id, sess):
    try:
        finish_date = Datetime(year, month, 1, tzinfo=pytz.utc) + \
            relativedelta(months=1) - HH
        start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months - 1)
        reads = sess.query(RegisterRead).filter(
            or_(
                and_(
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date),
                and_(
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date))) \
            .join(Bill).order_by(Bill.supply_id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            reads = reads.filter(Bill.supply == supply)
        yield ','.join(
            (
                'Duration Start', 'Duration Finish', 'Supply Id',
                'Import MPAN Core', 'Export MPAN Core', 'Batch Reference',
                'Bill Id', 'Bill Reference', 'Bill Issue Date', 'Bill Type',
                'Register Read Id', 'TPR', 'Coefficient',
                'Previous Read Date', 'Previous Read Value',
                'Previous Read Type', 'Present Read Date',
                'Present Read Value', 'Present Read Type')) + '\n'
        for read in reads:
            bill = read.bill
            supply = bill.supply
            batch = bill.batch
            era = supply.find_era_at(sess, bill.start_date)
            if era is None:
                eras = sess.query(Era).filter(
                    Era.supply == supply).order_by(Era.start_date).all()
                if bill.start_date < eras[0].start_date:
                    era = eras[0]
                else:
                    era = eras[-1]
            yield ','.join(
                '"' + ('' if val is None else str(val)) + '"'
                for val in [
                    hh_format(start_date), hh_format(finish_date), supply.id,
                    era.imp_mpan_core, era.exp_mpan_core, batch.reference,
                    bill.id, bill.reference, hh_format(bill.issue_date),
                    bill.bill_type.code, read.id,
                    'md' if read.tpr is None else read.tpr.code,
                    read.coefficient, hh_format(read.previous_date),
                    read.previous_value, read.previous_type.code,
                    hh_format(read.present_date), read.present_value,
                    read.present_type.code]) + '\n'
    except BaseException:
        yield traceback.format_exc()
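# Illustrative sketch only: how the [start_date, finish_date] window above is
# derived for a report covering `months` months ending in `year`/`month`. HH
# is assumed to be a 30-minute timedelta, as in the report code.
from datetime import datetime, timedelta, timezone

from dateutil.relativedelta import relativedelta

HH_EXAMPLE = timedelta(minutes=30)


def month_window(year, month, months):
    # Finish at the last half-hour of the requested month; start at the first
    # instant of the earliest month in the range.
    finish = datetime(year, month, 1, tzinfo=timezone.utc) + \
        relativedelta(months=1) - HH_EXAMPLE
    start = datetime(year, month, 1, tzinfo=timezone.utc) - \
        relativedelta(months=months - 1)
    return start, finish


# For example, month_window(2020, 3, 2) spans 2020-02-01 00:00 to
# 2020-03-31 23:30 UTC inclusive.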
def do_get(sess):
    supply_id = req_int('supply_id')
    months = req_int('months')
    finish_year = req_int("finish_year")
    finish_month = req_int("finish_month")
    supply = Supply.get_by_id(sess, supply_id)
    finish_date = Datetime(finish_year, finish_month, 1, tzinfo=pytz.utc) + \
        relativedelta(months=1) - HH
    start_date = Datetime(finish_year, finish_month, 1, tzinfo=pytz.utc) - \
        relativedelta(months=months - 1)
    era = sess.query(Era).filter(
        Era.supply == supply, Era.start_date <= finish_date,
        or_(
            Era.finish_date == null(),
            Era.finish_date >= start_date)).order_by(
        Era.start_date.desc()).first()
    keys = {
        True: {
            'ACTIVE': 'import_active',
            'REACTIVE_IMP': 'import_reactive_imp',
            'REACTIVE_EXP': 'import_reactive_exp'},
        False: {
            'ACTIVE': 'export_active',
            'REACTIVE_IMP': 'export_reactive_imp',
            'REACTIVE_EXP': 'export_reactive_exp'}}
    hh_data = iter(sess.query(HhDatum).join(Channel).join(Era).filter(
        Era.supply == supply, HhDatum.start_date >= start_date,
        HhDatum.start_date <= finish_date).order_by(HhDatum.start_date))
    hh_lines = []
    hh_date = start_date
    hh_datum = next(hh_data, None)
    while hh_date <= finish_date:
        hh_line = {'timestamp': hh_date}
        hh_lines.append(hh_line)
        while hh_datum is not None and hh_datum.start_date == hh_date:
            channel = hh_datum.channel
            hh_line[keys[channel.imp_related][channel.channel_type]] = hh_datum
            hh_datum = next(hh_data, None)
        hh_date += HH
    return render_template(
        'report_17.html', supply=supply, era=era, hh_lines=hh_lines)
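# Illustrative sketch only: the loop above merges one ordered stream of
# half-hourly readings onto one row per half-hour. The same pattern is shown
# here on plain tuples, with hypothetical (timestamp, channel_key, value)
# readings standing in for HhDatum rows.
from datetime import datetime, timedelta, timezone

HH_EXAMPLE = timedelta(minutes=30)


def build_rows(start, finish, readings):
    # `readings` must be sorted by timestamp; each half-hour gets one dict.
    rows = []
    stream = iter(readings)
    reading = next(stream, None)
    ts = start
    while ts <= finish:
        row = {'timestamp': ts}
        rows.append(row)
        while reading is not None and reading[0] == ts:
            row[reading[1]] = reading[2]
            reading = next(stream, None)
        ts += HH_EXAMPLE
    return rows


start = datetime(2020, 1, 1, tzinfo=timezone.utc)
rows = build_rows(
    start, start + HH_EXAMPLE,
    [(start, 'import_active', 1.5),
     (start + HH_EXAMPLE, 'import_active', 1.2)])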
def content(supply_id, start_date, finish_date, user): forecast_date = datetime.datetime.max.replace(tzinfo=pytz.utc) caches = {} f = sess = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'supplies_duration.csv', user) f = open(running_name, "w") f.write( ','.join( ( "Supply Id", "Supply Name", "Source", "Generator Type", "Site Ids", "Site Names", "From", "To", "PC", "MTC", "CoP", "SSC", "Normal Reads", "Type", "Import LLFC", "Import MPAN Core", "Import Supply Capacity", "Import Supplier", "Import Total MSP kWh", "Import Non-actual MSP kWh", "Import Total GSP kWh", "Import MD / kW", "Import MD Date", "Import MD / kVA", "Import Bad HHs", "Export LLFC", "Export MPAN Core", "Export Supply Capacity", "Export Supplier", "Export Total MSP kWh", "Export Non-actual MSP kWh", "Export GSP kWh", "Export MD / kW", "Export MD Date", "Export MD / kVA", "Export Bad HHs"))) supplies = sess.query(Supply).join(Era).filter( or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date).order_by(Supply.id).distinct() if supply_id is not None: supplies = supplies.filter( Supply.id == Supply.get_by_id(sess, supply_id).id) for supply in supplies: site_codes = '' site_names = '' eras = supply.find_eras(sess, start_date, finish_date) era = eras[-1] for site_era in era.site_eras: site = site_era.site site_codes = site_codes + site.code + ', ' site_names = site_names + site.name + ', ' site_codes = site_codes[:-2] site_names = site_names[:-2] if supply.generator_type is None: generator_type = '' else: generator_type = supply.generator_type.code ssc = era.ssc ssc_code = '' if ssc is None else ssc.code prime_reads = set() for read, rdate in chain( sess.query( RegisterRead, RegisterRead.previous_date).join( RegisterRead.previous_type).join(Bill).join( BillType).filter( Bill.supply == supply, BillType.code != 'W', RegisterRead.previous_date >= start_date, RegisterRead.previous_date <= finish_date, ReadType.code.in_(NORMAL_READ_TYPES)), sess.query( RegisterRead, RegisterRead.present_date).join( RegisterRead.present_type).join(Bill).join( BillType).filter( Bill.supply == supply, BillType.code != 'W', RegisterRead.present_date >= start_date, RegisterRead.present_date <= finish_date, ReadType.code.in_(NORMAL_READ_TYPES))): prime_bill = sess.query(Bill).join(BillType).filter( Bill.supply == supply, Bill.start_date <= read.bill.finish_date, Bill.finish_date >= read.bill.start_date, Bill.reads.any()).order_by( Bill.issue_date.desc(), BillType.code).first() if prime_bill.id == read.bill.id: prime_reads.add( str(rdate) + "_" + read.msn) supply_type = era.make_meter_category() if eras[0].start_date > start_date: chunk_start = eras[0].start_date else: chunk_start = start_date if hh_before(finish_date, era.finish_date): chunk_finish = finish_date else: chunk_finish = era.finish_date num_hh = int( (chunk_finish - (chunk_start - HH)).total_seconds() / (30 * 60)) f.write( '\n' + ','.join( ('"' + str(value) + '"') for value in [ supply.id, supply.name, supply.source.code, generator_type, site_codes, site_names, hh_format(start_date), hh_format(finish_date), era.pc.code, era.mtc.code, era.cop.code, ssc_code, len(prime_reads), supply_type]) + ',') f.write( mpan_bit( sess, supply, True, num_hh, eras, chunk_start, chunk_finish, forecast_date, caches) + "," + mpan_bit( sess, supply, False, num_hh, eras, chunk_start, chunk_finish, forecast_date, caches)) except: f.write(traceback.format_exc()) finally: sess.close() f.close() os.rename(running_name, finished_name)
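# Illustrative sketch only: the chunk boundaries above clamp an era to the
# report window, and num_hh counts half-hours inclusively (hence the extra
# HH). A hypothetical stand-alone version of that calculation:
from datetime import datetime, timedelta, timezone

HH_EXAMPLE = timedelta(minutes=30)


def count_half_hours(chunk_start, chunk_finish):
    # Both ends are half-hour-aligned and inclusive, so a window from 00:00 to
    # 23:30 on the same day yields 48.
    return int(
        (chunk_finish - (chunk_start - HH_EXAMPLE)).total_seconds() /
        (30 * 60))


day_start = datetime(2020, 1, 1, tzinfo=timezone.utc)
assert count_half_hours(
    day_start, day_start + timedelta(hours=23, minutes=30)) == 48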
def content(year, supply_id, user): f = sess = None try: sess = Session() fname = ['crc', str(year), str(year + 1)] if supply_id is None: fname.append('all_supplies') else: fname.append('supply_' + str(supply_id)) running_name, finished_name = chellow.dloads.make_names( '_'.join(fname) + '.csv', user) f = open(running_name, "w") ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP'] f.write( ','.join( ( 'Chellow Supply Id', 'MPAN Core', 'Site Id', 'Site Name', 'From', 'To', 'NHH Breakdown', 'Actual HH Normal Days', 'Actual AMR Normal Days', 'Actual NHH Normal Days', 'Actual Unmetered Normal Days', 'Max HH Normal Days', 'Max AMR Normal Days', 'Max NHH Normal Days', 'Max Unmetered Normal Days', 'Total Actual Normal Days', 'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh', 'NHH kWh', 'Unmetered kwh', 'HH Filled kWh', 'AMR Filled kWh', 'Total kWh', 'Note')) + '\n') year_start = Datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = year_start + relativedelta(years=1) - HH supplies = sess.query(Supply).join(Era).join(Source).filter( Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(), Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)).distinct().order_by(Supply.id) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) meter_types = ('hh', 'amr', 'nhh', 'unmetered') for supply in supplies: total_kwh = dict([(mtype, 0) for mtype in meter_types]) filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')]) normal_days = dict([(mtype, 0) for mtype in meter_types]) max_normal_days = dict([(mtype, 0) for mtype in meter_types]) breakdown = '' for era in sess.query(Era).filter( Era.supply_id == supply.id, Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)): meter_type = era.make_meter_category() era_start = era.start_date period_start = era_start \ if era_start > year_start else year_start era_finish = era.finish_date if hh_after(era_finish, year_finish): period_finish = year_finish else: period_finish = era_finish max_normal_days[meter_type] += ( (period_finish - period_start).total_seconds() + 60 * 30) / (60 * 60 * 24) mpan_core = era.imp_mpan_core site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era_id == era.id).one() if meter_type == 'nhh': read_list = [] read_keys = {} pairs = [] prior_pres_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date < period_start, BillType.code != 'W').order_by( RegisterRead.present_date.desc())) prior_prev_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date < period_start, BillType.code != 'W').order_by( RegisterRead.previous_date.desc())) next_pres_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date >= period_start, BillType.code != 'W').order_by( RegisterRead.present_date)) next_prev_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType). 
join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date >= period_start, BillType.code != 'W').order_by( RegisterRead.previous_date)) for is_forwards in [False, True]: if is_forwards: pres_reads = next_pres_reads prev_reads = next_prev_reads read_list.reverse() else: pres_reads = prior_pres_reads prev_reads = prior_prev_reads prime_pres_read = None prime_prev_read = None while True: while prime_pres_read is None: try: pres_read = next(pres_reads) except StopIteration: break pres_date = pres_read.present_date pres_msn = pres_read.msn read_key = '_'.join([str(pres_date), pres_msn]) if read_key in read_keys: continue pres_bill = sess.query(Bill).join(BillType). \ filter( Bill.reads.any(), Bill.supply == supply, Bill.finish_date >= pres_read.bill.start_date, Bill.start_date <= pres_read.bill.finish_date, BillType.code != 'W').order_by( Bill.issue_date.desc(), BillType.code).first() if pres_bill != pres_read.bill: continue reads = dict( ( read.tpr.code, float(read.present_value) * float(read.coefficient)) for read in sess.query(RegisterRead). filter( RegisterRead.units == 0, RegisterRead.bill == pres_bill, RegisterRead.present_date == pres_date, RegisterRead.msn == pres_msn)) prime_pres_read = { 'date': pres_date, 'reads': reads, 'msn': pres_msn} read_keys[read_key] = None while prime_prev_read is None: try: prev_read = next(prev_reads) except StopIteration: break prev_date = prev_read.previous_date prev_msn = prev_read.msn read_key = '_'.join([str(prev_date), prev_msn]) if read_key in read_keys: continue prev_bill = sess.query(Bill).join(BillType). \ filter( Bill.reads.any(), Bill.supply_id == supply.id, Bill.finish_date >= prev_read.bill.start_date, Bill.start_date <= prev_read.bill.finish_date, BillType.code != 'W').order_by( Bill.issue_date.desc(), BillType.code).first() if prev_bill != prev_read.bill: continue reads = dict( ( read.tpr.code, float(read.previous_value) * float(read.coefficient)) for read in sess.query(RegisterRead). 
filter( RegisterRead.units == 0, RegisterRead.bill_id == prev_bill.id, RegisterRead.previous_date == prev_date, RegisterRead.msn == prev_msn)) prime_prev_read = { 'date': prev_date, 'reads': reads, 'msn': prev_msn} read_keys[read_key] = None if prime_pres_read is None and \ prime_prev_read is None: break elif prime_pres_read is None: read_list.append(prime_prev_read) prime_prev_read = None elif prime_prev_read is None: read_list.append(prime_pres_read) prime_pres_read = None else: if is_forwards: if prime_pres_read['date'] <= \ prime_prev_read['date']: read_list.append(prime_pres_read) prime_pres_read = None else: read_list.append(prime_prev_read) prime_prev_read = None else: if prime_prev_read['date'] >= \ prime_pres_read['date']: read_list.append(prime_prev_read) prime_prev_read = None else: read_list.append(prime_pres_read) prime_pres_read = None if len(read_list) > 1: if is_forwards: aft_read = read_list[-2] fore_read = read_list[-1] else: aft_read = read_list[-1] fore_read = read_list[-2] if aft_read['msn'] == fore_read['msn'] and \ set(aft_read['reads'].keys()) == \ set(fore_read['reads'].keys()): pair_start_date = aft_read['date'] + HH pair_finish_date = fore_read['date'] num_hh = ( ( pair_finish_date + HH - pair_start_date).total_seconds() ) / (30 * 60) tprs = {} for tpr_code, initial_val in \ aft_read['reads'].items(): end_val = fore_read['reads'][tpr_code] kwh = end_val - initial_val if kwh < 0: digits = int( math.log10(initial_val)) + 1 kwh = 10 ** digits + kwh tprs[tpr_code] = kwh / num_hh pairs.append( { 'start-date': pair_start_date, 'finish-date': pair_finish_date, 'tprs': tprs}) if len(pairs) > 0 and \ (not is_forwards or ( is_forwards and read_list[-1]['date'] > period_finish)): break breakdown += 'read list - \n' + str(read_list) + "\n" if len(pairs) == 0: pairs.append( { 'start-date': period_start, 'finish-date': period_finish, 'tprs': {'00001': 0}}) else: for pair in pairs: pair_start = pair['start-date'] pair_finish = pair['finish-date'] if pair_start >= year_start and \ pair_finish <= year_finish: if pair_start > period_start: block_start = pair_start else: block_start = period_start if pair_finish < period_finish: block_finish = pair_finish else: block_finish = period_finish if block_start <= block_finish: normal_days[meter_type] += ( ( block_finish - block_start ).total_seconds() + 60 * 30) / (60 * 60 * 24) # smooth for i in range(1, len(pairs)): pairs[i - 1]['finish-date'] = pairs[i]['start-date'] \ - HH # stretch if pairs[0]['start-date'] > period_start: pairs[0]['start-date'] = period_start if pairs[-1]['finish-date'] < period_finish: pairs[-1]['finish-date'] = period_finish # chop pairs = [ pair for pair in pairs if not pair['start-date'] > period_finish and not pair['finish-date'] < period_start] # squash if pairs[0]['start-date'] < period_start: pairs[0]['start-date'] = period_start if pairs[-1]['finish-date'] > period_finish: pairs[-1]['finish-date'] = period_finish for pair in pairs: pair_hhs = ( ( pair['finish-date'] - pair['start-date'] ).total_seconds() + 30 * 60) / (60 * 30) pair['pair_hhs'] = pair_hhs for tpr_code, pair_kwh in pair['tprs'].items(): total_kwh[meter_type] += pair_kwh * pair_hhs breakdown += 'pairs - \n' + str(pairs) elif meter_type in ('hh', 'amr'): period_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value). 
join(Channel).filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish).order_by( HhDatum.id)) year_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value). join(Channel).join(Era).filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Era.supply == supply, HhDatum.start_date >= year_start, HhDatum.start_date <= year_finish).order_by( HhDatum.id)) period_sum_kwhs = sum(period_kwhs) year_sum_kwhs = sum(year_kwhs) period_len_kwhs = len(period_kwhs) year_len_kwhs = len(year_kwhs) total_kwh[meter_type] += period_sum_kwhs period_hhs = ( period_finish + HH - period_start ).total_seconds() / (60 * 30) if year_len_kwhs > 0: filled_kwh[meter_type] += year_sum_kwhs / \ year_len_kwhs * (period_hhs - period_len_kwhs) normal_days[meter_type] += sess.query( func.count(HhDatum.value)).join(Channel). \ filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish, HhDatum.status == 'A').one()[0] / 48 elif meter_type == 'unmetered': bills = sess.query(Bill).filter( Bill.supply == supply, Bill.finish_date >= period_start, Bill.start_date <= period_finish) for bill in bills: total_kwh[meter_type] += kwh normal_days[meter_type] += ( ( period_finish - period_start).total_seconds() + 60 * 30) / (60 * 60 * 24) # for full year 183 total_normal_days = sum(normal_days.values()) total_max_normal_days = sum(max_normal_days.values()) is_normal = total_normal_days / total_max_normal_days >= 183 / 365 f.write( ','.join( '"' + str(val) + '"' for val in [ supply.id, mpan_core, site.code, site.name, hh_format(year_start), hh_format(year_finish), breakdown] + [ normal_days[type] for type in meter_types] + [ max_normal_days[type] for type in meter_types] + [ total_normal_days, total_max_normal_days, "Actual" if is_normal else "Estimated"] + [total_kwh[type] for type in meter_types] + [filled_kwh[type] for type in ('hh', 'amr')] + [sum(total_kwh.values()) + sum(filled_kwh.values()), '']) + '\n') # avoid a long running transaction sess.rollback() except: msg = traceback.format_exc() sys.stderr.write(msg + '\n') f.write("Problem " + msg) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
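# Illustrative sketch only: the "smooth / stretch / chop / squash" steps in
# the NHH branch above massage a list of read pairs so that, joined end to
# end, they exactly cover [period_start, period_finish]. Shown here on plain
# dicts with a 30-minute HH; assumes at least one pair, as the report
# guarantees by inserting a default pair when none are found.
from datetime import datetime, timedelta, timezone

HH_EXAMPLE = timedelta(minutes=30)


def fit_pairs(pairs, period_start, period_finish):
    # smooth: close any gap by extending each pair up to the next one
    for i in range(1, len(pairs)):
        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] - HH_EXAMPLE
    # stretch: make the first and last pairs reach the period boundaries
    if pairs[0]['start-date'] > period_start:
        pairs[0]['start-date'] = period_start
    if pairs[-1]['finish-date'] < period_finish:
        pairs[-1]['finish-date'] = period_finish
    # chop: drop pairs that now lie wholly outside the period
    pairs = [
        p for p in pairs
        if not p['start-date'] > period_finish
        and not p['finish-date'] < period_start]
    # squash: clip the remaining ends to the period
    if pairs[0]['start-date'] < period_start:
        pairs[0]['start-date'] = period_start
    if pairs[-1]['finish-date'] > period_finish:
        pairs[-1]['finish-date'] = period_finish
    return pairs


jan = datetime(2020, 1, 1, tzinfo=timezone.utc)
fitted = fit_pairs(
    [{'start-date': jan + timedelta(days=2),
      'finish-date': jan + timedelta(days=3)}],
    jan, jan + timedelta(days=30))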
def _process_MTR(elements, headers):
    if headers["message_type"] != "UTLBIL":
        return

    sess = headers["sess"]
    try:
        mpan_core = headers["mpan_core"]
    except KeyError:
        raise BadRequest("The mpan_core can't be found for this bill.")

    start_date = headers["start_date"]
    reads = headers["reads"]
    supply = Supply.get_by_mpan_core(sess, mpan_core)
    era = supply.find_era_at(sess, start_date)
    bill_elements = []
    if era is None:
        era = supply.find_last_era(sess)
    if era is None:
        imp_mpan_core = ""
        ssc = Ssc.get_by_code(sess, "0393")
    else:
        imp_mpan_core = era.imp_mpan_core
        ssc = Ssc.get_by_code(sess, "0393") if era.ssc is None else era.ssc

    try:
        ssc_lookup = imp_mpan_core
        tpr_map = SSC_MAP[ssc_lookup]
    except KeyError:
        ssc_lookup = ssc.code
        try:
            tpr_map = SSC_MAP[ssc_lookup]
        except KeyError:
            raise BadRequest(f"The SSC {ssc_lookup} isn't in the SSC_MAP.")

    for read in reads:
        desc = read["tpr_code"]
        try:
            read["tpr_code"] = tpr_map[desc]
        except KeyError:
            raise BadRequest(
                f"The description {desc} isn't in the SSC_MAP "
                f"for the SSC {ssc_lookup}.")

    for el in headers["bill_elements"]:
        if el.desc == "Energy Charges":
            # If it's an unmetered supply there is only one charge line in
            # the EDI, no matter how many TPRs there are. Therefore we split
            # the charge evenly between the TPRs.
            mrs = ssc.measurement_requirements
            num_mrs = len(mrs)
            gbp = el.gbp / num_mrs
            cons = el.cons / num_mrs
            for mr in mrs:
                tpr_code = mr.tpr.code
                titles = (
                    tpr_code + "-gbp", tpr_code + "-rate", tpr_code + "-kwh")
                bill_elements.append(
                    BillElement(
                        gbp=gbp, rate=el.rate, cons=cons, titles=titles,
                        desc=None))
        else:
            if el.titles is None:
                try:
                    tpr = tpr_map[el.desc]
                except KeyError:
                    raise BadRequest(
                        f"The billing element description {el.desc} "
                        f"isn't in the SSC_MAP for the SSC {ssc_lookup}.")
                titles = (tpr + "-gbp", tpr + "-rate", tpr + "-kwh")
            else:
                titles = el.titles
            bill_elements.append(
                BillElement(
                    gbp=el.gbp, titles=titles, rate=el.rate, cons=el.cons,
                    desc=None))

    breakdown = headers["breakdown"]
    for bill_el in bill_elements:
        eln_gbp, eln_rate, eln_cons = bill_el.titles
        try:
            breakdown[eln_gbp] += bill_el.gbp
        except KeyError:
            breakdown[eln_gbp] = bill_el.gbp
        rate = bill_el.rate
        if eln_rate is not None and rate is not None:
            try:
                rates = breakdown[eln_rate]
            except KeyError:
                rates = breakdown[eln_rate] = set()
            rates.add(rate)
        cons = bill_el.cons
        if eln_cons is not None and cons is not None:
            try:
                breakdown[eln_cons] += cons
            except KeyError:
                breakdown[eln_cons] = cons

    return {
        "kwh": headers["kwh"],
        "reference": headers["reference"],
        "mpan_core": mpan_core,
        "issue_date": headers["issue_date"],
        "account": headers["account"],
        "start_date": start_date,
        "finish_date": headers["finish_date"],
        "net": headers["net"],
        "vat": headers["vat"],
        "gross": headers["gross"],
        "breakdown": breakdown,
        "reads": reads,
        "bill_type_code": headers["bill_type_code"],
    }
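# Illustrative sketch only: how the breakdown dict above accumulates each bill
# element under its "-gbp" / "-rate" / "-kwh" titles. The element type and
# values below are hypothetical; in the parser the titles come from TPR codes
# and the eln_rate / eln_cons titles may be None, which this sketch omits.
from collections import namedtuple

SketchElement = namedtuple("SketchElement", "titles gbp rate cons")


def accumulate(breakdown, elements):
    for el in elements:
        eln_gbp, eln_rate, eln_cons = el.titles
        breakdown[eln_gbp] = breakdown.get(eln_gbp, 0) + el.gbp
        if el.rate is not None:
            # Rates are collected as a set, since several lines may repeat
            # the same rate.
            breakdown.setdefault(eln_rate, set()).add(el.rate)
        if el.cons is not None:
            breakdown[eln_cons] = breakdown.get(eln_cons, 0) + el.cons
    return breakdown


accumulate(
    {},
    [SketchElement(("00210-gbp", "00210-rate", "00210-kwh"), 12.5, 0.14, 89)])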
def content(running_name, finished_name, date, supply_id, mpan_cores): sess = None try: sess = Session() f = open(running_name, mode="w", newline="") writer = csv.writer(f, lineterminator="\n") titles = ( "Date", "Import MPAN Core", "Export MPAN Core", "Physical Site Id", "Physical Site Name", "Other Site Ids", "Other Site Names", "Supply Id", "Source", "Generator Type", "GSP Group", "DNO Name", "Voltage Level", "Is Substations", "Metering Type", "Mandatory HH", "PC", "MTC", "CoP", "SSC Code", "SSC Description", "Energisation Status", "Number Of Registers", "MOP Contract", "Mop Account", "DC Contract", "DC Account", "Meter Serial Number", "Meter Installation Date", "Latest Normal Meter Read Date", "Latest Normal Meter Read Type", "Latest DC Bill Date", "Latest MOP Bill Date", "Supply Start Date", "Supply Finish Date", "Properties", "Import ACTIVE?", "Import REACTIVE_IMPORT?", "Import REACTIVE_EXPORT?", "Export ACTIVE?", "Export REACTIVE_IMPORT?", "Export REACTIVE_EXPORT?", "Import Agreed Supply Capacity (kVA)", "Import LLFC Code", "Import LLFC Description", "Import Supplier Contract", "Import Supplier Account", "Import Mandatory kW", "Latest Import Supplier Bill Date", "Export Agreed Supply Capacity (kVA)", "Export LLFC Code", "Export LLFC Description", "Export Supplier Contract", "Export Supplier Account", "Export Mandatory kW", "Latest Export Supplier Bill Date", ) writer.writerow(titles) NORMAL_READ_TYPES = ("N", "C", "N3") year_start = date + HH - relativedelta(years=1) era_ids = (sess.query(Era.id).filter( Era.start_date <= date, or_(Era.finish_date == null(), Era.finish_date >= date), ).order_by(Era.supply_id)) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) era_ids = era_ids.filter(Era.supply == supply) if mpan_cores is not None: era_ids = era_ids.filter( or_(Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) for (era_id, ) in era_ids: era, supply, generator_type = (sess.query( Era, Supply, GeneratorType).join( Supply, Era.supply_id == Supply.id).outerjoin( GeneratorType, Supply.generator_type_id == GeneratorType.id).filter( Era.id == era_id).options( joinedload(Era.channels), joinedload(Era.cop), joinedload(Era.dc_contract), joinedload(Era.exp_llfc), joinedload(Era.exp_supplier_contract), joinedload(Era.imp_llfc), joinedload(Era.imp_supplier_contract), joinedload(Era.mop_contract), joinedload(Era.mtc), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.pc), joinedload(Era.site_eras).joinedload( SiteEra.site), joinedload(Era.ssc), joinedload(Era.energisation_status), joinedload(Era.supply).joinedload( Supply.source), joinedload(Era.supply).joinedload( Supply.gsp_group), joinedload(Era.supply).joinedload(Supply.dno), ).one()) site_codes = [] site_names = [] for site_era in era.site_eras: if site_era.is_physical: physical_site = site_era.site else: site = site_era.site site_codes.append(site.code) site_names.append(site.name) sup_eras = (sess.query(Era).filter(Era.supply == supply).order_by( Era.start_date).all()) supply_start_date = sup_eras[0].start_date supply_finish_date = sup_eras[-1].finish_date if era.imp_mpan_core is None: voltage_level_code = era.exp_llfc.voltage_level.code is_substation = era.exp_llfc.is_substation else: voltage_level_code = era.imp_llfc.voltage_level.code is_substation = era.imp_llfc.is_substation if generator_type is None: generator_type_str = "" else: generator_type_str = generator_type.code metering_type = era.meter_category if metering_type in ("nhh", "amr"): latest_prev_normal_read = ( 
sess.query(RegisterRead).join(Bill).join( RegisterRead.previous_type).filter( ReadType.code.in_(NORMAL_READ_TYPES), RegisterRead.previous_date <= date, Bill.supply_id == supply.id, ).order_by(RegisterRead.previous_date.desc()).options( joinedload(RegisterRead.previous_type)).first()) latest_pres_normal_read = ( sess.query(RegisterRead).join(Bill).join( RegisterRead.present_type).filter( ReadType.code.in_(NORMAL_READ_TYPES), RegisterRead.present_date <= date, Bill.supply == supply, ).order_by(RegisterRead.present_date.desc()).options( joinedload(RegisterRead.present_type)).first()) if latest_prev_normal_read is None and latest_pres_normal_read is None: latest_normal_read_date = None latest_normal_read_type = None elif (latest_pres_normal_read is not None and latest_prev_normal_read is None): latest_normal_read_date = latest_pres_normal_read.present_date latest_normal_read_type = latest_pres_normal_read.present_type.code elif (latest_pres_normal_read is None and latest_prev_normal_read is not None): latest_normal_read_date = latest_prev_normal_read.previous_date latest_normal_read_type = latest_prev_normal_read.previous_type.code elif (latest_pres_normal_read.present_date > latest_prev_normal_read.previous_date): latest_normal_read_date = latest_pres_normal_read.present_date latest_normal_read_type = latest_pres_normal_read.present_type.code else: latest_normal_read_date = latest_prev_normal_read.previous_date latest_normal_read_type = latest_prev_normal_read.previous_type.code if latest_normal_read_date is not None: latest_normal_read_date = hh_format( latest_normal_read_date) else: latest_normal_read_date = metering_type latest_normal_read_type = None mop_contract = era.mop_contract mop_contract_name = mop_contract.name mop_account = era.mop_account latest_mop_bill_date = (sess.query( Bill.finish_date).join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == mop_contract, ).order_by(Bill.finish_date.desc()).first()) if latest_mop_bill_date is not None: latest_mop_bill_date = hh_format(latest_mop_bill_date[0]) dc_contract = era.dc_contract dc_contract_name = dc_contract.name dc_account = era.dc_account latest_dc_bill_date = (sess.query( Bill.finish_date).join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == dc_contract, ).order_by(Bill.finish_date.desc()).first()) if latest_dc_bill_date is not None: latest_dc_bill_date = hh_format(latest_dc_bill_date[0]) channel_values = [] for imp_related in [True, False]: for channel_type in CHANNEL_TYPES: if era.find_channel(sess, imp_related, channel_type) is None: channel_values.append("false") else: channel_values.append("true") imp_avg_months = None exp_avg_months = None for is_import in [True, False]: if metering_type == "nhh": continue params = { "supply_id": supply.id, "year_start": year_start, "year_finish": date, "is_import": is_import, } month_mds = tuple(md[0] * 2 for md in sess.execute( """ select max(hh_datum.value) as md from hh_datum join channel on (hh_datum.channel_id = channel.id) join era on (channel.era_id = era.id) where era.supply_id = :supply_id and hh_datum.start_date >= :year_start and hh_datum.start_date <= :year_finish and channel.channel_type = 'ACTIVE' and channel.imp_related = :is_import group by extract(month from (hh_datum.start_date at time zone 'utc')) order by md desc limit 3 """, params=params, )) avg_months = sum(month_mds) if len(month_mds) > 0: avg_months /= len(month_mds) if is_import: imp_avg_months = avg_months else: exp_avg_months = avg_months if 
(imp_avg_months is not None and imp_avg_months > 100) or (exp_avg_months is not None and exp_avg_months > 100): mandatory_hh = "yes" else: mandatory_hh = "no" imp_latest_supplier_bill_date = None exp_latest_supplier_bill_date = None for is_import in (True, False): for er in (sess.query(Era).filter( Era.supply == era.supply, Era.start_date <= date).order_by( Era.start_date.desc())): if is_import: if er.imp_mpan_core is None: break else: supplier_contract = er.imp_supplier_contract else: if er.exp_mpan_core is None: break else: supplier_contract = er.exp_supplier_contract latest_bill_date = (sess.query( Bill.finish_date).join(Batch).filter( Bill.finish_date >= er.start_date, Bill.finish_date <= hh_min(er.finish_date, date), Bill.supply == supply, Batch.contract == supplier_contract, ).order_by(Bill.finish_date.desc()).first()) if latest_bill_date is not None: latest_bill_date = hh_format(latest_bill_date[0]) if is_import: imp_latest_supplier_bill_date = latest_bill_date else: exp_latest_supplier_bill_date = latest_bill_date break meter_installation_date = (sess.query(func.min( Era.start_date)).filter(Era.supply == era.supply, Era.msn == era.msn).one()[0]) ssc = era.ssc if ssc is None: ssc_code = ssc_description = num_registers = None else: ssc_code, ssc_description = ssc.code, ssc.description num_registers = (sess.query(MeasurementRequirement).filter( MeasurementRequirement.ssc == ssc).count()) vals = ([ date, era.imp_mpan_core, era.exp_mpan_core, physical_site.code, physical_site.name, ", ".join(site_codes), ", ".join(site_names), supply.id, supply.source.code, generator_type_str, supply.gsp_group.code, supply.dno.dno_code, voltage_level_code, is_substation, metering_type, mandatory_hh, era.pc.code, era.mtc.code, era.cop.code, ssc_code, ssc_description, era.energisation_status.code, num_registers, mop_contract_name, mop_account, dc_contract_name, dc_account, era.msn, meter_installation_date, latest_normal_read_date, latest_normal_read_type, latest_dc_bill_date, latest_mop_bill_date, supply_start_date, supply_finish_date, era.properties, ] + channel_values + [ era.imp_sc, None if era.imp_llfc is None else era.imp_llfc.code, None if era.imp_llfc is None else era.imp_llfc.description, None if era.imp_supplier_contract is None else era.imp_supplier_contract.name, era.imp_supplier_account, imp_avg_months, imp_latest_supplier_bill_date, ] + [ era.exp_sc, None if era.exp_llfc is None else era.exp_llfc.code, None if era.exp_llfc is None else era.exp_llfc.description, None if era.exp_supplier_contract is None else era.exp_supplier_contract.name, era.exp_supplier_account, exp_avg_months, exp_latest_supplier_bill_date, ]) writer.writerow([csv_make_val(v) for v in vals]) # Avoid a long-running transaction sess.rollback() except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) writer.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
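# Illustrative sketch only: the "Mandatory HH" flag above averages the three
# highest monthly maximum demands over the previous year; half-hourly kWh
# values are doubled to give kW. A hypothetical plain-Python version:
def is_mandatory_hh(monthly_max_kwh, threshold_kw=100):
    # monthly_max_kwh: one maximum half-hourly kWh value per month
    month_mds = sorted((v * 2 for v in monthly_max_kwh), reverse=True)[:3]
    if not month_mds:
        return False
    return sum(month_mds) / len(month_mds) > threshold_kw


assert is_mandatory_hh([60, 55, 52, 40]) is True   # 120, 110, 104 kW
assert is_mandatory_hh([30, 20]) is False          # 60, 40 kW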
def content(user): sess = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( "ecoes_comparison.csv", user) f = open(running_name, mode="w", newline="") writer = csv.writer(f, lineterminator="\n") props = Contract.get_non_core_by_name( sess, "configuration").make_properties() ECOES_KEY = "ecoes" try: ecoes_props = props[ECOES_KEY] except KeyError: raise BadRequest( f"The property {ECOES_KEY} cannot be found in the configuration " f"properties.") for key in ("user_name", "password", "prefix", "ignore_mpan_cores"): try: ecoes_props[key] except KeyError: raise BadRequest( f"The property {key} cannot be found in the 'ecoes' section of " f"the configuration properties.") ignore_mpan_cores = ecoes_props["ignore_mpan_cores"] proxies = props.get("proxies", {}) s = requests.Session() s.verify = False r = s.get(ecoes_props["prefix"], proxies=proxies) r = s.post( ecoes_props["prefix"], data={ "Username": ecoes_props["user_name"], "Password": ecoes_props["password"], }, allow_redirects=False, ) imp_mpans = [ v.imp_mpan_core for (v, ) in sess.execute( select(Era).join(Supply).join(Source).join(Supply.dno).filter( Party.dno_code.notin_(("88", "99")), Era.finish_date == null(), Source.code != "3rd-party", Era.imp_mpan_core.notin_(ignore_mpan_cores), Era.imp_mpan_core != null(), ).distinct().order_by(Era.imp_mpan_core)) ] exp_mpans = [ v.exp_mpan_core for (v, ) in sess.execute( select(Era).join(Supply).join(Source).join(Supply.dno).filter( Party.dno_code.notin_(("88", "99")), Era.finish_date == null(), Source.code != "3rd-party", Era.exp_mpan_core != null(), Era.exp_mpan_core.notin_(ignore_mpan_cores), ).distinct().order_by(Era.exp_mpan_core)) ] mpans = imp_mpans + exp_mpans r = s.get( ecoes_props["prefix"] + "NonDomesticCustomer/ExportPortfolioMPANs?fileType=csv", proxies=proxies, ) titles = ( "MPAN Core", "MPAN Core No Spaces", "ECOES PC", "Chellow PC", "ECOES MTC", "Chellow MTC", "ECOES LLFC", "Chellow LLFC", "ECOES SSC", "Chellow SSC", "ECOES Energisation Status", "Chellow Energisation Status", "ECOES Supplier", "Chellow Supplier", "ECOES DC", "Chellow DC", "ECOES MOP", "Chellow MOP", "ECOES GSP Group", "Chellow GSP Group", "ECOES MSN", "Chellow MSN", "ECOES Meter Type", "Chellow Meter Type", "Problem", ) writer.writerow(titles) parser = iter(csv.reader(r.text.splitlines(True))) next(parser) # Skip titles for values in parser: problem = "" ecoes_titles = [ "mpan-core", "address-line-1", "address-line-2", "address-line-3", "address-line-4", "address-line-5", "address-line-6", "address-line-7", "address-line-8", "address-line-9", "post-code", "supplier", "registration-from", "mtc", "mtc-date", "llfc", "llfc-from", "pc", "ssc", "measurement-class", "energisation-status", "da", "dc", "mop", "mop-appoint-date", "gsp-group", "gsp-effective-from", "dno", "msn", "meter-install-date", "meter-type", "map-id", ] ecoes = dict(zip(ecoes_titles, map(str.strip, values))) mpan_spaces = " ".join(( ecoes["mpan-core"][:2], ecoes["mpan-core"][2:6], ecoes["mpan-core"][6:10], ecoes["mpan-core"][-3:], )) if mpan_spaces in ignore_mpan_cores: continue try: ecoes_es = ecoes["energisation-status"] except KeyError as e: print(r.text) raise e ecoes_disconnected = ecoes_es == "" current_chell = mpan_spaces in mpans if ecoes_disconnected and current_chell: problem += "Disconnected in ECOES, but current in Chellow. " elif not ecoes_disconnected and not current_chell: problem += f"In ECOES (as {ecoes_es}) but disconnected in Chellow. 
" if current_chell: mpans.remove(mpan_spaces) era = sess.execute( select(Era).filter( Era.finish_date == null(), or_( Era.imp_mpan_core == mpan_spaces, Era.exp_mpan_core == mpan_spaces, ), ).options( joinedload(Era.supply).joinedload(Supply.gsp_group), joinedload(Era.mop_contract).joinedload( Contract.party).joinedload(Party.participant), joinedload(Era.dc_contract).joinedload( Contract.party).joinedload(Party.participant), joinedload(Era.imp_supplier_contract).joinedload( Contract.party).joinedload(Party.participant), joinedload(Era.exp_supplier_contract).joinedload( Contract.party).joinedload(Party.participant), joinedload(Era.pc), joinedload(Era.imp_llfc), joinedload(Era.exp_llfc), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.ssc), joinedload(Era.energisation_status), joinedload(Era.channels), )).scalar() if era.imp_mpan_core == mpan_spaces: supplier_contract = era.imp_supplier_contract llfc = era.imp_llfc else: supplier_contract = era.exp_supplier_contract llfc = era.exp_llfc chellow_pc = era.pc.code try: if int(ecoes["pc"]) != int(chellow_pc): problem += "The PCs don't match. " except ValueError: problem += "Can't parse the PC. " chellow_mtc = era.mtc.code try: if int(ecoes["mtc"]) != int(chellow_mtc): problem += "The MTCs don't match. " except ValueError: problem += "Can't parse the MTC. " chellow_llfc = llfc.code if ecoes["llfc"].zfill(3) != chellow_llfc: problem += "The LLFCs don't match. " chellow_ssc = era.ssc if chellow_ssc is None: chellow_ssc = "" chellow_ssc_int = None else: chellow_ssc = chellow_ssc.code chellow_ssc_int = int(chellow_ssc) if len(ecoes["ssc"]) > 0: ecoes_ssc_int = int(ecoes["ssc"]) else: ecoes_ssc_int = None if ecoes_ssc_int != chellow_ssc_int and not ( ecoes_ssc_int is None and chellow_ssc_int is None): problem += "The SSCs don't match. " chellow_es = era.energisation_status.code if ecoes_es != chellow_es: problem += "The energisation statuses don't match. " chellow_supplier = supplier_contract.party.participant.code if chellow_supplier != ecoes["supplier"]: problem += "The supplier codes don't match. " dc_contract = era.dc_contract if dc_contract is None: chellow_dc = "" else: chellow_dc = dc_contract.party.participant.code if chellow_dc != ecoes["dc"]: problem += "The DC codes don't match. " mop_contract = era.mop_contract if mop_contract is None: chellow_mop = "" else: chellow_mop = mop_contract.party.participant.code if chellow_mop != ecoes["mop"]: problem += "The MOP codes don't match. " chellow_gsp_group = era.supply.gsp_group.code if chellow_gsp_group != ecoes["gsp-group"]: problem += "The GSP group codes don't match. " chellow_msn = era.msn if chellow_msn is None: chellow_msn = "" if chellow_msn != ecoes["msn"]: problem += "The meter serial numbers don't match. " chellow_meter_type = _meter_type(era) if chellow_meter_type != ecoes["meter-type"]: problem += ( "The meter types don't match. 
See " "https://dtc.mrasco.com/DataItem.aspx?ItemCounter=0483 " ) else: chellow_pc = "" chellow_mtc = "" chellow_llfc = "" chellow_ssc = "" chellow_es = "" chellow_supplier = "" chellow_dc = "" chellow_mop = "" chellow_gsp_group = "" chellow_msn = "" chellow_meter_type = "" if len(problem) > 0: writer.writerow([ mpan_spaces, ecoes["mpan-core"], ecoes["pc"], chellow_pc, ecoes["mtc"], chellow_mtc, ecoes["llfc"], chellow_llfc, ecoes["ssc"], chellow_ssc, ecoes["energisation-status"], chellow_es, ecoes["supplier"], chellow_supplier, ecoes["dc"], chellow_dc, ecoes["mop"], chellow_mop, ecoes["gsp-group"], chellow_gsp_group, ecoes["msn"], chellow_msn, ecoes["meter-type"], chellow_meter_type, problem, ]) sess.expunge_all() for mpan_core in mpans: supply = Supply.get_by_mpan_core(sess, mpan_core) era = supply.find_era_at(sess, None) if era.imp_mpan_core == mpan_core: supplier_contract = era.imp_supplier_contract llfc = era.imp_llfc else: supplier_contract = era.exp_supplier_contract llfc = era.exp_llfc ssc = "" if era.ssc is None else era.ssc.code es = era.energisation_status.code dc_contract = era.dc_contract if dc_contract is None: dc = "" else: dc = dc_contract.party.participant.code mop_contract = era.mop_contract if mop_contract is None: mop = "" else: mop = mop_contract.party.participant.code msn = "" if era.msn is None else era.msn meter_type = _meter_type(era) writer.writerow([ mpan_core, mpan_core.replace(" ", ""), "", era.pc.code, "", era.mtc.code, "", llfc.code, "", ssc, "", es, "", supplier_contract.party.participant.code, "", dc, "", mop, "", supply.gsp_group.code, "", msn, "", meter_type, "In Chellow, but not in ECOES.", ]) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) writer.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
def content(supply_id, start_date, finish_date, user): forecast_date = to_utc(Datetime.max) caches = {} f = sess = era = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'supplies_duration.csv', user) f = open(running_name, mode='w', newline='') w = csv.writer(f, lineterminator='\n') w.writerow( ( "Era Start", "Era Finish", "Supply Id", "Supply Name", "Source", "Generator Type", "Site Code", "Site Name", "Associated Site Codes", "From", "To", "PC", "MTC", "CoP", "SSC", "Properties", "MOP Contract", "MOP Account", "DC Contract", "DC Account", "Normal Reads", "Type", "Supply Start", "Supply Finish", "Import LLFC", "Import MPAN Core", "Import Supply Capacity", "Import Supplier", "Import Total MSP kWh", "Import Non-actual MSP kWh", "Import Total GSP kWh", "Import MD / kW", "Import MD Date", "Import MD / kVA", "Import Bad HHs", "Export LLFC", "Export MPAN Core", "Export Supply Capacity", "Export Supplier", "Export Total MSP kWh", "Export Non-actual MSP kWh", "Export GSP kWh", "Export MD / kW", "Export MD Date", "Export MD / kVA", "Export Bad HHs")) eras = sess.query(Era).filter( or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date).order_by( Era.supply_id, Era.start_date).options( joinedload(Era.supply), joinedload(Era.supply).joinedload(Supply.source), joinedload(Era.supply).joinedload(Supply.generator_type), joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level), joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level), joinedload(Era.imp_llfc), joinedload(Era.exp_llfc), joinedload(Era.mop_contract), joinedload(Era.dc_contract), joinedload(Era.channels), joinedload(Era.site_eras).joinedload(SiteEra.site), joinedload(Era.pc), joinedload(Era.cop), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.imp_supplier_contract), joinedload(Era.exp_supplier_contract), joinedload(Era.ssc), joinedload(Era.site_eras)) if supply_id is not None: eras = eras.filter(Era.supply == Supply.get_by_id(sess, supply_id)) for era in eras: supply = era.supply site_codes = set() site = None for site_era in era.site_eras: if site_era.is_physical: site = site_era.site else: site_codes.add(site_era.site.code) sup_eras = sess.query(Era).filter( Era.supply == supply).order_by(Era.start_date).all() supply_start = sup_eras[0].start_date supply_finish = sup_eras[-1].finish_date if supply.generator_type is None: generator_type = '' else: generator_type = supply.generator_type.code ssc = era.ssc ssc_code = '' if ssc is None else ssc.code prime_reads = set() for read, rdate in chain( sess.query( RegisterRead, RegisterRead.previous_date).join( RegisterRead.previous_type).join(Bill).join( BillType).filter( Bill.supply == supply, BillType.code != 'W', RegisterRead.previous_date >= start_date, RegisterRead.previous_date <= finish_date, ReadType.code.in_(NORMAL_READ_TYPES)).options( joinedload(RegisterRead.bill)), sess.query( RegisterRead, RegisterRead.present_date).join( RegisterRead.present_type).join(Bill).join( BillType).filter( Bill.supply == supply, BillType.code != 'W', RegisterRead.present_date >= start_date, RegisterRead.present_date <= finish_date, ReadType.code.in_(NORMAL_READ_TYPES)).options( joinedload(RegisterRead.bill))): prime_bill = sess.query(Bill).join(BillType).filter( Bill.supply == supply, Bill.start_date <= read.bill.finish_date, Bill.finish_date >= read.bill.start_date, Bill.reads.any()).order_by( Bill.issue_date.desc(), BillType.code).first() if prime_bill.id == read.bill.id: prime_reads.add(str(rdate) + "_" + read.msn) 
supply_type = era.meter_category chunk_start = hh_max(era.start_date, start_date) chunk_finish = hh_min(era.finish_date, finish_date) num_hh = int( (chunk_finish + HH - chunk_start).total_seconds() / (30 * 60)) w.writerow( [ hh_format(era.start_date), hh_format(era.finish_date, ongoing_str=''), supply.id, supply.name, supply.source.code, generator_type, site.code, site.name, '| '.join(sorted(site_codes)), hh_format(start_date), hh_format(finish_date), era.pc.code, era.mtc.code, era.cop.code, ssc_code, era.properties, era.mop_contract.name, era.mop_account, era.dc_contract.name, era.dc_account, len(prime_reads), supply_type, hh_format(supply_start), hh_format(supply_finish, ongoing_str='')] + mpan_bit( sess, supply, True, num_hh, era, chunk_start, chunk_finish, forecast_date, caches) + mpan_bit( sess, supply, False, num_hh, era, chunk_start, chunk_finish, forecast_date, caches)) # Avoid a long-running transaction sess.rollback() except BadRequest as e: if era is None: pref = "Problem: " else: pref = "Problem with era " + chellow.utils.url_root + "eras/" + \ str(era.id) + "/edit : " f.write(pref + e.description) except BaseException as e: if era is None: pref = "Problem: " else: pref = "Problem with era " + str(era.id) + ": " f.write(pref + str(e)) f.write(traceback.format_exc()) finally: sess.close() f.close() os.rename(running_name, finished_name)
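# Illustrative sketch only: the "Normal Reads" count above de-duplicates reads
# by (read date, meter serial number), so the same physical read appearing on
# several bills is counted once. A hypothetical stand-alone version:
def count_prime_reads(reads):
    # reads: iterable of (read_date, msn) tuples taken from prime bills only
    return len({f"{read_date}_{msn}" for read_date, msn in reads})


assert count_prime_reads(
    [("2020-01-01", "M123"), ("2020-01-01", "M123"),
     ("2020-02-01", "M123")]) == 2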
def content( start_year, start_month, start_day, finish_year, finish_month, finish_day, is_import, supply_id, user): caches = {} sess = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'daily_supplier_virtual_bill.csv', user) f = open(running_name, mode='w', newline='') writer = csv.writer(f, lineterminator='\n') start_date = Datetime( start_year, start_month, start_day, tzinfo=pytz.utc) finish_date = Datetime( finish_year, finish_month, finish_day, tzinfo=pytz.utc) + \ relativedelta(days=1) - HH supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() day_start = start_date header_titles = [ 'MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To', 'Is Forecast?' ] bill_titles = [] # Find titles for era in sess.query(Era).filter( Era.supply == supply, Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date)): if is_import: cont = era.imp_supplier_contract else: cont = era.exp_supplier_contract for title in chellow.computer.contract_func( caches, cont, 'virtual_bill_titles')(): if title not in bill_titles: bill_titles.append(title) ssc = era.ssc if ssc is not None: for mr in ssc.measurement_requirements: for suffix in ('-kwh', '-rate', '-gbp'): title = mr.tpr.code + suffix if title not in bill_titles: bill_titles.append(title) writer.writerow(header_titles + bill_titles) while not day_start > finish_date: day_finish = day_start + relativedelta(days=1) - HH for era in supply.find_eras(sess, day_start, day_finish): chunk_start = hh_max(era.start_date, day_start) chunk_finish = hh_min(era.finish_date, day_finish) ss = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, is_import, caches) site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() row = [ ss.mpan_core, site.code, site.name, ss.supplier_account, hh_format(ss.start_date), hh_format(ss.finish_date), ss.years_back > 0] chellow.computer.contract_func( caches, ss.supplier_contract, 'virtual_bill')(ss) bill = ss.supplier_bill for title in bill_titles: if title in bill: row.append(csv_make_val(bill[title])) del bill[title] else: row.append('') for k in sorted(bill.keys()): row.append(k) row.append(csv_make_val(bill[k])) writer.writerow(row) day_start += relativedelta(days=1) except BadRequest as e: writer.writerow(["Problem: " + e.description]) except BaseException: writer.writerow([traceback.format_exc()]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
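# Illustrative sketch only: the daily loop above walks whole days from
# start_date to finish_date, each day running from 00:00 to 23:30 inclusive.
from datetime import datetime, timedelta, timezone

from dateutil.relativedelta import relativedelta

HH_EXAMPLE = timedelta(minutes=30)


def day_windows(start_date, finish_date):
    day_start = start_date
    while not day_start > finish_date:
        day_finish = day_start + relativedelta(days=1) - HH_EXAMPLE
        yield day_start, day_finish
        day_start += relativedelta(days=1)


windows = list(day_windows(
    datetime(2020, 1, 1, tzinfo=timezone.utc),
    datetime(2020, 1, 3, tzinfo=timezone.utc)))
assert len(windows) == 3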
def content(supply_id, file_name, start_date, finish_date, user):
    caches = {}
    sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supply_virtual_bills_' + str(supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        month_start = datetime.datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)
        while not month_start > finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            if month_start > start_date:
                period_start = month_start
            else:
                period_start = start_date
            if month_finish > finish_date:
                period_finish = finish_date
            else:
                period_finish = month_finish
            for era in sess.query(Era).filter(
                    Era.supply == supply, Era.start_date < period_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date > period_start)).order_by(
                    Era.start_date):
                chunk_start = era.start_date \
                    if era.start_date > period_start else period_start
                chunk_finish = period_finish \
                    if hh_before(period_finish, era.finish_date) \
                    else era.finish_date
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()
                ds = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era, True,
                    None, caches)
                titles = [
                    'Imp MPAN Core', 'Exp MPAN Core', 'Site Code',
                    'Site Name', 'Account', 'From', 'To', '']
                output_line = [
                    era.imp_mpan_core, era.exp_mpan_core, site.code,
                    site.name, ds.supplier_account, hh_format(ds.start_date),
                    hh_format(ds.finish_date), '']
                mop_titles = ds.contract_func(
                    era.mop_contract, 'virtual_bill_titles')()
                titles.extend(['mop-' + t for t in mop_titles])
                ds.contract_func(era.mop_contract, 'virtual_bill')(ds)
                bill = ds.mop_bill
                for title in mop_titles:
                    if title in bill:
                        output_line.append(bill[title])
                        del bill[title]
                    else:
                        output_line.append('')
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
                output_line.append('')
                dc_titles = ds.contract_func(
                    era.hhdc_contract, 'virtual_bill_titles')()
                titles.append('')
                titles.extend(['dc-' + t for t in dc_titles])
                ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds)
                bill = ds.dc_bill
                for title in dc_titles:
                    output_line.append(bill.get(title, ''))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
                if era.imp_supplier_contract is not None:
                    output_line.append('')
                    imp_supplier_titles = ds.contract_func(
                        era.imp_supplier_contract, 'virtual_bill_titles')()
                    titles.append('')
                    titles.extend(
                        ['imp-supplier-' + t for t in imp_supplier_titles])
                    ds.contract_func(
                        era.imp_supplier_contract, 'virtual_bill')(ds)
                    bill = ds.supplier_bill
                    for title in imp_supplier_titles:
                        if title in bill:
                            output_line.append(bill[title])
                            del bill[title]
                        else:
                            output_line.append('')
                    for k in sorted(bill.keys()):
                        output_line.extend([k, bill[k]])
                if era.exp_supplier_contract is not None:
                    ds = chellow.computer.SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date, era,
                        False, None, caches)
                    output_line.append('')
                    exp_supplier_titles = ds.contract_func(
                        era.exp_supplier_contract, 'virtual_bill_titles')()
                    titles.append('')
                    titles.extend(
                        ['exp-supplier-' + t for t in exp_supplier_titles])
                    ds.contract_func(
                        era.exp_supplier_contract, 'virtual_bill')(ds)
                    bill = ds.supplier_bill
                    for title in exp_supplier_titles:
                        output_line.append(bill.get(title, ''))
                        if title in bill:
                            del bill[title]
                    for k in sorted(bill.keys()):
                        output_line.extend([k, bill[k]])
                if titles != prev_titles:
                    prev_titles = titles
                    writer.writerow([str(v) for v in titles])
                for i, val in enumerate(output_line):
                    if isinstance(val, datetime.datetime):
                        output_line[i] = hh_format(val)
                    elif val is None:
                        output_line[i] = ''
                    else:
                        output_line[i] = str(val)
                writer.writerow(output_line)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
        os.rename(running_name, finished_name)
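# Illustrative sketch only: the virtual-bill rows above put known titles in
# fixed columns, append any unexpected bill keys as name/value pairs, and only
# re-emit the header row when the set of titles changes. The row-building part
# looks roughly like this on plain dicts:
def bill_row(known_titles, bill):
    bill = dict(bill)
    row = [bill.pop(title, '') for title in known_titles]
    for key in sorted(bill):
        row.extend([key, bill[key]])
    return row


assert bill_row(['net-gbp', 'vat-gbp'], {'net-gbp': 10, 'surprise-gbp': 1}) \
    == [10, '', 'surprise-gbp', 1]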
def content(year, supply_id, user): sess = f = writer = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'crc_special_events.csv', user) f = open(running_name, mode='w', newline='') writer = csv.writer(f, lineterminator='\n') writer.writerow(("MPAN Core", "Site Id", "Site Name", "Date", "Event")) year_start = datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc)) def add_event(events, date, code, era=None, mpan_core=None): if era is None: mpan_cores = [mpan_core] else: mpan_cores = [] if era.imp_mpan_core is not None: mpan_cores.append(era.imp_mpan_core) if era.exp_mpan_core is not None: mpan_cores.append(era.exp_mpan_core) for mpan_core in mpan_cores: events.append({ 'date': date, 'code': code, 'mpan-core': mpan_core }) if supply_id is None: supplies = sess.query(Supply).join(Source).join(Era).filter( Source.code.in_(('net', 'gen-net', 'gen')), Era.start_date <= year_finish, or_(Era.finish_date == null(), Era.finish_date >= year_start)).distinct() else: supply = Supply.get_by_id(supply_id) supplies = sess.query(Supply).filter(Supply.id == supply.id) for supply in supplies: eras = sess.query(Era).filter( Era.supply == supply, Era.start_date <= year_finish, or_(Era.finish_date == null(), Era.finish_date >= year_start)).order_by( Era.start_date).all() events = [] first_era = eras[0] first_era_start = first_era.start_date if hh_after(first_era_start, year_start): add_event(events, first_era_start, "New Supply", first_era) last_era = eras[-1] last_era_finish = last_era.finish_date if hh_before(last_era_finish, year_finish): add_event(events, last_era_finish, "Disconnection", last_era) prev_era = first_era for era in eras[1:]: if era.msn != prev_era.msn: add_event(events, era.start_date, "Meter Change", era) if era.pc.code != prev_era.pc.code: add_event(events, era.start_date, "Change Of Profile Class", era) if era.mop_contract_id != prev_era.mop_contract_id: add_event(events, era.start_date, "Change Of MOP", era) if era.dc_contract_id != prev_era.dc_contract_id: add_event(events, era.start_date, "Change Of DC", era) for is_import in [True, False]: if era.imp_mpan_core is None: mpan_core = era.exp_mpan_core else: mpan_core = era.imp_mpan_core if is_import: cur_sup = era.imp_supplier_contract prev_sup = prev_era.imp_supplier_contract else: cur_sup = era.exp_supplier_contract prev_sup = prev_era.exp_supplier_contract if cur_sup is None and prev_sup is not None: add_event(events, era.start_date, "End of supply", mpan_core) elif cur_sup is not None and prev_sup is None: add_event(events, era.start_date, "Start of supply", None, mpan_core) elif cur_sup is not None and \ prev_sup is not None and cur_sup != prev_sup: add_event(events, era.start_date, "Change Of Supplier", None, mpan_core) prev_era = era if len(events) > 0: site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era == last_era).one() for event in events: vals = [ event['mpan-core'], site.code, site.name, event['date'].strftime("%Y-%m-%d %H:%M"), event['code'] ] writer.writerow(vals) # Avoid a long-running transaction sess.rollback() except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) writer.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
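# Illustrative sketch only: the CRC special-events report above walks
# consecutive eras and records an event whenever a tracked attribute changes.
# A hypothetical stand-alone version over plain dicts:
def find_events(eras, tracked=('msn', 'pc', 'mop', 'dc')):
    events = []
    for prev, cur in zip(eras, eras[1:]):
        for attr in tracked:
            if cur[attr] != prev[attr]:
                events.append((cur['start_date'], f"Change of {attr}"))
    return events


events = find_events([
    {'start_date': '2020-04-01', 'msn': 'M1', 'pc': '03', 'mop': 'A',
     'dc': 'B'},
    {'start_date': '2020-07-01', 'msn': 'M2', 'pc': '03', 'mop': 'A',
     'dc': 'B'},
])
assert events == [('2020-07-01', 'Change of msn')]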
def content(supply_id, file_name, start_date, finish_date, user): caches = {} sess = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( "supply_virtual_bills_" + str(supply_id) + ".csv", user) f = open(running_name, mode="w", newline="") writer = csv.writer(f, lineterminator="\n") supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() prev_titles = None for era in (sess.query(Era).filter( Era.supply == supply, Era.start_date < finish_date, or_(Era.finish_date == null(), Era.finish_date > start_date), ).order_by(Era.start_date)): chunk_start = hh_max(era.start_date, start_date) chunk_finish = hh_min(era.finish_date, finish_date) site = (sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one()) ds = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, era.imp_supplier_contract is not None, caches, ) titles = [ "Imp MPAN Core", "Exp MPAN Core", "Site Code", "Site Name", "Account", "From", "To", "", ] output_line = [ era.imp_mpan_core, era.exp_mpan_core, site.code, site.name, ds.supplier_account, hh_format(ds.start_date), hh_format(ds.finish_date), "", ] mop_titles = ds.contract_func(era.mop_contract, "virtual_bill_titles")() titles.extend(["mop-" + t for t in mop_titles]) ds.contract_func(era.mop_contract, "virtual_bill")(ds) bill = ds.mop_bill for title in mop_titles: if title in bill: output_line.append(bill[title]) del bill[title] else: output_line.append("") for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) output_line.append("") dc_titles = ds.contract_func(era.dc_contract, "virtual_bill_titles")() titles.append("") titles.extend(["dc-" + t for t in dc_titles]) ds.contract_func(era.dc_contract, "virtual_bill")(ds) bill = ds.dc_bill for title in dc_titles: output_line.append(bill.get(title, "")) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) tpr_query = (sess.query(Tpr).join(MeasurementRequirement).join( Ssc).join(Era).filter( Era.start_date <= chunk_finish, or_(Era.finish_date == null(), Era.finish_date >= chunk_start), ).order_by(Tpr.code).distinct()) if era.imp_supplier_contract is not None: output_line.append("") supplier_titles = ds.contract_func(era.imp_supplier_contract, "virtual_bill_titles")() for tpr in tpr_query.filter( Era.imp_supplier_contract != null()): for suffix in ("-kwh", "-rate", "-gbp"): supplier_titles.append(tpr.code + suffix) titles.append("") titles.extend(["imp-supplier-" + t for t in supplier_titles]) ds.contract_func(era.imp_supplier_contract, "virtual_bill")(ds) bill = ds.supplier_bill for title in supplier_titles: if title in bill: output_line.append(bill[title]) del bill[title] else: output_line.append("") for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if era.exp_supplier_contract is not None: ds = chellow.computer.SupplySource(sess, chunk_start, chunk_finish, forecast_date, era, False, caches) output_line.append("") supplier_titles = ds.contract_func(era.exp_supplier_contract, "virtual_bill_titles")() for tpr in tpr_query.filter( Era.exp_supplier_contract != null()): for suffix in ("-kwh", "-rate", "-gbp"): supplier_titles.append(tpr.code + suffix) titles.append("") titles.extend(["exp-supplier-" + t for t in supplier_titles]) ds.contract_func(era.exp_supplier_contract, "virtual_bill")(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(bill.get(title, "")) if title in bill: del bill[title] for k in 
sorted(bill.keys()): output_line.extend([k, bill[k]]) if titles != prev_titles: prev_titles = titles writer.writerow([str(v) for v in titles]) for i, val in enumerate(output_line): output_line[i] = csv_make_val(val) writer.writerow(output_line) except BadRequest as e: writer.writerow(["Problem: " + e.description]) except BaseException: writer.writerow([traceback.format_exc()]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
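# The virtual-bill loops above all flatten a bill dict the same way: emit the
# value for each expected title (removing it from the dict as they go), then
# append any leftover keys as trailing key/value pairs so nothing a contract
# script returns is silently dropped. A minimal sketch of that pattern, using
# the hypothetical helper name flatten_bill:
def flatten_bill(titles, bill):
    bill = dict(bill)  # work on a copy; don't mutate the caller's dict
    row = []
    for title in titles:
        row.append(bill.pop(title, ''))
    for k in sorted(bill.keys()):
        row.extend([k, bill[k]])
    return row


# flatten_bill(['net-gbp', 'kwh'], {'kwh': 10, 'net-gbp': 1.5, 'problem': ''})
# -> [1.5, 10, 'problem', '']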
def content(year, supply_id, user): f = sess = None try: sess = Session() fname = ["crc", str(year), str(year + 1)] if supply_id is None: fname.append("all_supplies") else: fname.append("supply_" + str(supply_id)) running_name, finished_name = chellow.dloads.make_names( "_".join(fname) + ".csv", user) f = open(running_name, mode="w", newline="") w = csv.writer(f, lineterminator="\n") ACTUAL_READ_TYPES = ["N", "N3", "C", "X", "CP"] w.writerow(( "Chellow Supply Id", "Report Start", "Report Finish", "MPAN Core", "Site Id", "Site Name", "From", "To", "NHH Breakdown", "Actual HH Normal Days", "Actual AMR Normal Days", "Actual NHH Normal Days", "Actual Unmetered Normal Days", "Max HH Normal Days", "Max AMR Normal Days", "Max NHH Normal Days", "Max Unmetered Normal Days", "Total Actual Normal Days", "Total Max Normal Days", "Data Type", "HH kWh", "AMR kWh", "NHH kWh", "Unmetered kwh", "HH Filled kWh", "AMR Filled kWh", "Total kWh", "Note", )) year_start = Datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = year_start + relativedelta(years=1) - HH supplies = (sess.query(Supply).join(Era).join(Source).filter( Source.code.in_(("net", "gen-net")), Era.imp_mpan_core != null(), Era.start_date <= year_finish, or_(Era.finish_date == null(), Era.finish_date >= year_start), ).distinct().order_by(Supply.id)) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) meter_types = ("hh", "amr", "nhh", "unmetered") for supply in supplies: total_kwh = dict([(mtype, 0) for mtype in meter_types]) filled_kwh = dict([(mtype, 0) for mtype in ("hh", "amr")]) normal_days = dict([(mtype, 0) for mtype in meter_types]) max_normal_days = dict([(mtype, 0) for mtype in meter_types]) breakdown = "" eras = (sess.query(Era).filter( Era.supply == supply, Era.start_date <= year_finish, or_(Era.finish_date == null(), Era.finish_date >= year_start), ).order_by(Era.start_date).all()) supply_from = hh_max(eras[0].start_date, year_start) supply_to = hh_min(eras[-1].finish_date, year_finish) for era in eras: meter_type = era.meter_category period_start = hh_max(era.start_date, year_start) period_finish = hh_min(era.finish_date, year_finish) max_normal_days[meter_type] += ( (period_finish - period_start).total_seconds() + 60 * 30) / (60 * 60 * 24) mpan_core = era.imp_mpan_core site = (sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era_id == era.id).one()) if meter_type in ("nhh", "amr"): read_list = [] read_keys = {} pairs = [] prior_pres_reads = iter( sess.query(RegisterRead).join(Bill).join( BillType).join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date < period_start, BillType.code != "W", ).order_by(RegisterRead.present_date.desc())) prior_prev_reads = iter( sess.query(RegisterRead).join(Bill).join( BillType).join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date < period_start, BillType.code != "W", ).order_by(RegisterRead.previous_date.desc())) next_pres_reads = iter( sess.query(RegisterRead).join(Bill).join( BillType).join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date >= period_start, BillType.code != "W", ).order_by(RegisterRead.present_date)) next_prev_reads = iter( sess.query(RegisterRead).join(Bill).join( 
BillType).join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date >= period_start, BillType.code != "W", ).order_by(RegisterRead.previous_date)) for is_forwards in [False, True]: if is_forwards: pres_reads = next_pres_reads prev_reads = next_prev_reads read_list.reverse() else: pres_reads = prior_pres_reads prev_reads = prior_prev_reads prime_pres_read = None prime_prev_read = None while True: while prime_pres_read is None: try: pres_read = next(pres_reads) except StopIteration: break pres_date = pres_read.present_date pres_msn = pres_read.msn read_key = "_".join([str(pres_date), pres_msn]) if read_key in read_keys: continue pres_bill = ( sess.query(Bill).join(BillType).filter( Bill.reads.any(), Bill.supply == supply, Bill.finish_date >= pres_read.bill.start_date, Bill.start_date <= pres_read.bill.finish_date, BillType.code != "W", ).order_by(Bill.issue_date.desc(), BillType.code).first()) if pres_bill != pres_read.bill: continue reads = dict(( read.tpr.code, float(read.present_value) * float(read.coefficient), ) for read in sess.query(RegisterRead).filter( RegisterRead.units == 0, RegisterRead.bill == pres_bill, RegisterRead.present_date == pres_date, RegisterRead.msn == pres_msn, )) prime_pres_read = { "date": pres_date, "reads": reads, "msn": pres_msn, } read_keys[read_key] = None while prime_prev_read is None: try: prev_read = next(prev_reads) except StopIteration: break prev_date = prev_read.previous_date prev_msn = prev_read.msn read_key = "_".join([str(prev_date), prev_msn]) if read_key in read_keys: continue prev_bill = ( sess.query(Bill).join(BillType).filter( Bill.reads.any(), Bill.supply_id == supply.id, Bill.finish_date >= prev_read.bill.start_date, Bill.start_date <= prev_read.bill.finish_date, BillType.code != "W", ).order_by(Bill.issue_date.desc(), BillType.code).first()) if prev_bill != prev_read.bill: continue reads = dict(( read.tpr.code, float(read.previous_value) * float(read.coefficient), ) for read in sess.query(RegisterRead).filter( RegisterRead.units == 0, RegisterRead.bill_id == prev_bill.id, RegisterRead.previous_date == prev_date, RegisterRead.msn == prev_msn, )) prime_prev_read = { "date": prev_date, "reads": reads, "msn": prev_msn, } read_keys[read_key] = None if prime_pres_read is None and prime_prev_read is None: break elif prime_pres_read is None: read_list.append(prime_prev_read) prime_prev_read = None elif prime_prev_read is None: read_list.append(prime_pres_read) prime_pres_read = None else: if is_forwards: if (prime_pres_read["date"] <= prime_prev_read["date"]): read_list.append(prime_pres_read) prime_pres_read = None else: read_list.append(prime_prev_read) prime_prev_read = None else: if (prime_prev_read["date"] >= prime_pres_read["date"]): read_list.append(prime_prev_read) prime_prev_read = None else: read_list.append(prime_pres_read) prime_pres_read = None if len(read_list) > 1: if is_forwards: aft_read = read_list[-2] fore_read = read_list[-1] else: aft_read = read_list[-1] fore_read = read_list[-2] if aft_read["msn"] == fore_read["msn"] and set( aft_read["reads"].keys()) == set( fore_read["reads"].keys()): pair_start_date = aft_read["date"] + HH pair_finish_date = fore_read["date"] num_hh = ((pair_finish_date + HH - pair_start_date ).total_seconds()) / (30 * 60) tprs = {} for tpr_code, initial_val in aft_read[ "reads"].items(): end_val = fore_read["reads"][tpr_code] kwh = end_val - initial_val if kwh < 0: digits = int( math.log10(initial_val)) + 1 kwh = 
10**digits + kwh tprs[tpr_code] = kwh / num_hh pairs.append({ "start-date": pair_start_date, "finish-date": pair_finish_date, "tprs": tprs, }) if len(pairs) > 0 and ( not is_forwards or (is_forwards and read_list[-1]["date"] > period_finish)): break breakdown += "read list - \n" + dumps(read_list) + "\n" if len(pairs) == 0: pairs.append({ "start-date": period_start, "finish-date": period_finish, "tprs": { "00001": 0 }, }) else: for pair in pairs: pair_start = pair["start-date"] pair_finish = pair["finish-date"] if pair_start >= year_start and pair_finish <= year_finish: block_start = hh_max(pair_start, period_start) block_finish = hh_min(pair_finish, period_finish) if block_start <= block_finish: normal_days[meter_type] += ( (block_finish - block_start).total_seconds() + 60 * 30) / (60 * 60 * 24) # smooth for i in range(1, len(pairs)): pairs[i - 1]["finish-date"] = pairs[i]["start-date"] - HH # stretch if pairs[0]["start-date"] > period_start: pairs[0]["start-date"] = period_start if pairs[-1]["finish-date"] < period_finish: pairs[-1]["finish-date"] = period_finish # chop pairs = [ pair for pair in pairs if not pair["start-date"] > period_finish and not pair["finish-date"] < period_start ] # squash if pairs[0]["start-date"] < period_start: pairs[0]["start-date"] = period_start if pairs[-1]["finish-date"] > period_finish: pairs[-1]["finish-date"] = period_finish for pair in pairs: pair_hhs = ((pair["finish-date"] - pair["start-date"] ).total_seconds() + 30 * 60) / (60 * 30) pair["pair_hhs"] = pair_hhs for tpr_code, pair_kwh in pair["tprs"].items(): total_kwh[meter_type] += pair_kwh * pair_hhs breakdown += "pairs - \n" + dumps(pairs) elif meter_type == "hh": period_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value).join( Channel).filter( Channel.imp_related == true(), Channel.channel_type == "ACTIVE", Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish, ).order_by(HhDatum.id)) year_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value).join( Channel).join(Era).filter( Channel.imp_related == true(), Channel.channel_type == "ACTIVE", Era.supply == supply, HhDatum.start_date >= year_start, HhDatum.start_date <= year_finish, ).order_by(HhDatum.id)) period_sum_kwhs = sum(period_kwhs) year_sum_kwhs = sum(year_kwhs) period_len_kwhs = len(period_kwhs) year_len_kwhs = len(year_kwhs) total_kwh[meter_type] += period_sum_kwhs period_hhs = (period_finish + HH - period_start).total_seconds() / (60 * 30) if year_len_kwhs > 0: filled_kwh[meter_type] += ( year_sum_kwhs / year_len_kwhs * (period_hhs - period_len_kwhs)) normal_days[meter_type] += (sess.query( func.count(HhDatum.value)).join(Channel).filter( Channel.imp_related == true(), Channel.channel_type == "ACTIVE", Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish, HhDatum.status == "A", ).one()[0] / 48) elif meter_type == "unmetered": year_seconds = (year_finish - year_start).total_seconds() + 60 * 30 period_seconds = (period_finish - period_start).total_seconds() + 60 * 30 total_kwh[ meter_type] += era.imp_sc * period_seconds / year_seconds normal_days[meter_type] += period_seconds / (60 * 60 * 24) # for full year 183 total_normal_days = sum(normal_days.values()) total_max_normal_days = sum(max_normal_days.values()) is_normal = total_normal_days / total_max_normal_days >= 183 / 365 w.writerow( [ supply.id, hh_format(year_start), hh_format(year_finish), mpan_core, site.code, site.name, hh_format(supply_from), hh_format(supply_to), breakdown, ] + 
[normal_days[t] for t in meter_types] + [max_normal_days[t] for t in meter_types] + [ total_normal_days, total_max_normal_days, "Actual" if is_normal else "Estimated", ] + [total_kwh[t] for t in meter_types] + [filled_kwh[t] for t in ("hh", "amr")] + [sum(total_kwh.values()) + sum(filled_kwh.values()), ""]) # avoid a long running transaction sess.rollback() except BaseException: msg = traceback.format_exc() sys.stderr.write(msg + "\n") f.write("Problem " + msg) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
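# When a later register reading is smaller than the earlier one, the CRC
# report above assumes the register has clocked over and adds 10**digits,
# where digits is the number of digits in the earlier reading. A minimal
# sketch of that correction, assuming the hypothetical name advance_kwh:
import math


def advance_kwh(initial_val, end_val):
    # kWh advanced between two readings, allowing for a register rollover.
    kwh = end_val - initial_val
    if kwh < 0:
        digits = int(math.log10(initial_val)) + 1
        kwh = 10 ** digits + kwh
    return kwh


# advance_kwh(9950, 120) -> 170  (a 4-digit register rolling over 9999 -> 0000)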
def content(year, month, months, supply_id, user): tmp_file = sess = None try: sess = Session() supplies = (sess.query(Supply).join(Era).distinct().options( joinedload(Supply.generator_type))) if supply_id is None: base_name = ("supplies_monthly_duration_for_all_supplies_for_" + str(months) + "_to_" + str(year) + "_" + str(month) + ".csv") else: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) base_name = ("supplies_monthly_duration_for_" + str(supply.id) + "_" + str(months) + "_to_" + str(year) + "_" + str(month) + ".csv") running_name, finished_name = chellow.dloads.make_names( base_name, user) tmp_file = open(running_name, "w") caches = {} start_date = Datetime( year, month, 1, tzinfo=pytz.utc) - relativedelta(months=months - 1) field_names = ( "supply-name", "source-code", "generator-type", "month", "pc-code", "msn", "site-code", "site-name", "metering-type", "import-mpan-core", "metered-import-kwh", "metered-import-net-gbp", "metered-import-estimated-kwh", "billed-import-kwh", "billed-import-net-gbp", "export-mpan-core", "metered-export-kwh", "metered-export-estimated-kwh", "billed-export-kwh", "billed-export-net-gbp", "problem", "timestamp", ) tmp_file.write("supply-id," + ",".join(field_names) + "\n") forecast_date = chellow.computer.forecast_date() for i in range(months): month_start = start_date + relativedelta(months=i) month_finish = month_start + relativedelta(months=1) - HH for supply in supplies.filter( Era.start_date <= month_finish, or_(Era.finish_date == null(), Era.finish_date >= month_start), ): generator_type = supply.generator_type if generator_type is None: generator_type = "" else: generator_type = generator_type.code source_code = supply.source.code eras = supply.find_eras(sess, month_start, month_finish) era = eras[-1] metering_type = era.meter_category site = (sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one()) values = { "supply-name": supply.name, "source-code": source_code, "generator-type": generator_type, "month": hh_format(month_finish), "pc-code": era.pc.code, "msn": era.msn, "site-code": site.code, "site-name": site.name, "metering-type": metering_type, "problem": "", } tmp_file.write(str(supply.id) + ",") for is_import, pol_name in [(True, "import"), (False, "export")]: if is_import: mpan_core = era.imp_mpan_core else: mpan_core = era.exp_mpan_core values[pol_name + "-mpan-core"] = mpan_core kwh = 0 est_kwh = 0 if metering_type in ["hh", "amr"]: est_kwh = (sess.query( HhDatum.value).join(Channel).join(Era).filter( HhDatum.status == "E", Era.supply_id == supply.id, Channel.channel_type == "ACTIVE", Channel.imp_related == is_import, HhDatum.start_date >= month_start, HhDatum.start_date <= month_finish, ).first()) if est_kwh is None: est_kwh = 0 else: est_kwh = est_kwh[0] if not (is_import and source_code in ("net", "gen-net")): kwh_sum = (sess.query( cast(func.sum(HhDatum.value), Float)).join(Channel).join(Era).filter( Era.supply_id == supply.id, Channel.channel_type == "ACTIVE", Channel.imp_related == is_import, HhDatum.start_date >= month_start, HhDatum.start_date <= month_finish, ).one()[0]) if kwh_sum is not None: kwh += kwh_sum values["metered-" + pol_name + "-estimated-kwh"] = est_kwh values["metered-" + pol_name + "-kwh"] = kwh values["metered-" + pol_name + "-net-gbp"] = 0 values["billed-" + pol_name + "-kwh"] = 0 values["billed-" + pol_name + "-net-gbp"] = 0 values["billed-" + pol_name + "-apportioned-kwh"] = 0 values["billed-" + pol_name + 
"-apportioned-net-gbp"] = 0 values["billed-" + pol_name + "-raw-kwh"] = 0 values["billed-" + pol_name + "-raw-net-gbp"] = 0 for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= month_finish, Bill.finish_date >= month_start, ): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = (bill_finish - bill_start).total_seconds() + 30 * 60 overlap_duration = (min(bill_finish, month_finish) - max( bill_start, month_start)).total_seconds() + 30 * 60 overlap_proportion = float(overlap_duration) / float( bill_duration) values[ "billed-import-net-gbp"] += overlap_proportion * float( bill.net) values["billed-import-kwh"] += overlap_proportion * float( bill.kwh) for era in eras: chunk_start = hh_max(era.start_date, month_start) chunk_finish = hh_min(era.finish_date, month_finish) import_mpan_core = era.imp_mpan_core if import_mpan_core is None: continue supplier_contract = era.imp_supplier_contract if source_code in ["net", "gen-net", "3rd-party"]: supply_source = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, True, caches, ) values["metered-import-kwh"] += sum( datum["msp-kwh"] for datum in supply_source.hh_data) import_vb_function = supply_source.contract_func( supplier_contract, "virtual_bill") if import_vb_function is None: values["problem"] += ( "Can't find the " "virtual_bill function in the supplier " "contract. ") else: import_vb_function(supply_source) values[ "metered-import-net-gbp"] += supply_source.supplier_bill[ "net-gbp"] supply_source.contract_func( era.dc_contract, "virtual_bill")(supply_source) values[ "metered-import-net-gbp"] += supply_source.dc_bill[ "net-gbp"] mop_func = supply_source.contract_func( era.mop_contract, "virtual_bill") if mop_func is None: values["problem"] += (" MOP virtual_bill " "function can't be found.") else: mop_func(supply_source) mop_bill = supply_source.mop_bill values["metered-import-net-gbp"] += mop_bill[ "net-gbp"] if len(mop_bill["problem"]) > 0: values["problem"] += ( " MOP virtual bill problem: " + mop_bill["problem"]) values["timestamp"] = int(time.time() * 1000) tmp_file.write(",".join('"' + str(values[name]) + '"' for name in field_names) + "\n") except BaseException: tmp_file.write(traceback.format_exc()) finally: if sess is not None: sess.close() tmp_file.close() os.rename(running_name, finished_name)
def content(supply_id, file_name, start_date, finish_date, user): caches = {} sess = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'supply_virtual_bills_' + str(supply_id) + '.csv', user) f = open(running_name, mode='w', newline='') writer = csv.writer(f, lineterminator='\n') supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() prev_titles = None for era in sess.query(Era).filter( Era.supply == supply, Era.start_date < finish_date, or_( Era.finish_date == null(), Era.finish_date > start_date)).order_by(Era.start_date): chunk_start = hh_max(era.start_date, start_date) chunk_finish = hh_min(era.finish_date, finish_date) site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() ds = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, True, caches) titles = [ 'Imp MPAN Core', 'Exp MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To', ''] output_line = [ era.imp_mpan_core, era.exp_mpan_core, site.code, site.name, ds.supplier_account, hh_format(ds.start_date), hh_format(ds.finish_date), ''] mop_titles = ds.contract_func( era.mop_contract, 'virtual_bill_titles')() titles.extend(['mop-' + t for t in mop_titles]) ds.contract_func(era.mop_contract, 'virtual_bill')(ds) bill = ds.mop_bill for title in mop_titles: if title in bill: output_line.append(bill[title]) del bill[title] else: output_line.append('') for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) output_line.append('') dc_titles = ds.contract_func( era.hhdc_contract, 'virtual_bill_titles')() titles.append('') titles.extend(['dc-' + t for t in dc_titles]) ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds) bill = ds.dc_bill for title in dc_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) tpr_query = sess.query(Tpr).join(MeasurementRequirement). 
\ join(Ssc).join(Era).filter( Era.start_date <= chunk_finish, or_( Era.finish_date == null(), Era.finish_date >= chunk_start) ).order_by(Tpr.code).distinct() if era.imp_supplier_contract is not None: output_line.append('') supplier_titles = ds.contract_func( era.imp_supplier_contract, 'virtual_bill_titles')() for tpr in tpr_query.filter( Era.imp_supplier_contract != null()): for suffix in ('-kwh', '-rate', '-gbp'): supplier_titles.append(tpr.code + suffix) titles.append('') titles.extend(['imp-supplier-' + t for t in supplier_titles]) ds.contract_func(era.imp_supplier_contract, 'virtual_bill')(ds) bill = ds.supplier_bill for title in supplier_titles: if title in bill: output_line.append(bill[title]) del bill[title] else: output_line.append('') for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if era.exp_supplier_contract is not None: ds = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, False, caches) output_line.append('') supplier_titles = ds.contract_func( era.exp_supplier_contract, 'virtual_bill_titles')() for tpr in tpr_query.filter( Era.exp_supplier_contract != null()): for suffix in ('-kwh', '-rate', '-gbp'): supplier_titles.append(tpr.code + suffix) titles.append('') titles.extend(['exp-supplier-' + t for t in supplier_titles]) ds.contract_func( era.exp_supplier_contract, 'virtual_bill')(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if titles != prev_titles: prev_titles = titles writer.writerow([str(v) for v in titles]) for i, val in enumerate(output_line): output_line[i] = csv_make_val(val) writer.writerow(output_line) except BadRequest as e: writer.writerow(["Problem: " + e.description]) except: writer.writerow([traceback.format_exc()]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
def content(start_date, finish_date, imp_related, channel_type, is_zipped, supply_id, mpan_cores, user): zf = sess = tf = None base_name = ["supplies_hh_data", finish_date.strftime('%Y%m%d%H%M')] cache = {} try: sess = Session() supplies = sess.query(Supply).join(Era).filter( or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date).order_by(Supply.id).distinct() if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) first_era = sess.query(Era).filter( Era.supply == supply, or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date).order_by( Era.start_date).first() if first_era.imp_mpan_core is None: name_core = first_era.exp_mpan_core else: name_core = first_era.imp_mpan_core base_name.append("supply_" + name_core.replace(' ', '_')) if mpan_cores is not None: supplies = supplies.filter( or_(Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) base_name.append('filter') outs = [] titles = ','.join([ 'Import MPAN Core', 'Export MPAN Core', 'Import Related?', 'Channel Type', 'Date' ] + list(map(str, range(48)))) running_name, finished_name = chellow.dloads.make_names( '_'.join(base_name) + ('.zip' if is_zipped else '.csv'), user) if is_zipped: zf = zipfile.ZipFile(running_name, "w", zipfile.ZIP_DEFLATED) else: tf = open(running_name, "w") outs.append(titles) for supply in supplies: era = supply.find_era_at(sess, finish_date) if era is None: imp_mpan_core_str = exp_mpan_core_str = 'NA' else: if era.imp_mpan_core is None: imp_mpan_core_str = "NA" else: imp_mpan_core_str = era.imp_mpan_core if era.exp_mpan_core is None: exp_mpan_core_str = "NA" else: exp_mpan_core_str = era.exp_mpan_core imp_related_str = "TRUE" if imp_related else "FALSE" hh_data = iter( sess.query(HhDatum).join(Channel).join(Era).filter( Era.supply == supply, HhDatum.start_date >= start_date, HhDatum.start_date <= finish_date, Channel.imp_related == imp_related, Channel.channel_type == channel_type).order_by( HhDatum.start_date)) datum = next(hh_data, None) for current_date in hh_range(cache, start_date, finish_date): if current_date.hour == 0 and current_date.minute == 0: outs.append("\n" + imp_mpan_core_str + "," + exp_mpan_core_str + "," + imp_related_str + "," + channel_type + "," + current_date.strftime('%Y-%m-%d')) outs.append(",") if datum is not None and datum.start_date == current_date: outs.append(str(datum.value)) datum = next(hh_data, None) if is_zipped: fname = '_'.join((imp_mpan_core_str, exp_mpan_core_str, str(supply.id) + '.csv')) zf.writestr(fname.encode('ascii'), titles + ''.join(outs)) else: tf.write(''.join(outs)) outs = [] # Avoid long-running transaction sess.rollback() if is_zipped: zf.close() else: tf.close() except BaseException: msg = traceback.format_exc() if is_zipped: zf.writestr('error.txt', msg) zf.close() else: tf.write(msg) finally: if sess is not None: sess.close() os.rename(running_name, finished_name)
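# The HH exports above emit one CSV row per day with 48 half-hourly columns,
# walking every half hour in the range and starting a new row at midnight.
# A minimal sketch over a plain dict of {start_datetime: value}, assuming
# the hypothetical name day_rows (the real report streams from HhDatum):
from datetime import datetime, timedelta, timezone

HH = timedelta(minutes=30)


def day_rows(start, finish, readings):
    rows = []
    current = start
    while current <= finish:
        if (current.hour, current.minute) == (0, 0) or not rows:
            rows.append([current.strftime('%Y-%m-%d')])
        rows[-1].append(readings.get(current, ''))
        current += HH
    return rows


# start = datetime(2024, 1, 1, tzinfo=timezone.utc)
# finish = datetime(2024, 1, 1, 23, 30, tzinfo=timezone.utc)
# day_rows(start, finish, {start: 0.5})
# -> [['2024-01-01', 0.5, '', ..., '']]  (a date cell plus 48 value cells)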
def content(year, supply_id, sess): yield "MPAN Core,Site Id,Site Name,Date,Event," year_start = datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc)) def add_event(events, date, code, era=None, mpan_core=None): if era is None: mpan_cores = [mpan_core] else: mpan_cores = [] if era.imp_mpan_core is not None: mpan_cores.append(era.imp_mpan_core) if era.exp_mpan_core is not None: mpan_cores.append(era.exp_mpan_core) for mpan_core in mpan_cores: events.append({'date': date, 'code': code, 'mpan-core': mpan_core}) try: if supply_id is None: supplies = sess.query(Supply).join(Source).join(Era).filter( Source.code.in_(('net', 'gen-net', 'gen')), Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)).distinct() else: supply = Supply.get_by_id(supply_id) supplies = sess.query(Supply).filter(Supply.id == supply.id) for supply in supplies: eras = sess.query(Era).filter( Era.supply == supply, Era.start_date <= year_finish, or_(Era.finish_date == null(), Era.finish_date >= year_start) ).order_by(Era.start_date).all() events = [] first_era = eras[0] first_era_start = first_era.start_date if hh_after(first_era_start, year_start): add_event(events, first_era_start, "New Supply", first_era) last_era = eras[-1] last_era_finish = last_era.finish_date if hh_before(last_era_finish, year_finish): add_event(events, last_era_finish, "Disconnection", last_era) prev_era = first_era for era in eras[1:]: if era.msn != prev_era.msn: add_event(events, era.start_date, "Meter Change", era) if era.pc.code != prev_era.pc.code: add_event( events, era.start_date, "Change Of Profile Class", era) if era.mop_contract_id != prev_era.mop_contract_id: add_event(events, era.start_date, "Change Of MOP", era) if era.hhdc_contract_id != prev_era.hhdc_contract_id: add_event(events, era.start_date, "Change Of DC", era) for is_import in [True, False]: if era.imp_mpan_core is None: mpan_core = era.exp_mpan_core else: mpan_core = era.imp_mpan_core if is_import: cur_sup = era.imp_supplier_contract prev_sup = prev_era.imp_supplier_contract else: cur_sup = era.exp_supplier_contract prev_sup = prev_era.exp_supplier_contract if cur_sup is None and prev_sup is not None: add_event( events, era.start_date, "End of supply", mpan_core) elif cur_sup is not None and prev_sup is None: add_event( events, era.start_date, "Start of supply", None, mpan_core) elif cur_sup is not None and \ prev_sup is not None and cur_sup != prev_sup: add_event( events, era.start_date, "Change Of Supplier", None, mpan_core) prev_era = era if len(events) > 0: site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era == last_era).one() for event in events: vals = [ event['mpan-core'], site.code, site.name, event['date'].strftime("%Y-%m-%d %H:%M"), event['code']] yield '\n' + ','.join( '"' + str(val) + '"' for val in vals) + ',' else: yield ' ' except: yield traceback.format_exc()
def content( start_date, finish_date, imp_related, channel_type, is_zipped, supply_id, mpan_cores, user): zf = sess = tf = None base_name = ["supplies_hh_data", finish_date.strftime('%Y%m%d%H%M')] try: sess = Session() supplies = sess.query(Supply).join(Era).filter( or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date).order_by(Supply.id).distinct() if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) first_era = sess.query(Era).filter( Era.supply == supply, or_( Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date).order_by(Era.start_date).first() if first_era.imp_mpan_core is None: name_core = first_era.exp_mpan_core else: name_core = first_era.imp_mpan_core base_name.append("supply_" + name_core.replace(' ', '_')) if mpan_cores is not None: supplies = supplies.filter( or_( Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) base_name.append('filter') outs = [] titles = "MPAN Core,Date," + ','.join(map(str, range(48))) running_name, finished_name = chellow.dloads.make_names( '_'.join(base_name) + ('.zip' if is_zipped else '.csv'), user) if is_zipped: zf = zipfile.ZipFile(running_name, "w", zipfile.ZIP_DEFLATED) else: tf = open(running_name, "w") outs.append(titles) for supply in supplies: era = supply.find_era_at(sess, finish_date) if era is None or era.imp_mpan_core is None: mpan_core_str = "NA" else: mpan_core_str = era.imp_mpan_core hh_data = iter( sess.query(HhDatum).join(Channel).join(Era).filter( Era.supply == supply, HhDatum.start_date >= start_date, HhDatum.start_date <= finish_date, Channel.imp_related == imp_related, Channel.channel_type == channel_type ).order_by(HhDatum.start_date)) datum = next(hh_data, None) for current_date in hh_range(start_date, finish_date): if current_date.hour == 0 and current_date.minute == 0: outs.append( "\n" + mpan_core_str + "," + current_date.strftime('%Y-%m-%d')) outs.append(",") if datum is not None and datum.start_date == current_date: outs.append(str(datum.value)) datum = next(hh_data, None) if is_zipped: fname = mpan_core_str + '_' + str(supply.id) + '.csv' zf.writestr(fname.encode('ascii'), titles + ''.join(outs)) else: tf.write(''.join(outs)) outs = [] if is_zipped: zf.close() else: tf.close() except: msg = traceback.format_exc() if is_zipped: zf.writestr('error.txt', msg) zf.close() else: tf.write(msg) finally: if sess is not None: sess.close() os.rename(running_name, finished_name)
def content(supply_id, start_date, finish_date, user):
    caches = {}
    sess = f = w = None
    try:
        sess = Session()
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        running_name, finished_name = chellow.dloads.make_names(
            'supply_virtual_bills_hh_' + str(supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        for hh_start in hh_range(start_date, finish_date):
            era = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= hh_start,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= hh_start)).one()
            site = sess.query(Site).join(SiteEra).filter(
                SiteEra.era == era, SiteEra.is_physical == true()).one()
            ds = chellow.computer.SupplySource(
                sess, hh_start, hh_start, forecast_date, era, True, caches)
            titles = [
                'MPAN Core', 'Site Code', 'Site Name', 'Account', 'HH Start',
                '']
            output_line = [
                ds.mpan_core, site.code, site.name, ds.supplier_account,
                hh_format(ds.start_date), '']
            mop_titles = ds.contract_func(
                era.mop_contract, 'virtual_bill_titles')()
            titles.extend(['mop-' + t for t in mop_titles])
            ds.contract_func(era.mop_contract, 'virtual_bill')(ds)
            bill = ds.mop_bill
            for title in mop_titles:
                output_line.append(bill.get(title, ''))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            output_line.append('')
            dc_titles = ds.contract_func(
                era.hhdc_contract, 'virtual_bill_titles')()
            titles.append('')
            titles.extend(['dc-' + t for t in dc_titles])
            ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds)
            bill = ds.dc_bill
            for title in dc_titles:
                output_line.append(bill.get(title, ''))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])
            if era.imp_supplier_contract is not None:
                contract = era.imp_supplier_contract
                output_line.append('')
                supplier_titles = ds.contract_func(
                    contract, 'virtual_bill_titles')()
                titles.append('')
                titles.extend(['imp-supplier-' + t for t in supplier_titles])
                ds.contract_func(contract, 'virtual_bill')(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(bill.get(title, ''))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
            if era.exp_supplier_contract is not None:
                contract = era.exp_supplier_contract
                ds = chellow.computer.SupplySource(
                    sess, hh_start, hh_start, forecast_date, era, False,
                    caches)
                output_line.append('')
                supplier_titles = ds.contract_func(
                    contract, 'virtual_bill_titles')()
                titles.append('')
                titles.extend(['exp-supplier-' + t for t in supplier_titles])
                ds.contract_func(contract, 'virtual_bill')(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(bill.get(title, ''))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])
            if titles != prev_titles:
                prev_titles = titles
                w.writerow(titles)
            w.writerow(output_line)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if w is not None:
            w.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
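# Because the column set can change from one era to the next (different
# contracts expose different TPR columns), the virtual-bill reports above
# re-emit the header row only when the title list differs from the last one
# written. A minimal sketch of that pattern, with hypothetical names:
import csv
import io


def write_rows(rows_with_titles):
    # rows_with_titles is an iterable of (titles, row) pairs.
    out = io.StringIO()
    writer = csv.writer(out, lineterminator='\n')
    prev_titles = None
    for titles, row in rows_with_titles:
        if titles != prev_titles:
            prev_titles = titles
            writer.writerow(titles)
        writer.writerow(row)
    return out.getvalue()


# write_rows([(['a', 'b'], [1, 2]), (['a', 'b'], [3, 4]), (['a', 'c'], [5, 6])])
# emits the header twice: once at the start and again when 'b' becomes 'c'.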
def content(running_name, finished_name, date, supply_id, mpan_cores): sess = None try: sess = Session() f = open(running_name, mode='w', newline='') writer = csv.writer(f, lineterminator='\n') writer.writerow( ('Date', 'Physical Site Id', 'Physical Site Name', 'Other Site Ids', 'Other Site Names', 'Supply Id', 'Source', 'Generator Type', 'GSP Group', 'DNO Name', 'Voltage Level', 'Metering Type', 'Mandatory HH', 'PC', 'MTC', 'CoP', 'SSC', 'Number Of Registers', 'MOP Contract', 'Mop Account', 'DC Contract', 'DC Account', 'Meter Serial Number', 'Meter Installation Date', 'Latest Normal Meter Read Date', 'Latest Normal Meter Read Type', 'Latest DC Bill Date', 'Latest MOP Bill Date', 'Supply Start Date', 'Supply Finish Date', 'Properties', 'Import ACTIVE?', 'Import REACTIVE_IMPORT?', 'Import REACTIVE_EXPORT?', 'Export ACTIVE?', 'Export REACTIVE_IMPORT?', 'Export REACTIVE_EXPORT?', 'Import MPAN core', 'Import Agreed Supply Capacity (kVA)', 'Import LLFC Code', 'Import LLFC Description', 'Import Supplier Contract', 'Import Supplier Account', 'Import Mandatory kW', 'Latest Import Supplier Bill Date', 'Export MPAN core', 'Export Agreed Supply Capacity (kVA)', 'Export LLFC Code', 'Export LLFC Description', 'Export Supplier Contract', 'Export Supplier Account', 'Export Mandatory kW', 'Latest Export Supplier Bill Date')) NORMAL_READ_TYPES = ('N', 'C', 'N3') year_start = date + HH - relativedelta(years=1) era_ids = sess.query(Era.id).filter( Era.start_date <= date, or_(Era.finish_date == null(), Era.finish_date >= date)).order_by(Era.supply_id) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) era_ids = era_ids.filter(Era.supply == supply) if mpan_cores is not None: era_ids = era_ids.filter( or_(Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) for era_id, in era_ids: era, supply, generator_type = sess.query( Era, Supply, GeneratorType).join( Supply, Era.supply_id == Supply.id).outerjoin( GeneratorType, Supply.generator_type_id == GeneratorType.id).filter( Era.id == era_id).options( joinedload(Era.channels), joinedload(Era.cop), joinedload(Era.dc_contract), joinedload(Era.exp_llfc), joinedload(Era.exp_supplier_contract), joinedload(Era.imp_llfc), joinedload(Era.imp_supplier_contract), joinedload(Era.mop_contract), joinedload(Era.mtc), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.pc), joinedload(Era.site_eras).joinedload( SiteEra.site), joinedload(Era.ssc), joinedload(Era.supply).joinedload( Supply.source), joinedload(Era.supply).joinedload( Supply.gsp_group), joinedload(Era.supply).joinedload( Supply.dno)).one() site_codes = [] site_names = [] for site_era in era.site_eras: if site_era.is_physical: physical_site = site_era.site else: site = site_era.site site_codes.append(site.code) site_names.append(site.name) sup_eras = sess.query(Era).filter(Era.supply == supply).order_by( Era.start_date).all() supply_start_date = sup_eras[0].start_date supply_finish_date = sup_eras[-1].finish_date if era.imp_mpan_core is None: voltage_level_code = era.exp_llfc.voltage_level.code else: voltage_level_code = era.imp_llfc.voltage_level.code if generator_type is None: generator_type_str = '' else: generator_type_str = generator_type.code metering_type = era.meter_category if metering_type == 'nhh': latest_prev_normal_read = sess.query(RegisterRead). 
\ join(Bill).join(RegisterRead.previous_type).filter( ReadType.code.in_(NORMAL_READ_TYPES), RegisterRead.previous_date <= date, Bill.supply_id == supply.id).order_by( RegisterRead.previous_date.desc()).options( joinedload(RegisterRead.previous_type)).first() latest_pres_normal_read = sess.query(RegisterRead) \ .join(Bill).join(RegisterRead.present_type).filter( ReadType.code.in_(NORMAL_READ_TYPES), RegisterRead.present_date <= date, Bill.supply == supply).order_by( RegisterRead.present_date.desc()).options( joinedload(RegisterRead.present_type)).first() if latest_prev_normal_read is None and \ latest_pres_normal_read is None: latest_normal_read_date = None latest_normal_read_type = None elif latest_pres_normal_read is not None and \ latest_prev_normal_read is None: latest_normal_read_date = \ latest_pres_normal_read.present_date latest_normal_read_type = \ latest_pres_normal_read.present_type.code elif latest_pres_normal_read is None and \ latest_prev_normal_read is not None: latest_normal_read_date = \ latest_prev_normal_read.previous_date latest_normal_read_type = \ latest_prev_normal_read.previous_type.code elif latest_pres_normal_read.present_date > \ latest_prev_normal_read.previous_date: latest_normal_read_date = \ latest_pres_normal_read.present_date latest_normal_read_type = \ latest_pres_normal_read.present_type.code else: latest_normal_read_date = \ latest_prev_normal_read.previous_date latest_normal_read_type = \ latest_prev_normal_read.previous_type.code if latest_normal_read_date is not None: latest_normal_read_date = \ hh_format(latest_normal_read_date) else: latest_normal_read_date = metering_type latest_normal_read_type = None mop_contract = era.mop_contract if mop_contract is None: mop_contract_name = '' mop_account = '' latest_mop_bill_date = 'No MOP' else: mop_contract_name = mop_contract.name mop_account = era.mop_account latest_mop_bill_date = sess.query(Bill.finish_date) \ .join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == mop_contract).order_by( Bill.finish_date.desc()).first() if latest_mop_bill_date is not None: latest_mop_bill_date = hh_format(latest_mop_bill_date[0]) dc_contract = era.dc_contract if dc_contract is None: dc_contract_name = '' dc_account = '' latest_dc_bill_date = 'No DC' else: dc_contract_name = dc_contract.name dc_account = era.dc_account latest_dc_bill_date = sess.query(Bill.finish_date) \ .join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == dc_contract).order_by( Bill.finish_date.desc()).first() if latest_dc_bill_date is not None: latest_dc_bill_date = hh_format(latest_dc_bill_date[0]) channel_values = [] for imp_related in [True, False]: for channel_type in CHANNEL_TYPES: if era.find_channel(sess, imp_related, channel_type) is None: channel_values.append('false') else: channel_values.append('true') imp_avg_months = None exp_avg_months = None for is_import in [True, False]: if metering_type == 'nhh': continue params = { 'supply_id': supply.id, 'year_start': year_start, 'year_finish': date, 'is_import': is_import } month_mds = tuple(md[0] * 2 for md in sess.execute(""" select max(hh_datum.value) as md from hh_datum join channel on (hh_datum.channel_id = channel.id) join era on (channel.era_id = era.id) where era.supply_id = :supply_id and hh_datum.start_date >= :year_start and hh_datum.start_date <= :year_finish and channel.channel_type = 'ACTIVE' and channel.imp_related = :is_import group by extract(month from (hh_datum.start_date at time zone 'utc')) order by md desc limit 3 
""", params=params)) avg_months = sum(month_mds) if len(month_mds) > 0: avg_months /= len(month_mds) if is_import: imp_avg_months = avg_months else: exp_avg_months = avg_months if (imp_avg_months is not None and imp_avg_months > 100) or \ (exp_avg_months is not None and exp_avg_months > 100): mandatory_hh = 'yes' else: mandatory_hh = 'no' imp_latest_supplier_bill_date = None exp_latest_supplier_bill_date = None for is_import in (True, False): for er in sess.query(Era).filter( Era.supply == era.supply, Era.start_date <= date).order_by( Era.start_date.desc()): if is_import: if er.imp_mpan_core is None: break else: supplier_contract = er.imp_supplier_contract else: if er.exp_mpan_core is None: break else: supplier_contract = er.exp_supplier_contract latest_bill_date = sess.query(Bill.finish_date) \ .join(Batch).filter( Bill.finish_date >= er.start_date, Bill.finish_date <= hh_min(er.finish_date, date), Bill.supply == supply, Batch.contract == supplier_contract).order_by( Bill.finish_date.desc()).first() if latest_bill_date is not None: latest_bill_date = hh_format(latest_bill_date[0]) if is_import: imp_latest_supplier_bill_date = latest_bill_date else: exp_latest_supplier_bill_date = latest_bill_date break meter_installation_date = sess.query(func.min(Era.start_date)) \ .filter(Era.supply == era.supply, Era.msn == era.msn).one()[0] if era.ssc is None: ssc_code = num_registers = None else: ssc_code = era.ssc.code num_registers = sess.query(MeasurementRequirement).filter( MeasurementRequirement.ssc == era.ssc).count() writer.writerow((( '' if value is None else str(value) )) for value in [ hh_format(date), physical_site.code, physical_site.name, ', '.join(site_codes), ', '.join(site_names), supply.id, supply.source.code, generator_type_str, supply.gsp_group.code, supply.dno.dno_code, voltage_level_code, metering_type, mandatory_hh, era.pc.code, era.mtc.code, era.cop.code, ssc_code, num_registers, mop_contract_name, mop_account, dc_contract_name, dc_account, era.msn, hh_format(meter_installation_date), latest_normal_read_date, latest_normal_read_type, latest_dc_bill_date, latest_mop_bill_date, hh_format(supply_start_date), hh_format(supply_finish_date, ongoing_str=''), era.properties ] + channel_values + [ era.imp_mpan_core, era.imp_sc, None if era.imp_llfc is None else era.imp_llfc.code, None if era.imp_llfc is None else era. imp_llfc.description, None if era.imp_supplier_contract is None else era.imp_supplier_contract.name, era.imp_supplier_account, imp_avg_months, imp_latest_supplier_bill_date ] + [ era.exp_mpan_core, era.exp_sc, None if era.exp_llfc is None else era.exp_llfc.code, None if era.exp_llfc is None else era. exp_llfc.description, None if era.exp_supplier_contract is None else era.exp_supplier_contract.name, era.exp_supplier_account, exp_avg_months, exp_latest_supplier_bill_date ]) # Avoid a long-running transaction sess.rollback() except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) writer.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user, compression): now = utc_datetime_now() report_context = {} try: comp = report_context['computer'] except KeyError: comp = report_context['computer'] = {} try: rate_cache = comp['rates'] except KeyError: rate_cache = comp['rates'] = {} try: ind_cont = report_context['contract_names'] except KeyError: ind_cont = report_context['contract_names'] = {} sess = None try: sess = Session() if scenario_props is None: scenario_contract = Contract.get_supplier_by_id(sess, scenario_id) scenario_props = scenario_contract.make_properties() base_name.append(scenario_contract.name) start_date = scenario_props['scenario_start'] if start_date is None: start_date = utc_datetime(now.year, now.month, 1) else: start_date = to_utc(start_date) base_name.append( hh_format(start_date).replace(' ', '_').replace(':', '').replace('-', '')) months = scenario_props['scenario_duration'] base_name.append('for') base_name.append(str(months)) base_name.append('months') finish_date = start_date + relativedelta(months=months) if 'forecast_from' in scenario_props: forecast_from = scenario_props['forecast_from'] else: forecast_from = None if forecast_from is None: forecast_from = chellow.computer.forecast_date() else: forecast_from = to_utc(forecast_from) sites = sess.query(Site).distinct().order_by(Site.code) if site_id is not None: site = Site.get_by_id(sess, site_id) sites = sites.filter(Site.id == site.id) base_name.append('site') base_name.append(site.code) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) base_name.append('supply') base_name.append(str(supply.id)) sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply) running_name, finished_name = chellow.dloads.make_names( '_'.join(base_name) + '.ods', user) rf = open(running_name, "wb") site_rows = [] era_rows = [] for rate_script in get_map_list(scenario_props, 'local_rates'): contract_id = rate_script['contract_id'] try: cont_cache = rate_cache[contract_id] except KeyError: cont_cache = rate_cache[contract_id] = {} try: rate_script_start = rate_script['start_date'] except KeyError: raise BadRequest( "Problem in the scenario properties. Can't find the " + "'start_date' key of the contract " + str(contract_id) + " in the 'local_rates' map.") try: rate_script_start = rate_script['start_date'] except KeyError: raise BadRequest( "Problem in the scenario properties. 
Can't find the " + "'start_date' key of the contract " + str(contract_id) + " in the 'local_rates' map.") for dt in hh_range(report_context, rate_script_start, rate_script['finish_date']): cont_cache[dt] = PropDict('scenario properties', rate_script['script']) for rate_script in get_map_list(scenario_props, 'industry_rates'): contract_name = rate_script['contract_name'] try: cont_cache = ind_cont[contract_name] except KeyError: cont_cache = ind_cont[contract_name] = {} rfinish = rate_script['finish_date'] if rfinish is None: raise BadRequest("For the industry rate " + contract_name + " the " "finish_date can't be null.") for dt in hh_range(report_context, rate_script['start_date'], rfinish): cont_cache[dt] = PropDict('scenario properties', rate_script['script']) era_maps = scenario_props.get('era_maps', {}) scenario_hh = scenario_props.get('hh_data', {}) era_header_titles = [ 'creation-date', 'imp-mpan-core', 'imp-supplier-contract', 'exp-mpan-core', 'exp-supplier-contract', 'metering-type', 'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id', 'site-name', 'associated-site-ids', 'month' ] site_header_titles = [ 'creation-date', 'site-id', 'site-name', 'associated-site-ids', 'month', 'metering-type', 'sources', 'generator-types' ] summary_titles = [ 'import-net-kwh', 'export-net-kwh', 'import-gen-kwh', 'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh', 'import-net-gbp', 'export-net-gbp', 'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp', 'billed-import-net-kwh', 'billed-import-net-gbp' ] title_dict = {} for cont_type, con_attr in (('mop', Era.mop_contract), ('dc', Era.dc_contract), ('imp-supplier', Era.imp_supplier_contract), ('exp-supplier', Era.exp_supplier_contract)): titles = [] title_dict[cont_type] = titles conts = sess.query(Contract).join(con_attr).join(Era.supply). \ join(Source).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date), Source.code.in_(('net', '3rd-party')) ).distinct().order_by(Contract.id) if supply_id is not None: conts = conts.filter(Era.supply_id == supply_id) for cont in conts: title_func = chellow.computer.contract_func( report_context, cont, 'virtual_bill_titles') if title_func is None: raise Exception("For the contract " + cont.name + " there doesn't seem to be a " "'virtual_bill_titles' function.") for title in title_func(): if title not in titles: titles.append(title) tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). 
\ join(Era).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date) ).order_by(Tpr.code).distinct() for tpr in tpr_query.filter(Era.imp_supplier_contract != null()): for suffix in ('-kwh', '-rate', '-gbp'): title_dict['imp-supplier'].append(tpr.code + suffix) for tpr in tpr_query.filter(Era.exp_supplier_contract != null()): for suffix in ('-kwh', '-rate', '-gbp'): title_dict['exp-supplier'].append(tpr.code + suffix) era_rows.append( era_header_titles + summary_titles + [None] + ['mop-' + t for t in title_dict['mop']] + [None] + ['dc-' + t for t in title_dict['dc']] + [None] + ['imp-supplier-' + t for t in title_dict['imp-supplier']] + [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']]) site_rows.append(site_header_titles + summary_titles) sites = sites.all() deltas = {} for site in sites: try: site_scenario_hh = scenario_hh[site.code] except KeyError: site_scenario_hh = scenario_hh[site.code] = {} site_deltas = deltas[site.code] = {'hhs': {}} delts = site_deltas['supply_deltas'] = {} for is_import in (True, False): delts[is_import] = {} for src in ('gen', 'net', 'gen-net', '3rd-party', '3rd-party-reverse', 'sub'): delts[is_import][src] = {'site': {}} earliest_delta = to_utc(Datetime.max) latest_delta = to_utc(Datetime.min) found_hh = False for typ in ('used', 'generated', 'parasitic', 'gen_net'): hh_str = site_scenario_hh.get(typ, '') hh_data = site_scenario_hh[typ] = {} for row in csv.reader(StringIO(hh_str)): cells = [cell.strip() for cell in row] if len(''.join(cells)) == 0: continue if len(cells) != 2: raise BadRequest( "Can't interpret the row " + str(cells) + " it should be of the form 'timestamp, kWh'") date_str, kw_str = cells ts = parse_hh_start(date_str) earliest_delta = min(ts, earliest_delta) latest_delta = max(ts, latest_delta) hh_data[ts] = float(kw_str) found_hh = True if not found_hh: continue scenario_used = site_scenario_hh['used'] scenario_generated = site_scenario_hh['generated'] scenario_parasitic = site_scenario_hh['parasitic'] scenario_gen_net = site_scenario_hh['gen_net'] month_start = utc_datetime(earliest_delta.year, earliest_delta.month) while month_start <= latest_delta: month_finish = month_start + relativedelta(months=1) - HH chunk_start = hh_max(month_start, earliest_delta) chunk_finish = hh_min(month_finish, latest_delta) site_ds = chellow.computer.SiteSource(sess, site, chunk_start, chunk_finish, forecast_from, report_context) hh_map = dict((h['start-date'], h) for h in site_ds.hh_data) for era in sess.query(Era).join(SiteEra).join(Pc).filter( SiteEra.site == site, SiteEra.is_physical == true(), Era.imp_mpan_core != null(), Pc.code != '00', Era.start_date <= chunk_finish, or_(Era.finish_date == null(), Era.finish_date >= chunk_start), ~Era.channels.any()): if supply_id is not None and era.supply_id != supply_id: continue ss_start = hh_max(era.start_date, chunk_start) ss_finish = hh_min(era.finish_date, chunk_finish) ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, True, report_context) for hh in ss.hh_data: sdatum = hh_map[hh['start-date']] sdatum['import-net-kwh'] += hh['msp-kwh'] sdatum['used-kwh'] += hh['msp-kwh'] for era in sess.query(Era).join(SiteEra).join(Pc).join( Supply).join(Source).filter( SiteEra.site == site, SiteEra.is_physical == true(), Era.imp_mpan_core != null(), Era.start_date <= chunk_finish, or_(Era.finish_date == null(), Era.finish_date >= chunk_start), Source.code == 'gen-net'): if supply_id is not None and era.supply_id != supply_id: continue ss_start = 
hh_max(era.start_date, chunk_start) ss_finish = hh_min(era.finish_date, chunk_finish) ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, False, report_context) for hh in ss.hh_data: sdatum = hh_map[hh['start-date']] try: sdatum['gen-net-kwh'] += hh['msp-kwh'] except KeyError: sdatum['gen-net-kwh'] = hh['msp-kwh'] for hh in site_ds.hh_data: hh_start = hh['start-date'] if hh_start in scenario_used: used_delt = scenario_used[hh_start] - hh['used-kwh'] imp_net_delt = 0 exp_net_delt = 0 if used_delt < 0: diff = hh['import-net-kwh'] + used_delt if diff < 0: imp_net_delt -= hh['import-net-kwh'] exp_net_delt -= diff else: imp_net_delt += used_delt else: diff = hh['export-net-kwh'] - used_delt if diff < 0: exp_net_delt -= hh['export-net-kwh'] imp_net_delt -= diff else: exp_net_delt -= used_delt try: delts[False]['net']['site'][hh_start] += \ exp_net_delt except KeyError: delts[False]['net']['site'][hh_start] = \ exp_net_delt try: delts[True]['net']['site'][hh_start] += \ imp_net_delt except KeyError: delts[True]['net']['site'][hh_start] = imp_net_delt hh['import-net-kwh'] += imp_net_delt hh['export-net-kwh'] += exp_net_delt hh['used-kwh'] += used_delt hh['msp-kwh'] -= exp_net_delt if hh_start in scenario_generated: imp_gen_delt = scenario_generated[hh_start] - \ hh['import-gen-kwh'] imp_net_delt = 0 exp_net_delt = 0 if imp_gen_delt < 0: diff = hh['export-net-kwh'] + imp_gen_delt if diff < 0: exp_net_delt -= hh['export-net-kwh'] imp_net_delt -= diff else: exp_net_delt += imp_gen_delt else: diff = hh['import-net-kwh'] - imp_gen_delt if diff < 0: imp_net_delt -= hh['import-net-kwh'] exp_net_delt -= diff else: imp_net_delt -= imp_gen_delt try: delts[True]['gen']['site'][hh_start] += \ imp_gen_delt except KeyError: delts[True]['gen']['site'][hh_start] = imp_gen_delt try: delts[False]['net']['site'][hh_start] += \ exp_net_delt except KeyError: delts[False]['net']['site'][hh_start] = \ exp_net_delt try: delts[True]['net']['site'][hh_start] += \ imp_net_delt except KeyError: delts[True]['net']['site'][hh_start] = imp_net_delt hh['import-net-kwh'] += imp_net_delt hh['export-net-kwh'] += exp_net_delt hh['import-gen-kwh'] += imp_gen_delt hh['msp-kwh'] -= imp_net_delt if hh_start in scenario_parasitic: exp_gen_delt = scenario_parasitic[hh_start] - \ hh['export-gen-kwh'] imp_net_delt = 0 exp_net_delt = 0 if exp_gen_delt < 0: diff = hh['import-net-kwh'] + exp_gen_delt if diff < 0: imp_net_delt -= hh['import-net-kwh'] exp_net_delt -= diff else: imp_net_delt += exp_gen_delt else: diff = hh['export-net-kwh'] - exp_gen_delt if diff < 0: exp_net_delt -= hh['export-net-kwh'] imp_net_delt -= diff else: exp_net_delt -= exp_gen_delt try: delts[False]['gen']['site'][hh_start] += \ imp_gen_delt except KeyError: delts[False]['gen']['site'][hh_start] = \ exp_gen_delt try: delts[False]['net']['site'][hh_start] += \ exp_net_delt except KeyError: delts[False]['net']['site'][hh_start] = \ exp_net_delt try: delts[True]['net']['site'][hh_start] += \ imp_net_delt except KeyError: delts[True]['net']['site'][hh_start] = imp_net_delt hh['import-net-kwh'] += imp_net_delt hh['export-net-kwh'] += exp_net_delt hh['export-gen-kwh'] += exp_gen_delt hh['msp-kwh'] -= imp_net_delt if hh_start in scenario_gen_net: gen_net_delt = scenario_gen_net[hh_start] - \ hh['gen-net-kwh'] try: delts[False]['gen-net']['site'][hh_start] += \ gen_net_delt except KeyError: delts[False]['gen-net']['site'][hh_start] = \ gen_net_delt hh['import-gen-kwh'] += gen_net_delt hh['export-net-kwh'] += gen_net_delt site_deltas['hhs'][hh_start] = hh 
month_start += relativedelta(months=1) month_start = start_date while month_start < finish_date: month_finish = month_start + relativedelta(months=1) - HH for site in sites: site_category = None site_sources = set() site_gen_types = set() site_month_data = defaultdict(int) calcs = [] for era in sess.query(Era).join(SiteEra).join(Pc).filter( SiteEra.site == site, SiteEra.is_physical == true(), Era.start_date <= month_finish, or_(Era.finish_date == null(), Era.finish_date >= month_start)).options( joinedload(Era.ssc), joinedload(Era.dc_contract), joinedload(Era.mop_contract), joinedload(Era.imp_supplier_contract), joinedload(Era.exp_supplier_contract), joinedload(Era.channels), joinedload(Era.imp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.exp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.cop), joinedload(Era.supply).joinedload(Supply.dno), joinedload(Era.supply).joinedload( Supply.gsp_group), joinedload(Era.supply).joinedload( Supply.source), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.pc), joinedload(Era.site_eras)).order_by(Pc.code): supply = era.supply if supply.generator_type is not None: site_gen_types.add(supply.generator_type.code) if supply_id is not None and supply.id != supply_id: continue ss_start = hh_max(era.start_date, month_start) ss_finish = hh_min(era.finish_date, month_finish) if era.imp_mpan_core is None: imp_ss = None else: sup_deltas = site_deltas['supply_deltas'][True][ supply.source.code] imp_ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, True, report_context, era_maps=era_maps, deltas=sup_deltas) if era.exp_mpan_core is None: exp_ss = None measurement_type = imp_ss.measurement_type else: sup_deltas = site_deltas['supply_deltas'][False][ supply.source.code] exp_ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, False, report_context, era_maps=era_maps, deltas=sup_deltas) measurement_type = exp_ss.measurement_type order = meter_order[measurement_type] calcs.append((order, era.imp_mpan_core, era.exp_mpan_core, imp_ss, exp_ss)) # Check if gen deltas haven't been consumed extra_sss = set() for is_imp in (True, False): sup_deltas = site_deltas['supply_deltas'][is_imp]['gen'] if len( list(t for t in sup_deltas['site'] if month_start <= t <= month_finish)) > 0: extra_sss.add(is_imp) displaced_era = chellow.computer.displaced_era( sess, report_context, site, month_start, month_finish, forecast_from, has_scenario_generation=len(extra_sss) > 0) if len(extra_sss) > 0: if True in extra_sss: sup_deltas = site_deltas['supply_deltas'][True]['gen'] imp_ss_name = site.code + "_extra_gen_TRUE" imp_ss = ScenarioSource( sess, month_start, month_finish, True, report_context, sup_deltas, displaced_era.imp_supplier_contract, imp_ss_name) else: imp_ss_name = imp_ss = None if False in extra_sss: exp_ss_name = site.code + "_extra_gen_FALSE" sup_deltas = site_deltas['supply_deltas'][False]['gen'] exp_ss = ScenarioSource( sess, month_start, month_finish, False, report_context, sup_deltas, displaced_era.imp_supplier_contract, imp_ss_name) else: exp_ss_name = exp_ss = None calcs.append((0, imp_ss_name, exp_ss_name, imp_ss, exp_ss)) # Check if exp net deltas haven't been consumed sup_deltas = site_deltas['supply_deltas'][False]['net'] if len( list(t for t in sup_deltas['site'] if month_start <= t <= month_finish)) > 0: ss_name = site.code + "_extra_net_export" ss = SupplySource(sess, month_start, month_finish, forecast_from, displaced_era, False, report_context, era_maps=era_maps, deltas=sup_deltas) calcs.append((0, None, ss_name, 
None, ss)) site_ds = chellow.computer.SiteSource(sess, site, month_start, month_finish, forecast_from, report_context, displaced_era, deltas=site_deltas) if displaced_era is not None and supply_id is None: month_data = {} for sname in ('import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'msp', 'used', 'used-3rd-party', 'billed-import-net'): for xname in ('kwh', 'gbp'): month_data[sname + '-' + xname] = 0 month_data['used-kwh'] = month_data['displaced-kwh'] = sum( hh['msp-kwh'] for hh in site_ds.hh_data) disp_supplier_contract = \ displaced_era.imp_supplier_contract disp_vb_function = chellow.computer.contract_func( report_context, disp_supplier_contract, 'displaced_virtual_bill') if disp_vb_function is None: raise BadRequest( "The supplier contract " + disp_supplier_contract.name + " doesn't have the displaced_virtual_bill() " "function.") disp_vb_function(site_ds) disp_supplier_bill = site_ds.supplier_bill try: gbp = disp_supplier_bill['net-gbp'] except KeyError: disp_supplier_bill['problem'] += 'For the supply ' + \ site_ds.mpan_core + ' the virtual bill ' + \ str(disp_supplier_bill) + ' from the contract ' + \ disp_supplier_contract.name + \ ' does not contain the net-gbp key.' month_data['used-gbp'] = month_data['displaced-gbp'] = \ site_ds.supplier_bill['net-gbp'] out = [ now, None, disp_supplier_contract.name, None, None, displaced_era.meter_category, 'displaced', None, None, None, None, site.code, site.name, '', month_finish ] + [month_data[t] for t in summary_titles ] + [None] + [None] * len(title_dict['mop']) + [ None ] + [None] * len(title_dict['dc']) + [ None ] + make_bill_row(title_dict['imp-supplier'], disp_supplier_bill) era_rows.append(out) for k, v in month_data.items(): site_month_data[k] += v for i, (order, imp_mpan_core, exp_mpan_core, imp_ss, exp_ss) in enumerate(sorted(calcs, key=str)): if imp_ss is None: source_code = exp_ss.source_code supply = exp_ss.supply else: source_code = imp_ss.source_code supply = imp_ss.supply site_sources.add(source_code) month_data = {} for name in ('import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'displaced', 'used', 'used-3rd-party', 'billed-import-net'): for sname in ('kwh', 'gbp'): month_data[name + '-' + sname] = 0 if imp_ss is not None: imp_supplier_contract = imp_ss.supplier_contract if imp_supplier_contract is not None: import_vb_function = contract_func( report_context, imp_supplier_contract, 'virtual_bill') if import_vb_function is None: raise BadRequest( "The supplier contract " + imp_supplier_contract.name + " doesn't have the virtual_bill() " "function.") import_vb_function(imp_ss) kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data) imp_supplier_bill = imp_ss.supplier_bill try: gbp = imp_supplier_bill['net-gbp'] except KeyError: gbp = 0 imp_supplier_bill['problem'] += \ 'For the supply ' + \ imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' 
if source_code in ('net', 'gen-net'): month_data['import-net-gbp'] += gbp month_data['import-net-kwh'] += kwh month_data['used-gbp'] += gbp month_data['used-kwh'] += kwh if source_code == 'gen-net': month_data['export-gen-kwh'] += kwh elif source_code == '3rd-party': month_data['import-3rd-party-gbp'] += gbp month_data['import-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] += gbp month_data['used-3rd-party-kwh'] += kwh month_data['used-gbp'] += gbp month_data['used-kwh'] += kwh elif source_code == '3rd-party-reverse': month_data['export-3rd-party-gbp'] += gbp month_data['export-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] -= gbp month_data['used-3rd-party-kwh'] -= kwh month_data['used-gbp'] -= gbp month_data['used-kwh'] -= kwh elif source_code == 'gen': month_data['import-gen-kwh'] += kwh if exp_ss is not None: exp_supplier_contract = exp_ss.supplier_contract if exp_supplier_contract is not None: export_vb_function = contract_func( report_context, exp_supplier_contract, 'virtual_bill') export_vb_function(exp_ss) kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data) exp_supplier_bill = exp_ss.supplier_bill try: gbp = exp_supplier_bill['net-gbp'] except KeyError: exp_supplier_bill['problem'] += \ 'For the supply ' + imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' if source_code in ('net', 'gen-net'): month_data['export-net-gbp'] += gbp month_data['export-net-kwh'] += kwh if source_code == 'gen-net': month_data['import-gen-kwh'] += kwh elif source_code == '3rd-party': month_data['export-3rd-party-gbp'] += gbp month_data['export-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] -= gbp month_data['used-3rd-party-kwh'] -= kwh month_data['used-gbp'] -= gbp month_data['used-kwh'] -= kwh elif source_code == '3rd-party-reverse': month_data['import-3rd-party-gbp'] += gbp month_data['import-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] += gbp month_data['used-3rd-party-kwh'] += kwh month_data['used-gbp'] += gbp month_data['used-kwh'] += kwh elif source_code == 'gen': month_data['export-gen-kwh'] += kwh sss = exp_ss if imp_ss is None else imp_ss dc_contract = sss.dc_contract if dc_contract is not None: sss.contract_func(dc_contract, 'virtual_bill')(sss) dc_bill = sss.dc_bill gbp = dc_bill['net-gbp'] mop_contract = sss.mop_contract if mop_contract is not None: mop_bill_function = sss.contract_func( mop_contract, 'virtual_bill') mop_bill_function(sss) mop_bill = sss.mop_bill gbp += mop_bill['net-gbp'] if source_code in ('3rd-party', '3rd-party-reverse'): month_data['import-3rd-party-gbp'] += gbp month_data['used-3rd-party-gbp'] += gbp else: month_data['import-net-gbp'] += gbp month_data['used-gbp'] += gbp generator_type = sss.generator_type_code if source_code in ('gen', 'gen-net'): site_gen_types.add(generator_type) era_category = sss.measurement_type if CATEGORY_ORDER[site_category] < \ CATEGORY_ORDER[era_category]: site_category = era_category era_associates = set() if mop_contract is not None: era_associates.update({ s.site.code for s in era.site_eras if not s.is_physical }) for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= sss.finish_date, Bill.finish_date >= sss.start_date): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = ( bill_finish - bill_start).total_seconds() + \ (30 * 60) overlap_duration = ( min(bill_finish, sss.finish_date) - max(bill_start, sss.start_date) ).total_seconds() + (30 * 
60) overlap_proportion = overlap_duration / \ bill_duration month_data['billed-import-net-kwh'] += \ overlap_proportion * float(bill.kwh) month_data['billed-import-net-gbp'] += \ overlap_proportion * float(bill.net) if imp_ss is None: imp_supplier_contract_name = None pc_code = exp_ss.pc_code else: if imp_supplier_contract is None: imp_supplier_contract_name = '' else: imp_supplier_contract_name = \ imp_supplier_contract.name pc_code = imp_ss.pc_code if exp_ss is None: exp_supplier_contract_name = None else: if exp_supplier_contract is None: exp_supplier_contract_name = '' else: exp_supplier_contract_name = \ exp_supplier_contract.name out = [ now, imp_mpan_core, imp_supplier_contract_name, exp_mpan_core, exp_supplier_contract_name, era_category, source_code, generator_type, sss.supply_name, sss.msn, pc_code, site.code, site.name, ','.join(sorted(list(era_associates))), month_finish] + [ month_data[t] for t in summary_titles] + [None] + \ make_bill_row(title_dict['mop'], mop_bill) + [None] + \ make_bill_row(title_dict['dc'], dc_bill) if imp_ss is None: out += [None] * (len(title_dict['imp-supplier']) + 1) else: out += [None] + make_bill_row( title_dict['imp-supplier'], imp_supplier_bill) if exp_ss is not None: out += [None] + make_bill_row( title_dict['exp-supplier'], exp_supplier_bill) for k, v in month_data.items(): site_month_data[k] += v era_rows.append(out) site_rows.append([ now, site.code, site.name, ', '.join( s.code for s in site.find_linked_sites( sess, month_start, month_finish)), month_finish, site_category, ', '.join(sorted(list(site_sources))), ', '.join(sorted(list(site_gen_types))) ] + [site_month_data[k] for k in summary_titles]) sess.rollback() write_spreadsheet(rf, compression, site_rows, era_rows) month_start += relativedelta(months=1) except BadRequest as e: msg = e.description + traceback.format_exc() sys.stderr.write(msg + '\n') site_rows.append(["Problem " + msg]) write_spreadsheet(rf, compression, site_rows, era_rows) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg + '\n') site_rows.append(["Problem " + msg]) write_spreadsheet(rf, compression, site_rows, era_rows) finally: if sess is not None: sess.close() try: rf.close() os.rename(running_name, finished_name) except BaseException: msg = traceback.format_exc() r_name, f_name = chellow.dloads.make_names('error.txt', user) ef = open(r_name, "w") ef.write(msg + '\n') ef.close()
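The report above apportions each bill's kWh and GBP to the month by the fraction of the bill's duration that overlaps the period, with both ranges treated as inclusive of their final half-hour. A minimal standalone sketch of that arithmetic, assuming plain UTC-naive datetimes in place of Chellow's internal objects (the function name and arguments here are illustrative, not Chellow API):

from datetime import datetime, timedelta

HH = timedelta(minutes=30)  # one half-hour settlement period

def overlap_proportion(bill_start, bill_finish, period_start, period_finish):
    """Fraction of a bill that falls inside a period.

    Both ranges include their final half-hour, so one HH is added to each
    duration, mirroring the '+ (30 * 60)' in the report.
    """
    bill_duration = (bill_finish - bill_start + HH).total_seconds()
    overlap = (
        min(bill_finish, period_finish) - max(bill_start, period_start) + HH
    ).total_seconds()
    return overlap / bill_duration

# Example: a two-month bill apportioned to January
p = overlap_proportion(
    datetime(2023, 1, 1), datetime(2023, 2, 28, 23, 30),
    datetime(2023, 1, 1), datetime(2023, 1, 31, 23, 30),
)
# p * float(bill.kwh) would give the kWh attributed to January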
def _process( sess, ecoes_lines, exclude_mpan_cores, ignore_mpan_cores_msn, f, show_ignored, report_run, ): writer = csv.writer(f, lineterminator="\n") mpans = [] for exp in (Era.imp_mpan_core, Era.exp_mpan_core): for (v,) in sess.execute( select(exp) .join(Supply) .join(Source) .join(Supply.dno) .filter( Party.dno_code.notin_(("88", "99")), Era.finish_date == null(), Source.code != "3rd-party", exp.notin_(exclude_mpan_cores), exp != null(), ) .distinct() .order_by(exp) ): mpans.append(v) ecoes_mpans = {} parser = iter(csv.reader(ecoes_lines)) next(parser) # Skip titles for values in parser: ecoes_titles = [ "mpan-core", "address-line-1", "address-line-2", "address-line-3", "address-line-4", "address-line-5", "address-line-6", "address-line-7", "address-line-8", "address-line-9", "post-code", "supplier", "registration-from", "mtc", "mtc-date", "llfc", "llfc-from", "pc", "ssc", "measurement-class", "energisation-status", "da", "dc", "mop", "mop-appoint-date", "gsp-group", "gsp-effective-from", "dno", "msn", "meter-install-date", "meter-type", "map-id", ] ecoes_row = dict(zip(ecoes_titles, map(str.strip, values))) mpan_core = ecoes_row["mpan-core"] mpan_spaces = " ".join( ( mpan_core[:2], mpan_core[2:6], mpan_core[6:10], mpan_core[-3:], ) ) if mpan_spaces in exclude_mpan_cores: continue ecoes_row["mpan_spaces"] = mpan_spaces if mpan_spaces in ecoes_mpans: prev_row = ecoes_mpans[mpan_spaces] prev_row["meter_count"] += 1 else: ecoes_row["meter_count"] = 1 ecoes_mpans[mpan_spaces] = ecoes_row titles = ( "mpan_core", "mpan_core_no_spaces", "ecoes_pc", "chellow_pc", "ecoes_mtc", "chellow_mtc", "ecoes_llfc", "chellow_llfc", "ecoes_ssc", "chellow_ssc", "ecoes_es", "chellow_es", "ecoes_supplier", "chellow_supplier", "chellow_supplier_contract_name", "ecoes_dc", "chellow_dc", "ecoes_mop", "chellow_mop", "ecoes_gsp_group", "chellow_gsp_group", "ecoes_msn", "chellow_msn", "ecoes_msn_install_date", "ecoes_meter_type", "chellow_meter_type", "ignored", "problem", ) writer.writerow(titles) for mpan_spaces, ecoes in sorted(ecoes_mpans.items()): problem = "" ignore = True diffs = [] try: ecoes_es = ecoes["energisation-status"] except KeyError as e: raise e ecoes_disconnected = ecoes_es == "" current_chell = mpan_spaces in mpans if ecoes["meter_count"] > 1: problem += ( f"There are {ecoes['meter_count']} meters associated with this MPAN " f"core in ECOES, but Chellow only supports one meter per supply at the " f"moment. If there really should be multiple meters for this supply, " f"let me know and I'll add support for it in Chellow." ) ignore = False if ecoes_disconnected and current_chell: problem += "Disconnected in ECOES, but current in Chellow. " ignore = False elif not ecoes_disconnected and not current_chell: problem += f"In ECOES (as {ecoes_es}) but disconnected in Chellow. 
" ignore = False if current_chell: mpans.remove(mpan_spaces) era = sess.execute( select(Era) .filter( Era.finish_date == null(), or_( Era.imp_mpan_core == mpan_spaces, Era.exp_mpan_core == mpan_spaces, ), ) .options( joinedload(Era.supply).joinedload(Supply.gsp_group), joinedload(Era.mop_contract) .joinedload(Contract.party) .joinedload(Party.participant), joinedload(Era.dc_contract) .joinedload(Contract.party) .joinedload(Party.participant), joinedload(Era.imp_supplier_contract) .joinedload(Contract.party) .joinedload(Party.participant), joinedload(Era.exp_supplier_contract) .joinedload(Contract.party) .joinedload(Party.participant), joinedload(Era.pc), joinedload(Era.imp_llfc), joinedload(Era.exp_llfc), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.ssc), joinedload(Era.energisation_status), joinedload(Era.channels), ) ).scalar() chellow_supply_id = era.supply.id chellow_era_id = era.id chellow_es = era.energisation_status.code if ecoes_es != chellow_es: problem += "The energisation statuses don't match. " ignore = False diffs.append("es") if not (ecoes_es == "D" and chellow_es == "D"): if era.imp_mpan_core == mpan_spaces: supplier_contract = era.imp_supplier_contract llfc = era.imp_llfc else: supplier_contract = era.exp_supplier_contract llfc = era.exp_llfc chellow_pc = era.pc.code try: if int(ecoes["pc"]) != int(chellow_pc): problem += "The PCs don't match. " ignore = False diffs.append("pc") except ValueError: problem += "Can't parse the PC. " ignore = False chellow_mtc = era.mtc.code try: if int(ecoes["mtc"]) != int(chellow_mtc): problem += "The MTCs don't match. " ignore = False diffs.append("mtc") except ValueError: problem += "Can't parse the MTC. " ignore = False chellow_llfc = llfc.code if ecoes["llfc"].zfill(3) != chellow_llfc: problem += "The LLFCs don't match. " ignore = False diffs.append("llfc") chellow_ssc = era.ssc if chellow_ssc is None: chellow_ssc = "" chellow_ssc_int = None else: chellow_ssc = chellow_ssc.code chellow_ssc_int = int(chellow_ssc) if len(ecoes["ssc"]) > 0: ecoes_ssc_int = int(ecoes["ssc"]) else: ecoes_ssc_int = None if ecoes_ssc_int != chellow_ssc_int and not ( ecoes_ssc_int is None and chellow_ssc_int is None ): problem += "The SSCs don't match. " ignore = False diffs.append("ssc") chellow_supplier = supplier_contract.party.participant.code chellow_supplier_contract_name = supplier_contract.name chellow_supplier_contract_id = supplier_contract.id if chellow_supplier != ecoes["supplier"]: problem += "The supplier codes don't match. " ignore = False diffs.append("supplier") dc_contract = era.dc_contract if dc_contract is None: chellow_dc = "" else: chellow_dc = dc_contract.party.participant.code if chellow_dc != ecoes["dc"]: problem += "The DC codes don't match. " ignore = False diffs.append("dc") mop_contract = era.mop_contract if mop_contract is None: chellow_mop = "" else: chellow_mop = mop_contract.party.participant.code if chellow_mop != ecoes["mop"]: problem += "The MOP codes don't match. " ignore = False diffs.append("mop") chellow_gsp_group = era.supply.gsp_group.code if chellow_gsp_group != ecoes["gsp-group"]: problem += "The GSP group codes don't match. " ignore = False diffs.append("gsp_group") chellow_msn = era.msn if chellow_msn is None: chellow_msn = "" if chellow_msn != ecoes["msn"]: problem += "The meter serial numbers don't match. 
" diffs.append("msn") if mpan_spaces not in ignore_mpan_cores_msn: ignore = False elif mpan_spaces in ignore_mpan_cores_msn: problem += ( "This MPAN core is in mpan_cores_ignore and yet the meter " "serial numbers do match. " ) chellow_meter_type = _meter_type(era) if chellow_meter_type != ecoes["meter-type"]: problem += ( "The meter types don't match. See " "https://dtc.mrasco.com/DataItem.aspx?ItemCounter=0483 " ) ignore = False diffs.append("meter_type") else: chellow_pc = "" chellow_mtc = "" chellow_llfc = "" chellow_ssc = "" chellow_es = "" chellow_supplier = "" chellow_supplier_contract_name = "" chellow_supplier_contract_id = None chellow_dc = "" chellow_mop = "" chellow_gsp_group = "" chellow_msn = "" chellow_meter_type = "" chellow_supply_id = None chellow_era_id = None if len(problem) > 0 and not (not show_ignored and ignore): values = { "mpan_core": mpan_spaces, "mpan_core_no_spaces": ecoes["mpan-core"], "ecoes_pc": ecoes["pc"], "chellow_pc": chellow_pc, "ecoes_mtc": ecoes["mtc"], "chellow_mtc": chellow_mtc, "chellow_mtc_date": ecoes["mtc-date"], "ecoes_llfc": ecoes["llfc"], "ecoes_llfc_from": ecoes["llfc-from"], "chellow_llfc": chellow_llfc, "ecoes_ssc": ecoes["ssc"], "chellow_ssc": chellow_ssc, "ecoes_es": ecoes["energisation-status"], "chellow_es": chellow_es, "ecoes_supplier": ecoes["supplier"], "ecoes_supplier_registration_from": ecoes["registration-from"], "chellow_supplier": chellow_supplier, "chellow_supplier_contract_name": chellow_supplier_contract_name, "ecoes_dc": ecoes["dc"], "chellow_dc": chellow_dc, "ecoes_mop": ecoes["mop"], "ecoes_mop_appoint_date": ecoes["mop-appoint-date"], "chellow_mop": chellow_mop, "ecoes_gsp_group": ecoes["gsp-group"], "ecoes_gsp_effective_from": ecoes["gsp-effective-from"], "chellow_gsp_group": chellow_gsp_group, "ecoes_msn": ecoes["msn"], "chellow_msn": chellow_msn, "ecoes_msn_install_date": ecoes["meter-install-date"], "ecoes_meter_type": ecoes["meter-type"], "chellow_meter_type": chellow_meter_type, "ignored": ignore, "problem": problem, } writer.writerow(values[t] for t in titles) values["chellow_supplier_contract_id"] = chellow_supplier_contract_id values["chellow_supply_id"] = chellow_supply_id values["diffs"] = diffs values["chellow_era_id"] = chellow_era_id report_run.insert_row(sess, "", titles, values, {}) sess.commit() sess.expunge_all() for mpan_core in mpans: supply = Supply.get_by_mpan_core(sess, mpan_core) era = supply.find_era_at(sess, None) if era.imp_mpan_core == mpan_core: supplier_contract = era.imp_supplier_contract llfc = era.imp_llfc else: supplier_contract = era.exp_supplier_contract llfc = era.exp_llfc ssc = "" if era.ssc is None else era.ssc.code es = era.energisation_status.code dc_contract = era.dc_contract dc = "" if dc_contract is None else dc_contract.party.participant.code mop_contract = era.mop_contract mop = "" if mop_contract is None else mop_contract.party.participant.code msn = "" if era.msn is None else era.msn meter_type = _meter_type(era) values = { "mpan_core": mpan_core, "mpan_core_no_spaces": mpan_core.replace(" ", ""), "ecoes_pc": "", "chellow_pc": era.pc.code, "ecoes_mtc": "", "chellow_mtc": era.mtc.code, "ecoes_llfc": "", "chellow_llfc": llfc.code, "ecoes_ssc": "", "chellow_ssc": ssc, "ecoes_es": "", "chellow_es": es, "ecoes_supplier": "", "chellow_supplier": supplier_contract.party.participant.code, "chellow_supplier_contract_name": supplier_contract.name, "ecoes_dc": "", "chellow_dc": dc, "ecoes_mop": "", "chellow_mop": mop, "ecoes_gsp_group": "", "chellow_gsp_group": supply.gsp_group.code, 
"ecoes_msn": "", "chellow_msn": msn, "ecoes_msn_install_date": "", "ecoes_meter_type": "", "chellow_meter_type": meter_type, "ignored": False, "problem": "In Chellow, but not in ECOES.", } writer.writerow(values[t] for t in titles) values["chellow_supplier_contract_id"] = supplier_contract.id values["chellow_supply_id"] = era.supply.id values["diffs"] = [] values["chellow_era_id"] = era.id report_run.insert_row(sess, "", titles, values, {})
def content(year, month, months, supply_id, user): tmp_file = sess = None try: sess = Session() supplies = sess.query(Supply).join(Era).distinct().options( joinedload(Supply.generator_type)) if supply_id is None: base_name = "supplies_monthly_duration_for_all_supplies_for_" + \ str(months) + "_to_" + str(year) + "_" + str(month) + ".csv" else: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) base_name = "supplies_monthly_duration_for_" + str(supply.id) + \ "_" + str(months) + "_to_" + str(year) + "_" + str(month) + \ ".csv" running_name, finished_name = chellow.dloads.make_names( base_name, user) tmp_file = open(running_name, "w") caches = {} start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \ relativedelta(months=months-1) field_names = ('supply-name', 'source-code', 'generator-type', 'month', 'pc-code', 'msn', 'site-code', 'site-name', 'metering-type', 'import-mpan-core', 'metered-import-kwh', 'metered-import-net-gbp', 'metered-import-estimated-kwh', 'billed-import-kwh', 'billed-import-net-gbp', 'export-mpan-core', 'metered-export-kwh', 'metered-export-estimated-kwh', 'billed-export-kwh', 'billed-export-net-gbp', 'problem', 'timestamp') tmp_file.write('supply-id,' + ','.join(field_names) + '\n') forecast_date = chellow.computer.forecast_date() for i in range(months): month_start = start_date + relativedelta(months=i) month_finish = month_start + relativedelta(months=1) - HH for supply in supplies.filter( Era.start_date <= month_finish, or_(Era.finish_date == null(), Era.finish_date >= month_start)): generator_type = supply.generator_type if generator_type is None: generator_type = '' else: generator_type = generator_type.code source_code = supply.source.code eras = supply.find_eras(sess, month_start, month_finish) era = eras[-1] metering_type = era.meter_category site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() values = { 'supply-name': supply.name, 'source-code': source_code, 'generator-type': generator_type, 'month': hh_format(month_finish), 'pc-code': era.pc.code, 'msn': era.msn, 'site-code': site.code, 'site-name': site.name, 'metering-type': metering_type, 'problem': '' } tmp_file.write(str(supply.id) + ',') for is_import, pol_name in [(True, 'import'), (False, 'export')]: if is_import: mpan_core = era.imp_mpan_core else: mpan_core = era.exp_mpan_core values[pol_name + '-mpan-core'] = mpan_core kwh = 0 est_kwh = 0 if metering_type in ['hh', 'amr']: est_kwh = sess.query(HhDatum.value).join(Channel) \ .join(Era).filter( HhDatum.status == 'E', Era.supply_id == supply.id, Channel.channel_type == 'ACTIVE', Channel.imp_related == is_import, HhDatum.start_date >= month_start, HhDatum.start_date <= month_finish).first() if est_kwh is None: est_kwh = 0 else: est_kwh = est_kwh[0] if not (is_import and source_code in ('net', 'gen-net')): kwh_sum = sess.query( cast(func.sum(HhDatum.value), Float)).join(Channel).join(Era).filter( Era.supply_id == supply.id, Channel.channel_type == 'ACTIVE', Channel.imp_related == is_import, HhDatum.start_date >= month_start, HhDatum.start_date <= month_finish).one()[0] if kwh_sum is not None: kwh += kwh_sum values['metered-' + pol_name + '-estimated-kwh'] = est_kwh values['metered-' + pol_name + '-kwh'] = kwh values['metered-' + pol_name + '-net-gbp'] = 0 values['billed-' + pol_name + '-kwh'] = 0 values['billed-' + pol_name + '-net-gbp'] = 0 values['billed-' + pol_name + '-apportioned-kwh'] = 0 values['billed-' + pol_name + '-apportioned-net-gbp'] = 0 
values['billed-' + pol_name + '-raw-kwh'] = 0 values['billed-' + pol_name + '-raw-net-gbp'] = 0 for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= month_finish, Bill.finish_date >= month_start): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = (bill_finish - bill_start).total_seconds() + 30 * 60 overlap_duration = (min(bill_finish, month_finish) - max( bill_start, month_start)).total_seconds() + 30 * 60 overlap_proportion = float(overlap_duration) / float( bill_duration) values['billed-import-net-gbp'] += \ overlap_proportion * float(bill.net) values['billed-import-kwh'] += \ overlap_proportion * float(bill.kwh) for era in eras: chunk_start = hh_max(era.start_date, month_start) chunk_finish = hh_min(era.finish_date, month_finish) import_mpan_core = era.imp_mpan_core if import_mpan_core is None: continue supplier_contract = era.imp_supplier_contract if source_code in ['net', 'gen-net', '3rd-party']: supply_source = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, True, caches) values['metered-import-kwh'] += sum( datum['msp-kwh'] for datum in supply_source.hh_data) import_vb_function = supply_source.contract_func( supplier_contract, 'virtual_bill') if import_vb_function is None: values['problem'] += "Can't find the " \ "virtual_bill function in the supplier " \ "contract. " else: import_vb_function(supply_source) values['metered-import-net-gbp'] += \ supply_source.supplier_bill['net-gbp'] supply_source.contract_func( era.dc_contract, 'virtual_bill')(supply_source) values['metered-import-net-gbp'] += \ supply_source.dc_bill['net-gbp'] mop_func = supply_source.contract_func( era.mop_contract, 'virtual_bill') if mop_func is None: values['problem'] += " MOP virtual_bill " \ "function can't be found." else: mop_func(supply_source) mop_bill = supply_source.mop_bill values['metered-import-net-gbp'] += \ mop_bill['net-gbp'] if len(mop_bill['problem']) > 0: values['problem'] += \ " MOP virtual bill problem: " + \ mop_bill['problem'] values['timestamp'] = int(time.time() * 1000) tmp_file.write(','.join('"' + str(values[name]) + '"' for name in field_names) + '\n') except BaseException: tmp_file.write(traceback.format_exc()) finally: if sess is not None: sess.close() tmp_file.close() os.rename(running_name, finished_name)
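Several of these reports walk calendar months by taking the start of the next month and subtracting one half-hour to get the month finish. A minimal sketch of that loop on its own, assuming plain datetimes and a 30-minute HH step rather than Chellow's utilities:

from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta

HH = timedelta(minutes=30)

def month_ranges(start, months):
    """Yield (month_start, month_finish) pairs; finish is the last half-hour."""
    for i in range(months):
        month_start = start + relativedelta(months=i)
        month_finish = month_start + relativedelta(months=1) - HH
        yield month_start, month_finish

for ms, mf in month_ranges(datetime(2023, 1, 1, tzinfo=timezone.utc), 3):
    print(ms, mf)  # e.g. 2023-01-01 00:00 to 2023-01-31 23:30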
def run(self): sess = None try: sess = Session() batch = Batch.get_by_id(sess, self.batch_id) bill_types = keydefaultdict( lambda k: BillType.get_by_code(sess, k)) tprs = keydefaultdict(lambda k: None if k is None else Tpr.get_by_code(sess, k)) read_types = keydefaultdict( lambda k: ReadType.get_by_code(sess, k)) for bf in (sess.query(BatchFile).filter( BatchFile.batch == batch).order_by( BatchFile.upload_timestamp)): self.parser = _process_batch_file(sess, bf, self._log) for self.bill_num, raw_bill in enumerate( self.parser.make_raw_bills()): if "error" in raw_bill: self.failed_bills.append(raw_bill) else: try: mpan_core = raw_bill["mpan_core"] supply = Supply.get_by_mpan_core(sess, mpan_core) with sess.begin_nested(): bill = batch.insert_bill( sess, raw_bill["account"], raw_bill["reference"], raw_bill["issue_date"], raw_bill["start_date"], raw_bill["finish_date"], raw_bill["kwh"], raw_bill["net"], raw_bill["vat"], raw_bill["gross"], bill_types[raw_bill["bill_type_code"]], raw_bill["breakdown"], supply, ) for raw_read in raw_bill["reads"]: bill.insert_read( sess, tprs[raw_read["tpr_code"]], raw_read["coefficient"], raw_read["units"], raw_read["msn"], raw_read["mpan"], raw_read["prev_date"], raw_read["prev_value"], read_types[raw_read["prev_type_code"]], raw_read["pres_date"], raw_read["pres_value"], read_types[raw_read["pres_type_code"]], ) self.successful_bills.append(raw_bill) except KeyError as e: err = raw_bill.get("error", "") raw_bill["error"] = err + " " + str(e) self.failed_bills.append(raw_bill) except BadRequest as e: raw_bill["error"] = str(e.description) self.failed_bills.append(raw_bill) if len(self.failed_bills) == 0: sess.commit() self._log( "All the bills have been successfully loaded and attached " "to the batch.") else: sess.rollback() self._log(f"The import has finished, but there were " f"{len(self.failed_bills)} failures, and so the " f"whole import has been rolled back.") except BadRequest as e: sess.rollback() self._log(f"Problem: {e.description}") except BaseException: sess.rollback() self._log(f"I've encountered a problem: {traceback.format_exc()}") finally: if sess is not None: sess.close()
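The importer caches BillType, Tpr and ReadType lookups in a keydefaultdict, a defaultdict variant whose factory is called with the missing key so each code is fetched at most once. Chellow ships its own implementation; the sketch below is a minimal stand-in with a hypothetical fetch function in place of the database calls:

from collections import defaultdict

class keydefaultdict(defaultdict):
    """defaultdict whose default_factory receives the missing key."""

    def __missing__(self, key):
        if self.default_factory is None:
            raise KeyError(key)
        value = self.default_factory(key)
        self[key] = value
        return value

# Hypothetical use: one lookup per distinct code, results cached thereafter
codes_seen = []

def fetch(code):
    codes_seen.append(code)
    return {"code": code}

cache = keydefaultdict(fetch)
cache["N"], cache["N"], cache["C"]
assert codes_seen == ["N", "C"]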
def content(supply_id, start_date, finish_date, user): caches = {} try: sess = Session() supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() prev_titles = None running_name, finished_name = chellow.dloads.make_names( 'supply_virtual_bills_hh_' + str(supply_id) + '.csv', user) f = open(running_name, mode='w', newline='') w = csv.writer(f, lineterminator='\n') for hh_start in hh_range(caches, start_date, finish_date): era = sess.query(Era).filter( Era.supply == supply, Era.start_date <= hh_start, or_(Era.finish_date == null(), Era.finish_date >= hh_start)).one() site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() ds = chellow.computer.SupplySource(sess, hh_start, hh_start, forecast_date, era, True, caches) titles = [ 'MPAN Core', 'Site Code', 'Site Name', 'Account', 'HH Start', '' ] output_line = [ ds.mpan_core, site.code, site.name, ds.supplier_account, hh_format(ds.start_date), '' ] mop_titles = ds.contract_func(era.mop_contract, 'virtual_bill_titles')() titles.extend(['mop-' + t for t in mop_titles]) ds.contract_func(era.mop_contract, 'virtual_bill')(ds) bill = ds.mop_bill for title in mop_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) output_line.append('') dc_titles = ds.contract_func(era.dc_contract, 'virtual_bill_titles')() titles.append('') titles.extend(['dc-' + t for t in dc_titles]) ds.contract_func(era.dc_contract, 'virtual_bill')(ds) bill = ds.dc_bill for title in dc_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if era.imp_supplier_contract is not None: contract = era.imp_supplier_contract output_line.append('') supplier_titles = ds.contract_func(contract, 'virtual_bill_titles')() titles.append('') titles.extend(['imp-supplier-' + t for t in supplier_titles]) ds.contract_func(contract, 'virtual_bill')(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if era.exp_supplier_contract is not None: contract = era.exp_supplier_contract ds = chellow.computer.SupplySource(sess, hh_start, hh_start, forecast_date, era, False, caches) output_line.append('') supplier_titles = ds.contract_func(contract, 'virtual_bill_titles')() titles.append('') titles.extend(['exp-supplier-' + t for t in supplier_titles]) ds.contract_func(contract, 'virtual_bill')(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if titles != prev_titles: prev_titles = titles w.writerow(titles) w.writerow(output_line) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) w.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
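Because the virtual-bill titles can differ from one half-hour's era to the next, the report only writes a header row when the titles change from the previous row's. A small sketch of that pattern over an arbitrary source of (titles, values) pairs; the function and its input are illustrative only:

import csv
import io

def write_with_changing_titles(rows):
    """rows is an iterable of (titles, values); emit a header only on change."""
    out = io.StringIO()
    w = csv.writer(out, lineterminator="\n")
    prev_titles = None
    for titles, values in rows:
        if titles != prev_titles:
            w.writerow(titles)
            prev_titles = titles
        w.writerow(values)
    return out.getvalue()

print(
    write_with_changing_titles(
        [(["a", "b"], [1, 2]), (["a", "b"], [3, 4]), (["a", "c"], [5, 6])]
    )
)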
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user): now = Datetime.now(pytz.utc) report_context = {} future_funcs = {} report_context['future_funcs'] = future_funcs sess = None try: sess = Session() if scenario_props is None: scenario_contract = Contract.get_supplier_by_id(sess, scenario_id) scenario_props = scenario_contract.make_properties() base_name.append(scenario_contract.name) for contract in sess.query(Contract).join(MarketRole).filter( MarketRole.code == 'Z'): try: props = scenario_props[contract.name] except KeyError: continue try: rate_start = props['start_date'] except KeyError: raise BadRequest( "In " + scenario_contract.name + " for the rate " + contract.name + " the start_date is missing.") if rate_start is not None: rate_start = rate_start.replace(tzinfo=pytz.utc) lib = importlib.import_module('chellow.' + contract.name) if hasattr(lib, 'create_future_func'): future_funcs[contract.id] = { 'start_date': rate_start, 'func': lib.create_future_func( props['multiplier'], props['constant'])} start_date = scenario_props['scenario_start'] if start_date is None: start_date = Datetime( now.year, now.month, 1, tzinfo=pytz.utc) else: start_date = start_date.replace(tzinfo=pytz.utc) base_name.append( hh_format(start_date).replace(' ', '_').replace(':', ''). replace('-', '')) months = scenario_props['scenario_duration'] base_name.append('for') base_name.append(str(months)) base_name.append('months') finish_date = start_date + relativedelta(months=months) if 'kwh_start' in scenario_props: kwh_start = scenario_props['kwh_start'] else: kwh_start = None if kwh_start is None: kwh_start = chellow.computer.forecast_date() else: kwh_start = kwh_start.replace(tzinfo=pytz.utc) sites = sess.query(Site).join(SiteEra).join(Era).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date)).distinct().order_by(Site.code) if site_id is not None: site = Site.get_by_id(sess, site_id) sites = sites.filter(Site.id == site.id) base_name.append('site') base_name.append(site.code) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) base_name.append('supply') base_name.append(str(supply.id)) sites = sites.filter(Era.supply == supply) running_name, finished_name = chellow.dloads.make_names( '_'.join(base_name) + '.ods', user) rf = open(running_name, "wb") f = odswriter.writer(rf, '1.1') group_tab = f.new_sheet("Site Level") sup_tab = f.new_sheet("Supply Level") changes = defaultdict(list, {}) try: kw_changes = scenario_props['kw_changes'] except KeyError: kw_changes = '' for row in csv.reader(io.StringIO(kw_changes)): if len(''.join(row).strip()) == 0: continue if len(row) != 4: raise BadRequest( "Can't interpret the row " + str(row) + " it should be of " "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER") site_code, typ, date_str, kw_str = row date = Datetime.strptime(date_str.strip(), "%Y-%m-%d").replace( tzinfo=pytz.utc) changes[site_code.strip()].append( { 'type': typ.strip(), 'date': date, 'multiplier': float(kw_str)}) sup_header_titles = [ 'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id', 'site-name', 'associated-site-ids', 'month'] site_header_titles = [ 'site-id', 'site-name', 'associated-site-ids', 'month', 'metering-type', 'sources', 'generator-types'] summary_titles = [ 'import-net-kwh', 'export-net-kwh', 'import-gen-kwh', 'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh', 
'import-net-gbp', 'export-net-gbp', 'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp', 'billed-import-net-kwh', 'billed-import-net-gbp'] title_dict = {} for cont_type, con_attr in ( ('mop', Era.mop_contract), ('dc', Era.hhdc_contract), ('imp-supplier', Era.imp_supplier_contract), ('exp-supplier', Era.exp_supplier_contract)): titles = [] title_dict[cont_type] = titles conts = sess.query(Contract).join(con_attr) \ .join(Era.supply).join(Source).filter( Era.start_date <= start_date, or_( Era.finish_date == null(), Era.finish_date >= start_date), Source.code.in_(('net', '3rd-party')) ).distinct().order_by(Contract.id) if supply_id is not None: conts = conts.filter(Era.supply_id == supply_id) for cont in conts: title_func = chellow.computer.contract_func( report_context, cont, 'virtual_bill_titles', None) if title_func is None: raise Exception( "For the contract " + cont.name + " there doesn't seem to be a " "'virtual_bill_titles' function.") for title in title_func(): if title not in titles: titles.append(title) sup_tab.writerow( sup_header_titles + summary_titles + [None] + ['mop-' + t for t in title_dict['mop']] + [None] + ['dc-' + t for t in title_dict['dc']] + [None] + ['imp-supplier-' + t for t in title_dict['imp-supplier']] + [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']]) group_tab.writerow(site_header_titles + summary_titles) sites = sites.all() month_start = start_date while month_start < finish_date: month_finish = month_start + relativedelta(months=1) - HH for site in sites: site_changes = changes[site.code] site_associates = set() site_category = None site_sources = set() site_gen_types = set() site_month_data = defaultdict(int) for group in site.groups( sess, month_start, month_finish, False): site_associates.update( set( s.code for s in group.sites if s.code != site.code)) for cand_supply in group.supplies: site_sources.add(cand_supply.source.code) if cand_supply.generator_type is not None: site_gen_types.add(cand_supply.generator_type.code) for cand_era in sess.query(Era).filter( Era.supply == cand_supply, Era.start_date <= group.finish_date, or_( Era.finish_date == null(), Era.finish_date >= group.start_date)). 
\ options( joinedload(Era.channels), joinedload(Era.pc), joinedload(Era.mtc).joinedload( Mtc.meter_type)): if site_category != 'hh': if cand_era.pc.code == '00': site_category = 'hh' elif site_category != 'amr': if len(cand_era.channels) > 0: site_category = 'amr' elif site_category != 'nhh': if cand_era.mtc.meter_type.code \ not in ['UM', 'PH']: site_category = 'nhh' else: site_category = 'unmetered' for group in site.groups( sess, month_start, month_finish, True): calcs = [] deltas = defaultdict(int) group_associates = set( s.code for s in group.sites if s.code != site.code) for supply in group.supplies: if supply_id is not None and supply.id != supply_id: continue for era in sess.query(Era).join(Supply) \ .join(Source).filter( Era.supply == supply, Era.start_date <= group.finish_date, or_( Era.finish_date == null(), Era.finish_date >= group.start_date)) \ .options( joinedload(Era.ssc), joinedload(Era.hhdc_contract), joinedload(Era.mop_contract), joinedload(Era.imp_supplier_contract), joinedload(Era.exp_supplier_contract), joinedload(Era.channels), joinedload(Era.imp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.exp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.cop), joinedload(Era.supply).joinedload( Supply.dno_contract), joinedload(Era.mtc).joinedload( Mtc.meter_type)): if era.start_date > group.start_date: ss_start = era.start_date else: ss_start = group.start_date if hh_before(era.finish_date, group.finish_date): ss_finish = era.finish_date else: ss_finish = group.finish_date if era.imp_mpan_core is None: imp_ss = None else: imp_ss = SupplySource( sess, ss_start, ss_finish, kwh_start, era, True, None, report_context) if era.exp_mpan_core is None: exp_ss = None measurement_type = imp_ss.measurement_type else: exp_ss = SupplySource( sess, ss_start, ss_finish, kwh_start, era, False, None, report_context) measurement_type = exp_ss.measurement_type order = meter_order[measurement_type] calcs.append( ( order, era.imp_mpan_core, era.exp_mpan_core, imp_ss, exp_ss)) if imp_ss is not None and len(era.channels) == 0: for hh in imp_ss.hh_data: deltas[hh['start-date']] += hh['msp-kwh'] imp_net_delts = defaultdict(int) exp_net_delts = defaultdict(int) imp_gen_delts = defaultdict(int) displaced_era = chellow.computer.displaced_era( sess, group, group.start_date, group.finish_date) site_ds = chellow.computer.SiteSource( sess, site, group.start_date, group.finish_date, kwh_start, None, report_context, displaced_era) for hh in site_ds.hh_data: try: delta = deltas[hh['start-date']] hh['import-net-kwh'] += delta hh['used-kwh'] += delta except KeyError: pass for hh in site_ds.hh_data: for change in site_changes: if change['type'] == 'used' and \ change['date'] <= hh['start-date']: used = change['multiplier'] * hh['used-kwh'] exp_net = max( 0, hh['import-gen-kwh'] - hh['export-gen-kwh'] - used) exp_net_delt = exp_net - hh['export-net-kwh'] exp_net_delts[hh['start-date']] += exp_net_delt displaced = hh['import-gen-kwh'] - \ hh['export-gen-kwh'] - exp_net imp_net = used - displaced imp_delt = imp_net - hh['import-net-kwh'] imp_net_delts[hh['start-date']] += imp_delt hh['import-net-kwh'] = imp_net hh['used-kwh'] = used hh['export-net-kwh'] = exp_net hh['msp-kwh'] = displaced elif change['type'] == 'generated' and \ change['date'] <= hh['start-date']: imp_gen = change['multiplier'] * \ hh['import-gen-kwh'] imp_gen_delt = imp_gen - hh['import-gen-kwh'] exp_net = max( 0, imp_gen - hh['export-gen-kwh'] - hh['used-kwh']) exp_net_delt = exp_net - hh['export-net-kwh'] exp_net_delts[hh['start-date']] 
+= exp_net_delt displaced = imp_gen - hh['export-gen-kwh'] - \ exp_net imp_net = hh['used-kwh'] - displaced imp_net_delt = imp_net - hh['import-net-kwh'] imp_net_delts[hh['start-date']] += imp_net_delt imp_gen_delts[hh['start-date']] += imp_gen_delt hh['import-net-kwh'] = imp_net hh['export-net-kwh'] = exp_net hh['import-gen-kwh'] = imp_gen hh['msp-kwh'] = displaced if displaced_era is not None and supply_id is None: month_data = {} for sname in ( 'import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'msp', 'used', 'used-3rd-party', 'billed-import-net'): for xname in ('kwh', 'gbp'): month_data[sname + '-' + xname] = 0 month_data['used-kwh'] = \ month_data['displaced-kwh'] = \ sum(hh['msp-kwh'] for hh in site_ds.hh_data) disp_supplier_contract = \ displaced_era.imp_supplier_contract disp_vb_function = chellow.computer.contract_func( report_context, disp_supplier_contract, 'displaced_virtual_bill', None) if disp_vb_function is None: raise BadRequest( "The supplier contract " + disp_supplier_contract.name + " doesn't have the displaced_virtual_bill() " "function.") disp_vb_function(site_ds) disp_supplier_bill = site_ds.supplier_bill try: gbp = disp_supplier_bill['net-gbp'] except KeyError: disp_supplier_bill['problem'] += \ 'For the supply ' + \ site_ds.mpan_core + \ ' the virtual bill ' + \ str(disp_supplier_bill) + \ ' from the contract ' + \ disp_supplier_contract.name + \ ' does not contain the net-gbp key.' month_data['used-gbp'] = \ month_data['displaced-gbp'] = \ site_ds.supplier_bill['net-gbp'] out = [ None, None, displaced_era.make_meter_category(), 'displaced', None, None, None, None, site.code, site.name, ','.join(sorted(list(group_associates))), month_finish] + \ [month_data[t] for t in summary_titles] sup_tab.writerow(out) for k, v in month_data.items(): site_month_data[k] += v for i, ( order, imp_mpan_core, exp_mpan_core, imp_ss, exp_ss) in enumerate(sorted(calcs, key=str)): if imp_ss is None: era = exp_ss.era else: era = imp_ss.era supply = era.supply source = supply.source source_code = source.code site_sources.add(source_code) month_data = {} for name in ( 'import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'displaced', 'used', 'used-3rd-party', 'billed-import-net'): for sname in ('kwh', 'gbp'): month_data[name + '-' + sname] = 0 if source_code == 'net': delts = imp_net_delts elif source_code == 'gen': delts = imp_gen_delts else: delts = [] if len(delts) > 0 and imp_ss is not None: for hh in imp_ss.hh_data: diff = hh['msp-kwh'] + delts[hh['start-date']] if diff < 0: hh['msp-kwh'] = 0 hh['msp-kw'] = 0 delts[hh['start-date']] -= hh['msp-kwh'] else: hh['msp-kwh'] += delts[hh['start-date']] hh['msp-kw'] += hh['msp-kwh'] / 2 del delts[hh['start-date']] left_kwh = sum(delts.values()) if left_kwh > 0: first_hh = imp_ss.hh_data[0] first_hh['msp-kwh'] += left_kwh first_hh['msp-kw'] += left_kwh / 2 imp_supplier_contract = era.imp_supplier_contract if imp_supplier_contract is not None: import_vb_function = contract_func( report_context, imp_supplier_contract, 'virtual_bill', None) if import_vb_function is None: raise BadRequest( "The supplier contract " + imp_supplier_contract.name + " doesn't have the virtual_bill() " "function.") import_vb_function(imp_ss) imp_supplier_bill = imp_ss.supplier_bill try: gbp = imp_supplier_bill['net-gbp'] except KeyError: imp_supplier_bill['problem'] += \ 'For the supply ' + \ imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' 
+ \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' if source_code in ('net', 'gen-net'): month_data['import-net-gbp'] += gbp month_data['used-gbp'] += gbp elif source_code == '3rd-party': month_data['import-3rd-party-gbp'] += gbp month_data['used-gbp'] += gbp elif source_code == '3rd-party-reverse': month_data['export-3rd-party-gbp'] += gbp month_data['used-gbp'] -= gbp kwh = sum( hh['msp-kwh'] for hh in imp_ss.hh_data) if source_code in ('net', 'gen-net'): month_data['import-net-kwh'] += kwh month_data['used-kwh'] += kwh elif source_code == '3rd-party': month_data['import-3rd-party-kwh'] += kwh month_data['used-kwh'] += kwh elif source_code == '3rd-party-reverse': month_data['export-3rd-party-kwh'] += kwh month_data['used-kwh'] -= kwh elif source_code in ('gen', 'gen-net'): month_data['import-gen-kwh'] += kwh exp_supplier_contract = era.exp_supplier_contract if exp_supplier_contract is None: kwh = sess.query( func.coalesce( func.sum( cast(HhDatum.value, Float)), 0)). \ join(Channel).filter( Channel.era == era, Channel.channel_type == 'ACTIVE', Channel.imp_related == false()).scalar() if source_code == 'gen': month_data['export-net-kwh'] += kwh else: export_vb_function = contract_func( report_context, exp_supplier_contract, 'virtual_bill', None) export_vb_function(exp_ss) exp_supplier_bill = exp_ss.supplier_bill try: gbp = exp_supplier_bill['net-gbp'] except KeyError: exp_supplier_bill['problem'] += \ 'For the supply ' + \ imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data) if source_code in ('net', 'gen-net'): month_data['export-net-kwh'] += kwh month_data['export-net-gbp'] += gbp elif source_code in \ ('3rd-party', '3rd-party-reverse'): month_data['export-3rd-party-kwh'] += kwh month_data['export-3rd-party-gbp'] += gbp month_data['used-kwh'] -= kwh month_data['used-gbp'] -= gbp elif source_code == 'gen': month_data['export-gen-kwh'] += kwh sss = exp_ss if imp_ss is None else imp_ss dc_contract = era.hhdc_contract sss.contract_func( dc_contract, 'virtual_bill')(sss) dc_bill = sss.dc_bill gbp = dc_bill['net-gbp'] mop_contract = era.mop_contract mop_bill_function = sss.contract_func( mop_contract, 'virtual_bill') mop_bill_function(sss) mop_bill = sss.mop_bill gbp += mop_bill['net-gbp'] if source_code in ('3rd-party', '3rd-party-reverse'): month_data['import-3rd-party-gbp'] += gbp else: month_data['import-net-gbp'] += gbp month_data['used-gbp'] += gbp if source_code in ('gen', 'gen-net'): generator_type = supply.generator_type.code site_gen_types.add(generator_type) else: generator_type = None sup_category = era.make_meter_category() if CATEGORY_ORDER[site_category] < \ CATEGORY_ORDER[sup_category]: site_category = sup_category for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= sss.finish_date, Bill.finish_date >= sss.start_date): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = ( bill_finish - bill_start).total_seconds() + \ (30 * 60) overlap_duration = ( min(bill_finish, sss.finish_date) - max(bill_start, sss.start_date) ).total_seconds() + (30 * 60) overlap_proportion = \ float(overlap_duration) / bill_duration month_data['billed-import-net-kwh'] += \ overlap_proportion * float(bill.kwh) month_data['billed-import-net-gbp'] += \ overlap_proportion * float(bill.net) out = [ era.imp_mpan_core, era.exp_mpan_core, sup_category, source_code, generator_type, 
supply.name, era.msn, era.pc.code, site.code, site.name, ','.join(sorted(list(site_associates))), month_finish] + [ month_data[t] for t in summary_titles] + [None] + [ (mop_bill[t] if t in mop_bill else None) for t in title_dict['mop']] + [None] + \ [(dc_bill[t] if t in dc_bill else None) for t in title_dict['dc']] if imp_supplier_contract is None: out += [None] * \ (len(title_dict['imp-supplier']) + 1) else: out += [None] + [ ( imp_supplier_bill[t] if t in imp_supplier_bill else None) for t in title_dict['imp-supplier']] if exp_supplier_contract is not None: out += [None] + [ ( exp_supplier_bill[t] if t in exp_supplier_bill else None) for t in title_dict['exp-supplier']] for k, v in month_data.items(): site_month_data[k] += v sup_tab.writerow(out) group_tab.writerow( [ site.code, site.name, ''.join(sorted(list(site_associates))), month_finish, site_category, ', '.join(sorted(list(site_sources))), ', '.join(sorted(list(site_gen_types)))] + [site_month_data[k] for k in summary_titles]) sess.rollback() month_start += relativedelta(months=1) except BadRequest as e: msg = e.description + traceback.format_exc() sys.stderr.write(msg + '\n') group_tab.writerow(["Problem " + msg]) except: msg = traceback.format_exc() sys.stderr.write(msg + '\n') group_tab.writerow(["Problem " + msg]) finally: if sess is not None: sess.close() try: f.close() rf.close() os.rename(running_name, finished_name) except: msg = traceback.format_exc() r_name, f_name = chellow.dloads.make_names('error.txt', user) ef = open(r_name, "w") ef.write(msg + '\n') ef.close()
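Both scenario reports derive a site-level metering category by escalating through the per-era categories, keeping the highest-precision one seen. A hedged sketch of that ranking; the CATEGORY_ORDER values here, including the entry for the initial None, are assumed rather than copied from the report module:

# Assumed ordering; Chellow's reports define their own CATEGORY_ORDER.
CATEGORY_ORDER = {None: 0, "unmetered": 1, "nhh": 2, "amr": 3, "hh": 4}

def site_category(era_categories):
    """Return the highest-precision category seen across a site's eras."""
    category = None
    for era_category in era_categories:
        if CATEGORY_ORDER[category] < CATEGORY_ORDER[era_category]:
            category = era_category
    return category

assert site_category(["nhh", "amr", "nhh"]) == "amr"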
def content(supply_id, start_date, finish_date, sess): caches = {} try: supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() prev_titles = None hh_start = start_date while not hh_start > finish_date: era = sess.query(Era).filter( Era.supply == supply, Era.start_date <= hh_start, or_( Era.finish_date == null(), Era.finish_date >= hh_start)).one() site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() ds = chellow.computer.SupplySource( sess, hh_start, hh_start, forecast_date, era, True, None, caches) titles = [ 'MPAN Core', 'Site Code', 'Site Name', 'Account', 'HH Start', ''] output_line = [ ds.mpan_core, site.code, site.name, ds.supplier_account, hh_format(ds.start_date), ''] mop_titles = ds.contract_func( era.mop_contract, 'virtual_bill_titles')() titles.extend(['mop-' + t for t in mop_titles]) ds.contract_func(era.mop_contract, 'virtual_bill')(ds) bill = ds.mop_bill for title in mop_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) output_line.append('') dc_titles = ds.contract_func( era.hhdc_contract, 'virtual_bill_titles')() titles.append('') titles.extend(['dc-' + t for t in dc_titles]) ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds) bill = ds.dc_bill for title in dc_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if era.imp_supplier_contract is not None: contract = era.imp_supplier_contract output_line.append('') supplier_titles = ds.contract_func( contract, 'virtual_bill_titles')() titles.append('') titles.extend(['imp-supplier-' + t for t in supplier_titles]) ds.contract_func(contract, 'virtual_bill')(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if era.exp_supplier_contract is not None: contract = era.exp_supplier_contract ds = chellow.computer.SupplySource( sess, hh_start, hh_start, forecast_date, era, False, None, caches) output_line.append('') supplier_titles = ds.contract_func( contract, 'virtual_bill_titles')() titles.append('') titles.extend(['exp-supplier-' + t for t in supplier_titles]) ds.contract_func(contract, 'virtual_bill')(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(bill.get(title, '')) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, bill[k]]) if titles != prev_titles: prev_titles = titles yield ','.join('"' + str(v) + '"' for v in titles) + '\n' yield ','.join('"' + str(v) + '"' for v in output_line) + '\n' hh_start += HH except: yield traceback.format_exc()
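The generator above steps through the requested range one half-hour at a time, re-querying the era for each step. The walk itself is just the loop below; this is not Chellow's hh_range helper (which also takes a caches argument), only a standalone sketch of the iteration with plain datetimes:

from datetime import datetime, timedelta, timezone

HH = timedelta(minutes=30)

def iter_hhs(start, finish):
    """Yield every half-hour start from start to finish inclusive."""
    hh_start = start
    while not hh_start > finish:
        yield hh_start
        hh_start += HH

hhs = list(
    iter_hhs(
        datetime(2023, 1, 1, 0, 0, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 23, 30, tzinfo=timezone.utc),
    )
)
assert len(hhs) == 48  # one civil day of half-hours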
def content(year, supply_id, user): sess = f = writer = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'crc_special_events.csv', user) f = open(running_name, mode='w', newline='') writer = csv.writer(f, lineterminator='\n') writer.writerow(("MPAN Core", "Site Id", "Site Name", "Date", "Event")) year_start = datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc)) def add_event(events, date, code, era=None, mpan_core=None): if era is None: mpan_cores = [mpan_core] else: mpan_cores = [] if era.imp_mpan_core is not None: mpan_cores.append(era.imp_mpan_core) if era.exp_mpan_core is not None: mpan_cores.append(era.exp_mpan_core) for mpan_core in mpan_cores: events.append( {'date': date, 'code': code, 'mpan-core': mpan_core}) if supply_id is None: supplies = sess.query(Supply).join(Source).join(Era).filter( Source.code.in_(('net', 'gen-net', 'gen')), Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)).distinct() else: supply = Supply.get_by_id(supply_id) supplies = sess.query(Supply).filter(Supply.id == supply.id) for supply in supplies: eras = sess.query(Era).filter( Era.supply == supply, Era.start_date <= year_finish, or_(Era.finish_date == null(), Era.finish_date >= year_start) ).order_by(Era.start_date).all() events = [] first_era = eras[0] first_era_start = first_era.start_date if hh_after(first_era_start, year_start): add_event(events, first_era_start, "New Supply", first_era) last_era = eras[-1] last_era_finish = last_era.finish_date if hh_before(last_era_finish, year_finish): add_event(events, last_era_finish, "Disconnection", last_era) prev_era = first_era for era in eras[1:]: if era.msn != prev_era.msn: add_event(events, era.start_date, "Meter Change", era) if era.pc.code != prev_era.pc.code: add_event( events, era.start_date, "Change Of Profile Class", era) if era.mop_contract_id != prev_era.mop_contract_id: add_event(events, era.start_date, "Change Of MOP", era) if era.hhdc_contract_id != prev_era.hhdc_contract_id: add_event(events, era.start_date, "Change Of DC", era) for is_import in [True, False]: if era.imp_mpan_core is None: mpan_core = era.exp_mpan_core else: mpan_core = era.imp_mpan_core if is_import: cur_sup = era.imp_supplier_contract prev_sup = prev_era.imp_supplier_contract else: cur_sup = era.exp_supplier_contract prev_sup = prev_era.exp_supplier_contract if cur_sup is None and prev_sup is not None: add_event( events, era.start_date, "End of supply", mpan_core) elif cur_sup is not None and prev_sup is None: add_event( events, era.start_date, "Start of supply", None, mpan_core) elif cur_sup is not None and \ prev_sup is not None and cur_sup != prev_sup: add_event( events, era.start_date, "Change Of Supplier", None, mpan_core) prev_era = era if len(events) > 0: site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era == last_era).one() for event in events: vals = [ event['mpan-core'], site.code, site.name, event['date'].strftime("%Y-%m-%d %H:%M"), event['code']] writer.writerow(vals) except: msg = traceback.format_exc() sys.stderr.write(msg) writer.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
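The special-events report finds meter, profile class, MOP, DC and supplier changes by comparing each era with the one before it. A condensed sketch of that pairwise scan over hypothetical era records represented as dicts:

def era_changes(eras, fields):
    """Yield (start_date, field) for every field that differs between
    consecutive eras; eras are plain dicts here purely for illustration."""
    for prev_era, era in zip(eras, eras[1:]):
        for field in fields:
            if era[field] != prev_era[field]:
                yield era["start_date"], field

eras = [
    {"start_date": "2023-01-01", "msn": "A1", "pc": "00"},
    {"start_date": "2023-06-01", "msn": "B2", "pc": "00"},
]
assert list(era_changes(eras, ("msn", "pc"))) == [("2023-06-01", "msn")]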
def content( start_date, finish_date, supply_id, mpan_cores, is_zipped, user): if is_zipped: file_extension = ".zip" else: file_extension = ".csv" base_name = "hh_data_row_" + start_date.strftime("%Y%m%d%H%M") + \ file_extension titles = ','.join('"' + v + '"' for v in ( "Site Code", "Imp MPAN Core", "Exp Mpan Core", "Start Date", "Import ACTIVE", "Import ACTIVE Status", "Import REACTIVE_IMP", "Import REACTIVE_IMP Status", "Import REACTIVE_EXP", "Import REACTIVE_EXP Status", "Export ACTIVE", "Export ACTIVE Status", "Export REACTIVE_IMP", "Export REACTIVE_IMP Status", "Export REACTIVE_EXP", "Export REACTIVE_EXP Status")) + "\n" running_name, finished_name = chellow.dloads.make_names(base_name, user) if is_zipped: zf = zipfile.ZipFile(running_name, 'w') else: tmp_file = open(running_name, "w") sess = None try: sess = Session() supplies = sess.query(Supply).join(Era).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date), ).order_by(Era.supply_id, Era.start_date).distinct() if supply_id is not None: sup = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Era.supply == sup) if mpan_cores is not None: supplies = supplies.filter( or_( Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) if not is_zipped: tmp_file.write(titles) for supply in supplies: site, era = sess.query( Site, Era).join(Era.site_eras).filter( Era.supply == supply, Era.start_date <= finish_date, SiteEra.site_id == Site.id, or_( Era.finish_date == null(), Era.finish_date >= start_date), SiteEra.is_physical == true()).order_by(Era.id).first() outs = [] for hh_start_date, imp_active, imp_active_status, \ imp_reactive_imp, imp_reactive_imp_status, \ imp_reactive_exp, imp_reactive_exp_status, \ exp_active, exp_active_status, exp_reactive_imp, \ exp_reactive_imp_status, exp_reactive_exp, \ exp_reactive_exp_status in sess.execute(""" select hh_base.start_date, max(imp_active.value), max(imp_active.status), max(imp_reactive_imp.value), max(imp_reactive_imp.status), max(imp_reactive_exp.value), max(imp_reactive_exp.status), max(exp_active.value), max(exp_active.status), max(exp_reactive_imp.value), max(imp_reactive_imp.status), max(exp_reactive_imp.value), max(imp_reactive_exp.status) from hh_datum hh_base join channel on hh_base.channel_id = channel.id join era on channel.era_id = era.id left join hh_datum imp_active on (imp_active.id = hh_base.id and channel.imp_related is true and channel.channel_type = 'ACTIVE') left join hh_datum imp_reactive_imp on (imp_reactive_imp.id = hh_base.id and channel.imp_related is true and channel.channel_type = 'REACTIVE_IMP') left join hh_datum imp_reactive_exp on (imp_reactive_exp.id = hh_base.id and channel.imp_related is true and channel.channel_type = 'REACTIVE_EXP') left join hh_datum exp_active on (exp_active.id = hh_base.id and channel.imp_related is false and channel.channel_type = 'ACTIVE') left join hh_datum exp_reactive_imp on (exp_reactive_imp.id = hh_base.id and channel.imp_related is false and channel.channel_type = 'REACTIVE_IMP') left join hh_datum exp_reactive_exp on (exp_reactive_exp.id = hh_base.id and channel.imp_related is false and channel.channel_type = 'REACTIVE_EXP') where supply_id = :supply_id and hh_base.start_date between :start_date and :finish_date group by hh_base.start_date order by hh_base.start_date """, params={ 'supply_id': supply.id, 'start_date': start_date, 'finish_date': finish_date}): outs.append(','.join( '"' + ('' if v is None else str(v)) + '"' for v in ( site.code, 
era.imp_mpan_core, era.exp_mpan_core, hh_format(hh_start_date), imp_active, imp_active_status, imp_reactive_imp, imp_reactive_imp_status, imp_reactive_exp, imp_reactive_exp_status, exp_active, exp_active_status, exp_reactive_imp, exp_reactive_imp_status, exp_reactive_exp, exp_reactive_exp_status)) + '\n') if is_zipped: zf.writestr( ( "hh_data_row_" + str(era.id) + "_" + str(era.imp_mpan_core) + "_" + str(era.exp_mpan_core)).replace(' ', '') + '.csv', titles + ''.join(outs)) else: tmp_file.write(''.join(outs)) except: msg = "Problem " + traceback.format_exc() if is_zipped: zf.writestr('error.txt', msg) else: tmp_file.write(msg) finally: if sess is not None: sess.close() if is_zipped: zf.close() else: tmp_file.close() os.rename(running_name, finished_name)
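# Illustrative sketch (standalone, not part of Chellow): the SQL above pivots
# one hh_datum row per channel into a single row per half-hour start using LEFT
# JOINs and max(). The same shape built in Python from
# (start_date, imp_related, channel_type, value, status) tuples:
from collections import defaultdict


def pivot_hh(rows):
    pivoted = defaultdict(dict)
    for start_date, imp_related, channel_type, value, status in rows:
        key = ("imp" if imp_related else "exp") + "_" + channel_type.lower()
        pivoted[start_date][key] = (value, status)
    return dict(pivoted)


if __name__ == "__main__":
    rows = [
        ("2023-06-01 00:00", True, "ACTIVE", 0.5, "A"),
        ("2023-06-01 00:00", True, "REACTIVE_IMP", 0.1, "A"),
        ("2023-06-01 00:30", True, "ACTIVE", 0.4, "E"),
    ]
    print(pivot_hh(rows))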
def content(running_name, finished_name, date, supply_id, mpan_cores): sess = None try: sess = Session() f = open(running_name, mode='w', newline='') writer = csv.writer(f, lineterminator='\n') writer.writerow( ( 'Date', 'Physical Site Id', 'Physical Site Name', 'Other Site Ids', 'Other Site Names', 'Supply Id', 'Source', 'Generator Type', 'GSP Group', 'DNO Name', 'Voltage Level', 'Metering Type', 'Mandatory HH', 'PC', 'MTC', 'CoP', 'SSC', 'Number Of Registers', 'MOP Contract', 'Mop Account', 'HHDC Contract', 'HHDC Account', 'Meter Serial Number', 'Meter Installation Date', 'Latest Normal Meter Read Date', 'Latest Normal Meter Read Type', 'Latest DC Bill Date', 'Latest MOP Bill Date', 'Import ACTIVE?', 'Import REACTIVE_IMPORT?', 'Import REACTIVE_EXPORT?', 'Export ACTIVE?', 'Export REACTIVE_IMPORT?', 'Export REACTIVE_EXPORT?', 'Import MPAN core', 'Import Agreed Supply Capacity (kVA)', 'Import LLFC Code', 'Import LLFC Description', 'Import Supplier Contract', 'Import Supplier Account', 'Import Mandatory kW', 'Latest Import Supplier Bill Date', 'Export MPAN core', 'Export Agreed Supply Capacity (kVA)', 'Export LLFC Code', 'Export LLFC Description', 'Export Supplier Contract', 'Export Supplier Account', 'Export Mandatory kW', 'Latest Export Supplier Bill Date')) NORMAL_READ_TYPES = ('N', 'C', 'N3') year_start = date + HH - relativedelta(years=1) eras = sess.query(Era).filter( Era.start_date <= date, or_(Era.finish_date == null(), Era.finish_date >= date)).order_by( Era.supply_id) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) eras = eras.filter(Era.supply == supply) if mpan_cores is not None: eras = eras.filter( or_( Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) for era in eras: site_codes = [] site_names = [] for site_era in era.site_eras: if site_era.is_physical: physical_site = site_era.site else: site = site_era.site site_codes.append(site.code) site_names.append(site.name) supply = era.supply if era.imp_mpan_core is None: voltage_level_code = era.exp_llfc.voltage_level.code else: voltage_level_code = era.imp_llfc.voltage_level.code if supply.generator_type is None: generator_type = '' else: generator_type = supply.generator_type.code metering_type = era.make_meter_category() if metering_type == 'nhh': latest_prev_normal_read = sess.query(RegisterRead). 
\ join(Bill).join(RegisterRead.previous_type).filter( ReadType.code.in_(NORMAL_READ_TYPES), RegisterRead.previous_date <= date, Bill.supply_id == supply.id).order_by( RegisterRead.previous_date.desc()).first() latest_pres_normal_read = sess.query(RegisterRead) \ .join(Bill).join(RegisterRead.present_type).filter( ReadType.code.in_(NORMAL_READ_TYPES), RegisterRead.present_date <= date, Bill.supply == supply).order_by( RegisterRead.present_date.desc()).first() if latest_prev_normal_read is None and \ latest_pres_normal_read is None: latest_normal_read_date = None latest_normal_read_type = None elif latest_pres_normal_read is not None and \ latest_prev_normal_read is None: latest_normal_read_date = \ latest_pres_normal_read.present_date latest_normal_read_type = \ latest_pres_normal_read.present_type.code elif latest_pres_normal_read is None and \ latest_prev_normal_read is not None: latest_normal_read_date = \ latest_prev_normal_read.previous_date latest_normal_read_type = \ latest_prev_normal_read.previous_type.code elif latest_pres_normal_read.present_date > \ latest_prev_normal_read.previous_date: latest_normal_read_date = \ latest_pres_normal_read.present_date latest_normal_read_type = \ latest_pres_normal_read.present_type.code else: latest_normal_read_date = \ latest_prev_normal_read.previous_date latest_normal_read_type = \ latest_prev_normal_read.previous_type.code if latest_normal_read_date is not None: latest_normal_read_date = \ hh_format(latest_normal_read_date) else: latest_normal_read_date = metering_type latest_normal_read_type = None mop_contract = era.mop_contract if mop_contract is None: mop_contract_name = '' mop_account = '' latest_mop_bill_date = 'No MOP' else: mop_contract_name = mop_contract.name mop_account = era.mop_account latest_mop_bill_date = sess.query(Bill.finish_date) \ .join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == mop_contract).order_by( Bill.finish_date.desc()).first() if latest_mop_bill_date is not None: latest_mop_bill_date = hh_format(latest_mop_bill_date[0]) hhdc_contract = era.hhdc_contract if hhdc_contract is None: hhdc_contract_name = '' hhdc_account = '' latest_hhdc_bill_date = 'No HHDC' else: hhdc_contract_name = hhdc_contract.name hhdc_account = era.hhdc_account latest_hhdc_bill_date = sess.query(Bill.finish_date) \ .join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == hhdc_contract).order_by( Bill.finish_date.desc()).first() if latest_hhdc_bill_date is not None: latest_hhdc_bill_date = hh_format(latest_hhdc_bill_date[0]) channel_values = [] for imp_related in [True, False]: for channel_type in CHANNEL_TYPES: if era.find_channel( sess, imp_related, channel_type) is None: channel_values.append('false') else: channel_values.append('true') imp_avg_months = None exp_avg_months = None for is_import in [True, False]: if metering_type == 'nhh': continue params = { 'supply_id': supply.id, 'year_start': year_start, 'year_finish': date, 'is_import': is_import} month_mds = tuple( md[0] * 2 for md in sess.execute(""" select max(hh_datum.value) as md from hh_datum join channel on (hh_datum.channel_id = channel.id) join era on (channel.era_id = era.id) where era.supply_id = :supply_id and hh_datum.start_date >= :year_start and hh_datum.start_date <= :year_finish and channel.channel_type = 'ACTIVE' and channel.imp_related = :is_import group by extract(month from (hh_datum.start_date at time zone 'utc')) order by md desc limit 3 """, params=params)) avg_months = sum(month_mds) if 
len(month_mds) > 0: avg_months /= len(month_mds) if is_import: imp_avg_months = avg_months else: exp_avg_months = avg_months if (imp_avg_months is not None and imp_avg_months > 100) or \ (exp_avg_months is not None and exp_avg_months > 100): mandatory_hh = 'yes' else: mandatory_hh = 'no' imp_latest_supplier_bill_date = None exp_latest_supplier_bill_date = None for is_import in [True, False]: if is_import: if era.imp_mpan_core is None: continue else: supplier_contract = era.imp_supplier_contract else: if era.exp_mpan_core is None: continue else: supplier_contract = era.exp_supplier_contract latest_supplier_bill_date = sess.query(Bill.finish_date) \ .join(Batch).filter( Bill.start_date <= date, Bill.supply == supply, Batch.contract == supplier_contract).order_by( Bill.finish_date.desc()).first() if latest_supplier_bill_date is not None: latest_supplier_bill_date = \ latest_supplier_bill_date[0] latest_supplier_bill_date = hh_format( latest_supplier_bill_date) if is_import: imp_latest_supplier_bill_date = \ latest_supplier_bill_date else: exp_latest_supplier_bill_date = \ latest_supplier_bill_date meter_installation_date = sess.query(func.min(Era.start_date)) \ .filter(Era.supply == era.supply, Era.msn == era.msn).one()[0] if era.ssc is None: ssc_code = num_registers = None else: ssc_code = era.ssc.code num_registers = sess.query(MeasurementRequirement).filter( MeasurementRequirement.ssc == era.ssc).count() writer.writerow( ( ('' if value is None else str(value))) for value in [ hh_format(date), physical_site.code, physical_site.name, ', '.join(site_codes), ', '.join(site_names), supply.id, supply.source.code, generator_type, supply.gsp_group.code, supply.dno_contract.name, voltage_level_code, metering_type, mandatory_hh, era.pc.code, era.mtc.code, era.cop.code, ssc_code, num_registers, mop_contract_name, mop_account, hhdc_contract_name, hhdc_account, era.msn, hh_format(meter_installation_date), latest_normal_read_date, latest_normal_read_type, latest_hhdc_bill_date, latest_mop_bill_date] + channel_values + [ era.imp_mpan_core, era.imp_sc, None if era.imp_llfc is None else era.imp_llfc.code, None if era.imp_llfc is None else era.imp_llfc.description, None if era.imp_supplier_contract is None else era.imp_supplier_contract.name, era.imp_supplier_account, imp_avg_months, imp_latest_supplier_bill_date] + [ era.exp_mpan_core, era.exp_sc, None if era.exp_llfc is None else era.exp_llfc.code, None if era.exp_llfc is None else era.exp_llfc.description, None if era.exp_supplier_contract is None else era.exp_supplier_contract.name, era.exp_supplier_account, exp_avg_months, exp_latest_supplier_bill_date]) except: msg = traceback.format_exc() sys.stderr.write(msg) writer.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
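# Illustrative sketch (standalone, not part of Chellow): the 'Mandatory HH'
# column above averages the three highest monthly maximum demands of the last
# year (kW = half-hourly kWh * 2) and compares the result with 100 kW.
def mandatory_hh(monthly_max_kwh):
    month_mds = sorted((v * 2 for v in monthly_max_kwh), reverse=True)[:3]
    if len(month_mds) == 0:
        return "no"
    avg_md = sum(month_mds) / len(month_mds)
    return "yes" if avg_md > 100 else "no"


if __name__ == "__main__":
    # the top three doubled values are 122, 111 and 96.4 kW, averaging about 110
    print(mandatory_hh([40.0, 55.5, 61.0, 48.2]))  # 'yes'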
def content(supply_id, start_date, finish_date, user): caches = {} try: sess = Session() supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() running_name, finished_name = chellow.dloads.make_names( f"supply_virtual_bills_hh_{supply_id}.csv", user ) f = open(running_name, mode="w", newline="") w = csv.writer(f, lineterminator="\n") mop_titles = [] dc_titles = [] imp_supplier_titles = [] exp_supplier_titles = [] for era in sess.execute( select(Era).where( Era.supply == supply, Era.start_date <= finish_date, or_(Era.finish_date == null(), Era.finish_date >= start_date), ) ).scalars(): ds = chellow.computer.SupplySource( sess, era.start_date, era.start_date, forecast_date, era, True, caches ) for t in ds.contract_func(era.mop_contract, "virtual_bill_titles")(): if t not in mop_titles: mop_titles.append(t) for t in ds.contract_func(era.dc_contract, "virtual_bill_titles")(): if t not in dc_titles: dc_titles.append(t) if era.imp_supplier_contract is not None: for t in ds.contract_func( era.imp_supplier_contract, "virtual_bill_titles" )(): if t not in imp_supplier_titles: imp_supplier_titles.append(t) if era.exp_supplier_contract is not None: ds = chellow.computer.SupplySource( sess, era.start_date, era.start_date, forecast_date, era, False, caches, ) for t in ds.contract_func( era.exp_supplier_contract, "virtual_bill_titles" )(): if t not in exp_supplier_titles: exp_supplier_titles.append(t) titles = [ "mpan_core", "site_code", "site_name", "hh_start", ] for pref, t in ( ("mop", mop_titles), ("dc", dc_titles), ("imp_supplier", imp_supplier_titles), ("exp_supplier", exp_supplier_titles), ): titles.append("") titles.extend([f"{pref}_{n}" for n in t]) w.writerow(titles) for hh_start in hh_range(caches, start_date, finish_date): era = sess.execute( select(Era).where( Era.supply == supply, Era.start_date <= hh_start, or_(Era.finish_date == null(), Era.finish_date >= hh_start), ) ).scalar_one() site = sess.execute( select(Site) .join(SiteEra) .where(SiteEra.era == era, SiteEra.is_physical == true()) ).scalar_one() ds = chellow.computer.SupplySource( sess, hh_start, hh_start, forecast_date, era, True, caches ) vals = { "mpan_core": ds.mpan_core, "site_code": site.code, "site_name": site.name, "hh_start": hh_format(ds.start_date), } ds.contract_func(era.mop_contract, "virtual_bill")(ds) for k, v in ds.mop_bill.items(): vals[f"mop_{k}"] = v ds.contract_func(era.dc_contract, "virtual_bill")(ds) for k, v in ds.dc_bill.items(): vals[f"dc_{k}"] = v if era.imp_supplier_contract is not None: ds.contract_func(era.imp_supplier_contract, "virtual_bill")(ds) for k, v in ds.supplier_bill.items(): vals[f"imp_supplier_{k}"] = v if era.exp_supplier_contract is not None: ds = chellow.computer.SupplySource( sess, hh_start, hh_start, forecast_date, era, False, caches ) ds.contract_func(era.exp_supplier_contract, "virtual_bill")(ds) for k, v in ds.supplier_bill.items(): vals[f"exp_supplier_{k}"] = v w.writerow([csv_make_val(vals.get(t)) for t in titles]) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) w.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
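# Illustrative sketch (standalone, not part of Chellow): the first pass over the
# eras above only builds an ordered, de-duplicated union of virtual bill titles,
# which is then prefixed per contract group ('mop', 'dc', 'imp_supplier',
# 'exp_supplier') to form the CSV header.
def merge_titles(title_lists):
    merged = []
    for titles in title_lists:
        for t in titles:
            if t not in merged:
                merged.append(t)
    return merged


if __name__ == "__main__":
    mop_titles = merge_titles([["net-gbp", "problem"], ["net-gbp", "vat-gbp"]])
    print([f"mop_{t}" for t in mop_titles])
    # ['mop_net-gbp', 'mop_problem', 'mop_vat-gbp']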
def content(scenario_props, base_name, site_id, supply_id, user, compression, site_codes, now): report_context = {} try: comp = report_context["computer"] except KeyError: comp = report_context["computer"] = {} try: rate_cache = comp["rates"] except KeyError: rate_cache = comp["rates"] = {} try: ind_cont = report_context["contract_names"] except KeyError: ind_cont = report_context["contract_names"] = {} sess = None try: sess = Session() start_year = scenario_props["scenario_start_year"] start_month = scenario_props["scenario_start_month"] months = scenario_props["scenario_duration"] month_pairs = list( c_months_u(start_year=start_year, start_month=start_month, months=months)) start_date_utc = month_pairs[0][0] finish_date_utc = month_pairs[-1][-1] base_name.append( hh_format(start_date_utc).replace(" ", "_").replace(":", "").replace( "-", "")) base_name.append("for") base_name.append(str(months)) base_name.append("months") if "forecast_from" in scenario_props: forecast_from = scenario_props["forecast_from"] else: forecast_from = None if forecast_from is None: forecast_from = chellow.computer.forecast_date() else: forecast_from = to_utc(forecast_from) sites = sess.query(Site).distinct().order_by(Site.code) if site_id is not None: site = Site.get_by_id(sess, site_id) sites = sites.filter(Site.id == site.id) base_name.append("site") base_name.append(site.code) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) base_name.append("supply") base_name.append(str(supply.id)) sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply) if len(site_codes) > 0: base_name.append("sitecodes") sites = sites.filter(Site.code.in_(site_codes)) running_name, finished_name = chellow.dloads.make_names( "_".join(base_name) + ".ods", user) rf = open(running_name, "wb") site_rows = [] era_rows = [] for rate_script in scenario_props.get("local_rates", []): contract_id = rate_script["contract_id"] try: cont_cache = rate_cache[contract_id] except KeyError: cont_cache = rate_cache[contract_id] = {} try: rate_script_start = rate_script["start_date"] except KeyError: raise BadRequest( f"Problem in the scenario properties. Can't find the " f"'start_date' key of the contract {contract_id} in " f"the 'local_rates' map.") try: rate_script_start = rate_script["start_date"] except KeyError: raise BadRequest( f"Problem in the scenario properties. 
Can't find the " f"'start_date' key of the contract {contract_id} in " f"the 'local_rates' map.") props = PropDict("scenario properties", rate_script["script"]) for dt in hh_range(report_context, rate_script_start, rate_script["finish_date"]): cont_cache[dt] = props for rate_script in scenario_props.get("industry_rates", []): contract_name = rate_script["contract_name"] try: cont_cache = ind_cont[contract_name] except KeyError: cont_cache = ind_cont[contract_name] = {} rfinish = rate_script["finish_date"] if rfinish is None: raise BadRequest( f"For the industry rate {contract_name} the finish_date " f"can't be null.") for dt in hh_range(report_context, rate_script["start_date"], rfinish): cont_cache[dt] = PropDict("scenario properties", rate_script["script"]) era_maps = scenario_props.get("era_maps", {}) by_hh = scenario_props.get("by_hh", False) scenario_hh = scenario_props.get("hh_data", {}) era_header_titles = [ "creation-date", "imp-mpan-core", "imp-supplier-contract", "exp-mpan-core", "exp-supplier-contract", "metering-type", "source", "generator-type", "supply-name", "msn", "pc", "site-id", "site-name", "associated-site-ids", "month", ] site_header_titles = [ "creation-date", "site-id", "site-name", "associated-site-ids", "month", "metering-type", "sources", "generator-types", ] summary_titles = [ "import-net-kwh", "export-net-kwh", "import-gen-kwh", "export-gen-kwh", "import-3rd-party-kwh", "export-3rd-party-kwh", "displaced-kwh", "used-kwh", "used-3rd-party-kwh", "import-net-gbp", "export-net-gbp", "import-gen-gbp", "export-gen-gbp", "import-3rd-party-gbp", "export-3rd-party-gbp", "displaced-gbp", "used-gbp", "used-3rd-party-gbp", "billed-import-net-kwh", "billed-import-net-gbp", "billed-supplier-import-net-gbp", "billed-dc-import-net-gbp", "billed-mop-import-net-gbp", ] title_dict = {} for cont_type, con_attr in ( ("mop", Era.mop_contract), ("dc", Era.dc_contract), ("imp-supplier", Era.imp_supplier_contract), ("exp-supplier", Era.exp_supplier_contract), ): titles = [] title_dict[cont_type] = titles conts = (sess.query(Contract).join(con_attr).join( Era.supply).join(Source).filter( Era.start_date <= finish_date_utc, or_(Era.finish_date == null(), Era.finish_date >= start_date_utc), ).distinct().order_by(Contract.id)) if supply_id is not None: conts = conts.filter(Era.supply_id == supply_id) for cont in conts: title_func = chellow.computer.contract_func( report_context, cont, "virtual_bill_titles") if title_func is None: raise Exception( f"For the contract {cont.name} there doesn't seem to " f"be a 'virtual_bill_titles' function.") for title in title_func(): if title not in titles: titles.append(title) tpr_query = (sess.query(Tpr).join(MeasurementRequirement).join( Ssc).join(Era).filter( Era.start_date <= finish_date_utc, or_(Era.finish_date == null(), Era.finish_date >= start_date_utc), ).order_by(Tpr.code).distinct()) for tpr in tpr_query.filter(Era.imp_supplier_contract != null()): for suffix in ("-kwh", "-rate", "-gbp"): title_dict["imp-supplier"].append(tpr.code + suffix) for tpr in tpr_query.filter(Era.exp_supplier_contract != null()): for suffix in ("-kwh", "-rate", "-gbp"): title_dict["exp-supplier"].append(tpr.code + suffix) era_rows.append( era_header_titles + summary_titles + [None] + ["mop-" + t for t in title_dict["mop"]] + [None] + ["dc-" + t for t in title_dict["dc"]] + [None] + ["imp-supplier-" + t for t in title_dict["imp-supplier"]] + [None] + ["exp-supplier-" + t for t in title_dict["exp-supplier"]]) site_rows.append(site_header_titles + summary_titles) sites = 
sites.all() deltas = {} normal_reads = set() normal_read_rows = [] for site in sites: deltas[site.id] = _make_site_deltas(sess, report_context, site, scenario_hh, forecast_from, supply_id) for month_start, month_finish in month_pairs: for site in sites: if by_hh: sf = [(d, d) for d in hh_range(report_context, month_start, month_finish)] else: sf = [(month_start, month_finish)] for start, finish in sf: normal_reads = normal_reads | _process_site( sess, report_context, forecast_from, start, finish, site, deltas[site.id], supply_id, era_maps, now, summary_titles, title_dict, era_rows, site_rows, ) normal_read_rows = [[ "mpan_core", "date", "msn", "type", "registers" ]] for mpan_core, r in sorted(list(normal_reads)): row = [mpan_core, r.date, r.msn, r.type] + list(r.reads) normal_read_rows.append(row) write_spreadsheet(rf, compression, site_rows, era_rows, normal_read_rows) except BadRequest as e: msg = e.description + traceback.format_exc() sys.stderr.write(msg + "\n") site_rows.append(["Problem " + msg]) write_spreadsheet(rf, compression, site_rows, era_rows, normal_read_rows) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg + "\n") site_rows.append(["Problem " + msg]) write_spreadsheet(rf, compression, site_rows, era_rows, normal_read_rows) finally: if sess is not None: sess.close() try: rf.close() os.rename(running_name, finished_name) except BaseException: msg = traceback.format_exc() r_name, f_name = chellow.dloads.make_names("error.txt", user) ef = open(r_name, "w") ef.write(msg + "\n") ef.close()
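# Illustrative sketch (standalone, not part of Chellow): the scenario
# 'local_rates' handling above fills a per-contract cache keyed by every
# half-hour between the rate script's start and finish dates. A plain half-hour
# generator shows the shape; the real report uses Chellow's hh_range and
# PropDict.
from datetime import datetime, timedelta, timezone


def hh_range_simple(start, finish):
    dt = start
    while dt <= finish:
        yield dt
        dt += timedelta(minutes=30)


if __name__ == "__main__":
    cache = {}
    props = {"rate": 0.12}
    start = datetime(2023, 4, 1, tzinfo=timezone.utc)
    finish = datetime(2023, 4, 1, 1, 30, tzinfo=timezone.utc)
    for dt in hh_range_simple(start, finish):
        cache[dt] = props
    print(len(cache))  # 4 half-hours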
def content(year, month, months, supply_id, user): sess = f = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( "register_reads.csv", user) f = open(running_name, mode="w", newline="") w = csv.writer(f, lineterminator="\n") titles = ( "Duration Start", "Duration Finish", "Supply Id", "Import MPAN Core", "Export MPAN Core", "Batch Reference", "Bill Id", "Bill Reference", "Bill Issue Date", "Bill Type", "Register Read Id", "TPR", "Coefficient", "Previous Read Date", "Previous Read Value", "Previous Read Type", "Present Read Date", "Present Read Value", "Present Read Type", ) w.writerow(titles) month_pairs = list( c_months_u(finish_year=year, finish_month=month, months=months)) start_date, finish_date = month_pairs[0][0], month_pairs[-1][-1] supplies = (sess.query(Supply).join(Bill).join(RegisterRead).filter( or_( and_( RegisterRead.present_date >= start_date, RegisterRead.present_date <= finish_date, ), and_( RegisterRead.previous_date >= start_date, RegisterRead.previous_date <= finish_date, ), )).order_by(Bill.supply_id).distinct()) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Bill.supply == supply) for supply in supplies: supply_id = supply.id for bill, batch, bill_type in (sess.query( Bill, Batch, BillType).join(Batch).join(BillType).join( RegisterRead).filter( Bill.supply == supply, or_( and_( RegisterRead.present_date >= start_date, RegisterRead.present_date <= finish_date, ), and_( RegisterRead.previous_date >= start_date, RegisterRead.previous_date <= finish_date, ), ), )): era = supply.find_era_at(sess, bill.start_date) if era is None: eras = (sess.query(Era).filter( Era.supply == supply).order_by(Era.start_date).all()) if bill.start_date < eras[0].start_date: era = eras[0] else: era = eras[-1] for read in (sess.query(RegisterRead).filter( RegisterRead.bill == bill, or_( and_( RegisterRead.present_date >= start_date, RegisterRead.present_date <= finish_date, ), and_( RegisterRead.previous_date >= start_date, RegisterRead.previous_date <= finish_date, ), ), ).options( joinedload(RegisterRead.tpr), joinedload(RegisterRead.previous_type), joinedload(RegisterRead.present_type), )): vals = [ start_date, finish_date, supply_id, era.imp_mpan_core, era.exp_mpan_core, batch.reference, bill.id, bill.reference, bill.issue_date, bill_type.code, read.id, "md" if read.tpr is None else read.tpr.code, read.coefficient, read.previous_date, read.previous_value, read.previous_type.code, read.present_date, read.present_value, read.present_type.code, ] w.writerow(csv_make_val(v) for v in vals) # Avoid a long-running transaction sess.rollback() except BadRequest as e: w.writerow([e.description]) except BaseException: msg = traceback.format_exc() f.write(msg) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
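# Illustrative sketch (standalone, not part of Chellow): the duration window
# above runs from the first half-hour of the earliest month to the last
# half-hour of the finishing month. Chellow's c_months_u yields the month
# pairs; the equivalent arithmetic is:
from datetime import datetime, timedelta, timezone

from dateutil.relativedelta import relativedelta

HH = timedelta(minutes=30)


def month_window(finish_year, finish_month, months):
    month_start = datetime(finish_year, finish_month, 1, tzinfo=timezone.utc)
    finish_date = month_start + relativedelta(months=1) - HH
    start_date = month_start - relativedelta(months=months - 1)
    return start_date, finish_date


if __name__ == "__main__":
    # 3 months finishing 2023-03: 2023-01-01 00:00 to 2023-03-31 23:30 UTC
    print(month_window(2023, 3, 3))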
def content( start_date_ct, finish_date_ct, imp_related, channel_type, is_zipped, supply_id, mpan_cores, user, ): start_date, finish_date = to_utc(start_date_ct), to_utc(finish_date_ct) zf = sess = tf = None base_name = ["supplies_hh_data", finish_date_ct.strftime("%Y%m%d%H%M")] cache = {} try: sess = Session() supplies = (sess.query(Supply).join(Era).filter( or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date, ).order_by(Supply.id).distinct()) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) first_era = (sess.query(Era).filter( Era.supply == supply, or_(Era.finish_date == null(), Era.finish_date >= start_date), Era.start_date <= finish_date, ).order_by(Era.start_date).first()) if first_era.imp_mpan_core is None: name_core = first_era.exp_mpan_core else: name_core = first_era.imp_mpan_core base_name.append("supply_" + name_core.replace(" ", "_")) if mpan_cores is not None: supplies = supplies.filter( or_(Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores))) base_name.append("filter") cf = StringIO() writer = csv.writer(cf, lineterminator="\n") titles = [ "Import MPAN Core", "Export MPAN Core", "Import Related?", "Channel Type", "HH Start Clock-Time", ] + list(range(1, 51)) writer.writerow(titles) titles_csv = cf.getvalue() cf.close() running_name, finished_name = chellow.dloads.make_names( "_".join(base_name) + (".zip" if is_zipped else ".csv"), user) if is_zipped: zf = zipfile.ZipFile(running_name, "w", zipfile.ZIP_DEFLATED) else: tf = open(running_name, mode="w", newline="") tf.write(titles_csv) for supply in supplies: cf = StringIO() writer = csv.writer(cf, lineterminator="\n") era = supply.find_era_at(sess, finish_date) if era is None: imp_mpan_core_str = exp_mpan_core_str = "NA" else: if era.imp_mpan_core is None: imp_mpan_core_str = "NA" else: imp_mpan_core_str = era.imp_mpan_core if era.exp_mpan_core is None: exp_mpan_core_str = "NA" else: exp_mpan_core_str = era.exp_mpan_core imp_related_str = "TRUE" if imp_related else "FALSE" hh_data = iter( sess.query(HhDatum).join(Channel).join(Era).filter( Era.supply == supply, HhDatum.start_date >= start_date, HhDatum.start_date <= finish_date, Channel.imp_related == imp_related, Channel.channel_type == channel_type, ).order_by(HhDatum.start_date)) datum = next(hh_data, None) row = [] for current_date in hh_range(cache, start_date, finish_date): dt_ct = to_ct(current_date) if dt_ct.hour == 0 and dt_ct.minute == 0: if len(row) > 0: writer.writerow(row) row = [ imp_mpan_core_str, exp_mpan_core_str, imp_related_str, channel_type, dt_ct.strftime("%Y-%m-%d"), ] if datum is not None and datum.start_date == current_date: row.append(datum.value) datum = next(hh_data, None) else: row.append(None) if len(row) > 0: writer.writerow(row) if is_zipped: fname = "_".join((imp_mpan_core_str, exp_mpan_core_str, str(supply.id) + ".csv")) zf.writestr(fname.encode("ascii"), titles_csv + cf.getvalue()) else: tf.write(cf.getvalue()) cf.close() # Avoid long-running transaction sess.rollback() if is_zipped: zf.close() else: tf.close() except BaseException: msg = traceback.format_exc() if is_zipped: zf.writestr("error.txt", msg) zf.close() else: tf.write(msg) finally: if sess is not None: sess.close() os.rename(running_name, finished_name)
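# Illustrative sketch (standalone, not part of Chellow; assumes Python 3.9+
# zoneinfo): the report above writes one row per clock-time day with up to 50
# value columns because a Europe/London day holds 48 half-hours normally, 46 on
# the clocks-forward day and 50 on the clocks-back day.
from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo


def hh_per_clock_day(year, month, day):
    ct = ZoneInfo("Europe/London")
    day_start = datetime(year, month, day, tzinfo=ct)
    day_end_utc = (day_start + timedelta(days=1)).astimezone(timezone.utc)
    dt = day_start.astimezone(timezone.utc)
    count = 0
    while dt < day_end_utc:
        count += 1
        dt += timedelta(minutes=30)
    return count


if __name__ == "__main__":
    print(hh_per_clock_day(2023, 6, 1))    # 48
    print(hh_per_clock_day(2023, 10, 29))  # 50 (clocks go back)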
def content(year, month, months, supply_id, user): tmp_file = sess = None try: sess = Session() supplies = sess.query(Supply).join(Era).distinct() if supply_id is None: base_name = "supplies_monthly_duration_for_all_supplies_for_" + \ str(months) + "_to_" + str(year) + "_" + str(month) + ".csv" else: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) base_name = "supplies_monthly_duration_for_" + str(supply.id) + \ "_" + str(months) + "_to_" + str(year) + "_" + str(month) + \ ".csv" running_name, finished_name = chellow.dloads.make_names( base_name, user) tmp_file = open(running_name, "w") caches = {} start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \ relativedelta(months=months-1) field_names = ( 'supply-name', 'source-code', 'generator-type', 'month', 'pc-code', 'msn', 'site-code', 'site-name', 'metering-type', 'import-mpan-core', 'metered-import-kwh', 'metered-import-net-gbp', 'metered-import-estimated-kwh', 'billed-import-kwh', 'billed-import-net-gbp', 'export-mpan-core', 'metered-export-kwh', 'metered-export-estimated-kwh', 'billed-export-kwh', 'billed-export-net-gbp', 'problem', 'timestamp') tmp_file.write('supply-id,' + ','.join(field_names) + '\n') forecast_date = chellow.computer.forecast_date() for i in range(months): month_start = start_date + relativedelta(months=i) month_finish = month_start + relativedelta(months=1) - HH for supply in supplies.filter( Era.start_date <= month_finish, or_( Era.finish_date == null(), Era.finish_date >= month_start)): generator_type = supply.generator_type if generator_type is None: generator_type = '' else: generator_type = generator_type.code source_code = supply.source.code eras = supply.find_eras(sess, month_start, month_finish) era = eras[-1] metering_type = era.make_meter_category() site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() values = { 'supply-name': supply.name, 'source-code': source_code, 'generator-type': generator_type, 'month': hh_format(month_finish), 'pc-code': era.pc.code, 'msn': era.msn, 'site-code': site.code, 'site-name': site.name, 'metering-type': metering_type, 'problem': ''} tmp_file.write(str(supply.id) + ',') for is_import, pol_name in [ (True, 'import'), (False, 'export')]: if is_import: mpan_core = era.imp_mpan_core else: mpan_core = era.exp_mpan_core values[pol_name + '-mpan-core'] = mpan_core kwh = 0 est_kwh = 0 if metering_type in ['hh', 'amr']: est_kwh = sess.query(HhDatum.value).join(Channel) \ .join(Era).filter( HhDatum.status == 'E', Era.supply_id == supply.id, Channel.channel_type == 'ACTIVE', Channel.imp_related == is_import, HhDatum.start_date >= month_start, HhDatum.start_date <= month_finish).first() if est_kwh is None: est_kwh = 0 else: est_kwh = est_kwh[0] if not (is_import and source_code in ('net', 'gen-net')): kwh_sum = sess.query( cast(func.sum(HhDatum.value), Float) ).join(Channel).join(Era).filter( Era.supply_id == supply.id, Channel.channel_type == 'ACTIVE', Channel.imp_related == is_import, HhDatum.start_date >= month_start, HhDatum.start_date <= month_finish).one()[0] if kwh_sum is not None: kwh += kwh_sum values['metered-' + pol_name + '-estimated-kwh'] = est_kwh values['metered-' + pol_name + '-kwh'] = kwh values['metered-' + pol_name + '-net-gbp'] = 0 values['billed-' + pol_name + '-kwh'] = 0 values['billed-' + pol_name + '-net-gbp'] = 0 values['billed-' + pol_name + '-apportioned-kwh'] = 0 values['billed-' + pol_name + '-apportioned-net-gbp'] = 0 values['billed-' + pol_name + 
'-raw-kwh'] = 0 values['billed-' + pol_name + '-raw-net-gbp'] = 0 for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= month_finish, Bill.finish_date >= month_start): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = ( bill_finish - bill_start).total_seconds() + 30 * 60 overlap_duration = ( min(bill_finish, month_finish) - max(bill_start, month_start)).total_seconds() + 30 * 60 overlap_proportion = float( overlap_duration) / float(bill_duration) values['billed-import-net-gbp'] += \ overlap_proportion * float(bill.net) values['billed-import-kwh'] += \ overlap_proportion * float(bill.kwh) for era in eras: if era.start_date > month_start: chunk_start = era.start_date else: chunk_start = month_start if hh_after(era.finish_date, month_finish): chunk_finish = month_finish else: chunk_finish = era.finish_date import_mpan_core = era.imp_mpan_core if import_mpan_core is None: continue supplier_contract = era.imp_supplier_contract if source_code in ['net', 'gen-net', '3rd-party']: supply_source = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, True, None, caches) values['metered-import-kwh'] += sum( datum['msp-kwh'] for datum in supply_source.hh_data) import_vb_function = supply_source.contract_func( supplier_contract, 'virtual_bill') if import_vb_function is None: values['problem'] += "Can't find the " \ "virtual_bill function in the supplier " \ "contract. " else: import_vb_function(supply_source) values['metered-import-net-gbp'] += \ supply_source.supplier_bill['net-gbp'] supply_source.contract_func( era.hhdc_contract, 'virtual_bill')(supply_source) values['metered-import-net-gbp'] += \ supply_source.dc_bill['net-gbp'] mop_func = supply_source.contract_func( era.mop_contract, 'virtual_bill') if mop_func is None: values['problem'] += " MOP virtual_bill " \ "function can't be found." else: mop_func(supply_source) mop_bill = supply_source.mop_bill values['metered-import-net-gbp'] += \ mop_bill['net-gbp'] if len(mop_bill['problem']) > 0: values['problem'] += \ " MOP virtual bill problem: " + \ mop_bill['problem'] values['timestamp'] = int(time.time() * 1000) tmp_file.write( ','.join( '"' + str(values[name]) + '"' for name in field_names) + '\n') except: tmp_file.write(traceback.format_exc()) finally: if sess is not None: sess.close() tmp_file.close() os.rename(running_name, finished_name)
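# Illustrative sketch (standalone, not part of Chellow): billed values above are
# apportioned to the month by the fraction of the bill's (half-hour inclusive)
# duration that overlaps it, so a bill spanning two months is split pro rata.
from datetime import datetime, timezone

HH_SECONDS = 30 * 60


def apportion(bill_start, bill_finish, month_start, month_finish, net_gbp):
    bill_duration = (bill_finish - bill_start).total_seconds() + HH_SECONDS
    overlap = (
        min(bill_finish, month_finish) - max(bill_start, month_start)
    ).total_seconds() + HH_SECONDS
    return net_gbp * overlap / bill_duration


if __name__ == "__main__":
    utc = timezone.utc
    feb_share = apportion(
        datetime(2023, 1, 1, tzinfo=utc),
        datetime(2023, 2, 28, 23, 30, tzinfo=utc),
        datetime(2023, 2, 1, tzinfo=utc),
        datetime(2023, 2, 28, 23, 30, tzinfo=utc),
        118.0)
    print(round(feb_share, 2))  # 56.0: February's 28 of the bill's 59 days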
def content(year, supply_id, user): f = sess = None try: sess = Session() fname = ['crc', str(year), str(year + 1)] if supply_id is None: fname.append('all_supplies') else: fname.append('supply_' + str(supply_id)) running_name, finished_name = chellow.dloads.make_names( '_'.join(fname) + '.csv', user) f = open(running_name, mode='w', newline='') w = csv.writer(f, lineterminator='\n') ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP'] w.writerow( ( 'Chellow Supply Id', 'Report Start', 'Report Finish', 'MPAN Core', 'Site Id', 'Site Name', 'From', 'To', 'NHH Breakdown', 'Actual HH Normal Days', 'Actual AMR Normal Days', 'Actual NHH Normal Days', 'Actual Unmetered Normal Days', 'Max HH Normal Days', 'Max AMR Normal Days', 'Max NHH Normal Days', 'Max Unmetered Normal Days', 'Total Actual Normal Days', 'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh', 'NHH kWh', 'Unmetered kwh', 'HH Filled kWh', 'AMR Filled kWh', 'Total kWh', 'Note')) year_start = Datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = year_start + relativedelta(years=1) - HH supplies = sess.query(Supply).join(Era).join(Source).filter( Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(), Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)).distinct().order_by(Supply.id) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) meter_types = ('hh', 'amr', 'nhh', 'unmetered') for supply in supplies: total_kwh = dict([(mtype, 0) for mtype in meter_types]) filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')]) normal_days = dict([(mtype, 0) for mtype in meter_types]) max_normal_days = dict([(mtype, 0) for mtype in meter_types]) breakdown = '' eras = sess.query(Era).filter( Era.supply == supply, Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)).order_by( Era.start_date).all() supply_from = hh_max(eras[0].start_date, year_start) supply_to = hh_min(eras[-1].finish_date, year_finish) for era in eras: meter_type = era.meter_category period_start = hh_max(era.start_date, year_start) period_finish = hh_min(era.finish_date, year_finish) max_normal_days[meter_type] += ( (period_finish - period_start).total_seconds() + 60 * 30) / (60 * 60 * 24) mpan_core = era.imp_mpan_core site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era_id == era.id).one() if meter_type == 'nhh': read_list = [] read_keys = {} pairs = [] prior_pres_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date < period_start, BillType.code != 'W').order_by( RegisterRead.present_date.desc())) prior_prev_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date < period_start, BillType.code != 'W').order_by( RegisterRead.previous_date.desc())) next_pres_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date >= period_start, BillType.code != 'W').order_by( RegisterRead.present_date)) next_prev_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType). 
join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date >= period_start, BillType.code != 'W').order_by( RegisterRead.previous_date)) for is_forwards in [False, True]: if is_forwards: pres_reads = next_pres_reads prev_reads = next_prev_reads read_list.reverse() else: pres_reads = prior_pres_reads prev_reads = prior_prev_reads prime_pres_read = None prime_prev_read = None while True: while prime_pres_read is None: try: pres_read = next(pres_reads) except StopIteration: break pres_date = pres_read.present_date pres_msn = pres_read.msn read_key = '_'.join([str(pres_date), pres_msn]) if read_key in read_keys: continue pres_bill = sess.query(Bill).join(BillType). \ filter( Bill.reads.any(), Bill.supply == supply, Bill.finish_date >= pres_read.bill.start_date, Bill.start_date <= pres_read.bill.finish_date, BillType.code != 'W').order_by( Bill.issue_date.desc(), BillType.code).first() if pres_bill != pres_read.bill: continue reads = dict( ( read.tpr.code, float(read.present_value) * float(read.coefficient)) for read in sess.query(RegisterRead). filter( RegisterRead.units == 0, RegisterRead.bill == pres_bill, RegisterRead.present_date == pres_date, RegisterRead.msn == pres_msn)) prime_pres_read = { 'date': pres_date, 'reads': reads, 'msn': pres_msn} read_keys[read_key] = None while prime_prev_read is None: try: prev_read = next(prev_reads) except StopIteration: break prev_date = prev_read.previous_date prev_msn = prev_read.msn read_key = '_'.join([str(prev_date), prev_msn]) if read_key in read_keys: continue prev_bill = sess.query(Bill).join(BillType). \ filter( Bill.reads.any(), Bill.supply_id == supply.id, Bill.finish_date >= prev_read.bill.start_date, Bill.start_date <= prev_read.bill.finish_date, BillType.code != 'W').order_by( Bill.issue_date.desc(), BillType.code).first() if prev_bill != prev_read.bill: continue reads = dict( ( read.tpr.code, float(read.previous_value) * float(read.coefficient)) for read in sess.query(RegisterRead). 
filter( RegisterRead.units == 0, RegisterRead.bill_id == prev_bill.id, RegisterRead.previous_date == prev_date, RegisterRead.msn == prev_msn)) prime_prev_read = { 'date': prev_date, 'reads': reads, 'msn': prev_msn} read_keys[read_key] = None if prime_pres_read is None and \ prime_prev_read is None: break elif prime_pres_read is None: read_list.append(prime_prev_read) prime_prev_read = None elif prime_prev_read is None: read_list.append(prime_pres_read) prime_pres_read = None else: if is_forwards: if prime_pres_read['date'] <= \ prime_prev_read['date']: read_list.append(prime_pres_read) prime_pres_read = None else: read_list.append(prime_prev_read) prime_prev_read = None else: if prime_prev_read['date'] >= \ prime_pres_read['date']: read_list.append(prime_prev_read) prime_prev_read = None else: read_list.append(prime_pres_read) prime_pres_read = None if len(read_list) > 1: if is_forwards: aft_read = read_list[-2] fore_read = read_list[-1] else: aft_read = read_list[-1] fore_read = read_list[-2] if aft_read['msn'] == fore_read['msn'] and \ set(aft_read['reads'].keys()) == \ set(fore_read['reads'].keys()): pair_start_date = aft_read['date'] + HH pair_finish_date = fore_read['date'] num_hh = ( ( pair_finish_date + HH - pair_start_date).total_seconds() ) / (30 * 60) tprs = {} for tpr_code, initial_val in \ aft_read['reads'].items(): end_val = fore_read['reads'][tpr_code] kwh = end_val - initial_val if kwh < 0: digits = int( math.log10(initial_val)) + 1 kwh = 10 ** digits + kwh tprs[tpr_code] = kwh / num_hh pairs.append( { 'start-date': pair_start_date, 'finish-date': pair_finish_date, 'tprs': tprs}) if len(pairs) > 0 and ( not is_forwards or ( is_forwards and read_list[-1]['date'] > period_finish)): break breakdown += 'read list - \n' + str(read_list) + "\n" if len(pairs) == 0: pairs.append( { 'start-date': period_start, 'finish-date': period_finish, 'tprs': {'00001': 0}}) else: for pair in pairs: pair_start = pair['start-date'] pair_finish = pair['finish-date'] if pair_start >= year_start and \ pair_finish <= year_finish: block_start = hh_max(pair_start, period_start) block_finish = hh_min( pair_finish, period_finish) if block_start <= block_finish: normal_days[meter_type] += ( ( block_finish - block_start ).total_seconds() + 60 * 30) / (60 * 60 * 24) # smooth for i in range(1, len(pairs)): pairs[i - 1]['finish-date'] = pairs[i]['start-date'] \ - HH # stretch if pairs[0]['start-date'] > period_start: pairs[0]['start-date'] = period_start if pairs[-1]['finish-date'] < period_finish: pairs[-1]['finish-date'] = period_finish # chop pairs = [ pair for pair in pairs if not pair['start-date'] > period_finish and not pair['finish-date'] < period_start] # squash if pairs[0]['start-date'] < period_start: pairs[0]['start-date'] = period_start if pairs[-1]['finish-date'] > period_finish: pairs[-1]['finish-date'] = period_finish for pair in pairs: pair_hhs = ( ( pair['finish-date'] - pair['start-date'] ).total_seconds() + 30 * 60) / (60 * 30) pair['pair_hhs'] = pair_hhs for tpr_code, pair_kwh in pair['tprs'].items(): total_kwh[meter_type] += pair_kwh * pair_hhs breakdown += 'pairs - \n' + str(pairs) elif meter_type in ('hh', 'amr'): period_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value). join(Channel).filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish).order_by( HhDatum.id)) year_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value). 
join(Channel).join(Era).filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Era.supply == supply, HhDatum.start_date >= year_start, HhDatum.start_date <= year_finish).order_by( HhDatum.id)) period_sum_kwhs = sum(period_kwhs) year_sum_kwhs = sum(year_kwhs) period_len_kwhs = len(period_kwhs) year_len_kwhs = len(year_kwhs) total_kwh[meter_type] += period_sum_kwhs period_hhs = ( period_finish + HH - period_start ).total_seconds() / (60 * 30) if year_len_kwhs > 0: filled_kwh[meter_type] += year_sum_kwhs / \ year_len_kwhs * (period_hhs - period_len_kwhs) normal_days[meter_type] += sess.query( func.count(HhDatum.value)).join(Channel). \ filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish, HhDatum.status == 'A').one()[0] / 48 elif meter_type == 'unmetered': year_seconds = ( year_finish - year_start).total_seconds() + 60 * 30 period_seconds = ( period_finish - period_start).total_seconds() + 60 * 30 total_kwh[meter_type] += era.imp_sc * period_seconds / \ year_seconds normal_days[meter_type] += period_seconds / (60 * 60 * 24) # for full year 183 total_normal_days = sum(normal_days.values()) total_max_normal_days = sum(max_normal_days.values()) is_normal = total_normal_days / total_max_normal_days >= 183 / 365 w.writerow( [ supply.id, hh_format(year_start), hh_format(year_finish), mpan_core, site.code, site.name, hh_format(supply_from), hh_format(supply_to), breakdown] + [ normal_days[t] for t in meter_types] + [ max_normal_days[t] for t in meter_types] + [ total_normal_days, total_max_normal_days, "Actual" if is_normal else "Estimated"] + [total_kwh[t] for t in meter_types] + [filled_kwh[t] for t in ('hh', 'amr')] + [sum(total_kwh.values()) + sum(filled_kwh.values()), '']) # avoid a long running transaction sess.rollback() except BaseException: msg = traceback.format_exc() sys.stderr.write(msg + '\n') f.write("Problem " + msg) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
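# Illustrative sketch (standalone, not part of Chellow): when a register appears
# to go backwards between two normal reads, the pairing logic above assumes the
# register rolled over, adding 10 ** (number of digits of the earlier value) to
# the negative difference.
import math


def register_advance(initial_val, end_val):
    kwh = end_val - initial_val
    if kwh < 0:
        digits = int(math.log10(initial_val)) + 1
        kwh = 10 ** digits + kwh
    return kwh


if __name__ == "__main__":
    print(register_advance(99987.0, 13.0))  # 26.0: a 5-digit register wrapped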
def _process_VAT(rows, headers):
    sess = headers["sess"]
    mpan_core = headers["mpan_core"]
    start_date = headers["start_date"]
    reads = headers["reads"]
    supply = Supply.get_by_mpan_core(sess, mpan_core)
    era = supply.find_era_at(sess, start_date)
    bill_elements = []
    if era is None:
        era = supply.find_last_era(sess)
    if era is not None and era.ssc is not None:
        try:
            ssc_lookup = era.imp_mpan_core
            tpr_map = SSC_MAP[ssc_lookup]
        except KeyError:
            ssc_lookup = era.ssc.code
            try:
                tpr_map = SSC_MAP[ssc_lookup]
            except KeyError:
                raise BadRequest(
                    "The SSC " + ssc_lookup + " isn't in the SSC_MAP.")
        for read in reads:
            desc = read["tpr_code"]
            try:
                read["tpr_code"] = tpr_map[desc]
            except KeyError:
                raise BadRequest(
                    "The description " + desc + " isn't in the SSC_MAP "
                    "for the SSC " + ssc_lookup + ".")
        for el in headers["bill_elements"]:
            if el.titles is None:
                try:
                    tpr = tpr_map[el.desc]
                except KeyError:
                    raise BadRequest(
                        f"The billing element description {el.desc} isn't in "
                        f"the SSC_MAP for the SSC {ssc_lookup}.")
                titles = (tpr + "-gbp", tpr + "-rate", tpr + "-kwh")
            else:
                titles = el.titles
            bill_elements.append(
                BillElement(
                    gbp=el.gbp, titles=titles, rate=el.rate, cons=el.cons,
                    desc=None))
    else:
        for read in reads:
            read["tpr_code"] = "00001"
        for el in headers["bill_elements"]:
            if el.titles is None:
                des = el.desc
                titles = (des + "-gbp", des + "-rate", des + "-kwh")
            else:
                titles = el.titles
            bill_elements.append(
                BillElement(
                    gbp=el.gbp, titles=titles, rate=el.rate, cons=el.cons,
                    desc=None))
    breakdown = headers["breakdown"]
    for bill_el in bill_elements:
        eln_gbp, eln_rate, eln_cons = bill_el.titles
        breakdown[eln_gbp] = bill_el.gbp
        rate = bill_el.rate
        if eln_rate is not None and rate is not None:
            try:
                rates = breakdown[eln_rate]
            except KeyError:
                rates = breakdown[eln_rate] = set()
            rates.add(rate)
        cons = bill_el.cons
        if eln_cons is not None and cons is not None:
            breakdown[eln_cons] = cons
    return {
        "kwh": headers["kwh"],
        "reference": headers["reference"],
        "mpan_core": mpan_core,
        "issue_date": headers["issue_date"],
        "account": headers["account"],
        "start_date": start_date,
        "finish_date": headers["finish_date"],
        "net": headers["net"],
        "vat": headers["vat"],
        "gross": headers["gross"],
        "breakdown": breakdown,
        "reads": reads,
        "bill_type_code": headers["bill_type_code"],
    }
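# Illustrative sketch (standalone, not part of Chellow): the SSC_MAP lookup
# above tries the era's import MPAN core first and falls back to the SSC code,
# so a single supply can be given a bespoke TPR mapping without disturbing the
# generic per-SSC entries.
def lookup_tpr_map(ssc_map, mpan_core, ssc_code):
    for key in (mpan_core, ssc_code):
        try:
            return key, ssc_map[key]
        except KeyError:
            continue
    raise KeyError(f"Neither {mpan_core} nor {ssc_code} is in the SSC map.")


if __name__ == "__main__":
    SSC_MAP = {"0393": {"Day": "00040", "Night": "00206"}}
    print(lookup_tpr_map(SSC_MAP, "22 1737 1873 221", "0393"))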
def content(start_date, finish_date, supply_id, mpan_cores, is_zipped, user): if is_zipped: file_extension = ".zip" else: file_extension = ".csv" base_name = ( "hh_data_row_" + to_ct(start_date).strftime("%Y%m%d%H%M") + file_extension ) tls = ["Site Code", "Imp MPAN Core", "Exp Mpan Core", "HH Start Clock-Time"] for polarity in ("Import", "Export"): for suffix in ( "ACTIVE kWh", "ACTIVE Status", "ACTIVE Modified", "REACTIVE_IMP kVArh", "REACTIVE_IMP Status", "REACTIVE_IMP Modified", "REACTIVE_EXP kVArh", "REACTIVE_EXP Status", "REACTIVE_EXP Modified", ): tls.append(polarity + " " + suffix) titles = csv_str(tls) running_name, finished_name = chellow.dloads.make_names(base_name, user) if is_zipped: zf = zipfile.ZipFile(running_name, "w") else: tmp_file = open(running_name, "w") sess = None try: sess = Session() caches = {} supplies = ( sess.query(Supply) .join(Era) .filter( Era.start_date <= finish_date, or_(Era.finish_date == null(), Era.finish_date >= start_date), ) .order_by(Era.supply_id, Era.start_date) .distinct() ) if supply_id is not None: sup = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Era.supply == sup) if mpan_cores is not None: supplies = supplies.filter( or_( Era.imp_mpan_core.in_(mpan_cores), Era.exp_mpan_core.in_(mpan_cores) ) ) if not is_zipped: tmp_file.write(titles) for supply in supplies: site, era = ( sess.query(Site, Era) .join(Era.site_eras) .filter( Era.supply == supply, Era.start_date <= finish_date, SiteEra.site_id == Site.id, or_(Era.finish_date == null(), Era.finish_date >= start_date), SiteEra.is_physical == true(), ) .order_by(Era.id) .first() ) outs = [] data = iter( sess.execute( """ select hh_base.start_date, max(imp_active.value), max(imp_active.status), max(imp_active.last_modified), max(imp_reactive_imp.value), max(imp_reactive_imp.status), max(imp_reactive_imp.last_modified), max(imp_reactive_exp.value), max(imp_reactive_exp.status), max(imp_reactive_exp.last_modified), max(exp_active.value), max(exp_active.status), max(exp_active.last_modified), max(exp_reactive_imp.value), max(imp_reactive_imp.status), max(exp_reactive_imp.last_modified), max(exp_reactive_exp.value), max(exp_reactive_exp.status), max(exp_reactive_exp.last_modified) from hh_datum hh_base join channel on hh_base.channel_id = channel.id join era on channel.era_id = era.id left join hh_datum imp_active on (imp_active.id = hh_base.id and channel.imp_related is true and channel.channel_type = 'ACTIVE') left join hh_datum imp_reactive_imp on (imp_reactive_imp.id = hh_base.id and channel.imp_related is true and channel.channel_type = 'REACTIVE_IMP') left join hh_datum imp_reactive_exp on (imp_reactive_exp.id = hh_base.id and channel.imp_related is true and channel.channel_type = 'REACTIVE_EXP') left join hh_datum exp_active on (exp_active.id = hh_base.id and channel.imp_related is false and channel.channel_type = 'ACTIVE') left join hh_datum exp_reactive_imp on (exp_reactive_imp.id = hh_base.id and channel.imp_related is false and channel.channel_type = 'REACTIVE_IMP') left join hh_datum exp_reactive_exp on (exp_reactive_exp.id = hh_base.id and channel.imp_related is false and channel.channel_type = 'REACTIVE_EXP') where supply_id = :supply_id and hh_base.start_date between :start_date and :finish_date group by hh_base.start_date order by hh_base.start_date """, params={ "supply_id": supply.id, "start_date": start_date, "finish_date": finish_date, }, ) ) datum = next(data, None) for dt in hh_range(caches, start_date, finish_date): row = [site.code, era.imp_mpan_core, 
era.exp_mpan_core, dt] if datum is not None: ( hh_start_date, imp_active, imp_active_status, imp_active_modified, imp_reactive_imp, imp_reactive_imp_status, imp_reactive_imp_modified, imp_reactive_exp, imp_reactive_exp_status, imp_reactive_exp_modified, exp_active, exp_active_status, exp_active_modified, exp_reactive_imp, exp_reactive_imp_status, exp_reactive_imp_modified, exp_reactive_exp, exp_reactive_exp_status, exp_reactive_exp_modified, ) = datum if hh_start_date == dt: datum = next(data, None) row += [ imp_active, imp_active_status, imp_active_modified, imp_reactive_imp, imp_reactive_imp_status, imp_reactive_imp_modified, imp_reactive_exp, imp_reactive_exp_status, imp_reactive_exp_modified, exp_active, exp_active_status, exp_active_modified, exp_reactive_imp, exp_reactive_imp_status, exp_reactive_imp_modified, exp_reactive_exp, exp_reactive_exp_status, exp_reactive_exp_modified, ] outs.append(csv_str(row)) if is_zipped: zf.writestr( ( "hh_data_row_" + str(era.id) + "_" + str(era.imp_mpan_core) + "_" + str(era.exp_mpan_core) ).replace(" ", "") + ".csv", titles + "".join(outs), ) else: tmp_file.write("".join(outs)) # Avoid a long-running transaction sess.rollback() except BaseException: msg = "Problem " + traceback.format_exc() if is_zipped: zf.writestr("error.txt", msg) else: tmp_file.write(msg) finally: if sess is not None: sess.close() if is_zipped: zf.close() else: tmp_file.close() os.rename(running_name, finished_name)
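# Illustrative sketch (standalone, not part of Chellow): the loop above walks a
# complete half-hour range and a sparse, sorted stream of readings side by side
# using next(data, None), writing a blank row whenever no reading exists for
# that half-hour.
from datetime import datetime, timedelta, timezone

HH = timedelta(minutes=30)


def merge_hh(start, finish, readings):
    data = iter(sorted(readings))
    datum = next(data, None)
    dt = start
    while dt <= finish:
        if datum is not None and datum[0] == dt:
            yield dt, datum[1]
            datum = next(data, None)
        else:
            yield dt, None
        dt += HH


if __name__ == "__main__":
    utc = timezone.utc
    start = datetime(2023, 6, 1, tzinfo=utc)
    readings = [(start, 0.5), (start + 2 * HH, 0.7)]
    for row in merge_hh(start, start + 3 * HH, readings):
        print(row)  # gaps at 00:30 and 01:30 come out as None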
def content(year, month, months, supply_id, user): sess = f = None try: sess = Session() running_name, finished_name = chellow.dloads.make_names( 'register_reads.csv', user) f = open(running_name, mode='w', newline='') w = csv.writer(f, lineterminator='\n') w.writerow( ('Duration Start', 'Duration Finish', 'Supply Id', 'Import MPAN Core', 'Export MPAN Core', 'Batch Reference', 'Bill Id', 'Bill Reference', 'Bill Issue Date', 'Bill Type', 'Register Read Id', 'TPR', 'Coefficient', 'Previous Read Date', 'Previous Read Value', 'Previous Read Type', 'Present Read Date', 'Present Read Value', 'Present Read Type')) finish_date = utc_datetime(year, month, 1) + \ relativedelta(months=1) - HH start_date = utc_datetime(year, month, 1) - \ relativedelta(months=months-1) reads = sess.query(RegisterRead).filter( or_( and_( RegisterRead.present_date >= start_date, RegisterRead.present_date <= finish_date), and_( RegisterRead.previous_date >= start_date, RegisterRead.previous_date <= finish_date))) \ .join(Bill).order_by(Bill.supply_id) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) reads = reads.filter(Bill.supply == supply) for read in reads: bill = read.bill supply = bill.supply batch = bill.batch era = supply.find_era_at(sess, bill.start_date) if era is None: eras = sess.query(Era).filter(Era.supply == supply).order_by( Era.start_date).all() if bill.start_date < eras[0].start_date: era = eras[0] else: era = eras[-1] w.writerow(('' if val is None else val) for val in [ hh_format(start_date), hh_format(finish_date), supply.id, era.imp_mpan_core, era.exp_mpan_core, batch.reference, bill.id, bill.reference, hh_format(bill.issue_date), bill.bill_type.code, read.id, 'md' if read.tpr is None else read.tpr.code, read.coefficient, hh_format(read.previous_date), read.previous_value, read.previous_type.code, hh_format(read.present_date), read.present_value, read.present_type.code ]) # Avoid a long-running transaction sess.rollback() except BadRequest as e: w.writerow([e.description]) except BaseException: msg = traceback.format_exc() f.write(msg) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
def content(supply_id, start_date, finish_date, user): caches = {} try: sess = Session() supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() prev_titles = None running_name, finished_name = chellow.dloads.make_names( "supply_virtual_bills_hh_" + str(supply_id) + ".csv", user) f = open(running_name, mode="w", newline="") w = csv.writer(f, lineterminator="\n") for hh_start in hh_range(caches, start_date, finish_date): era = (sess.query(Era).filter( Era.supply == supply, Era.start_date <= hh_start, or_(Era.finish_date == null(), Era.finish_date >= hh_start), ).one()) site = (sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one()) ds = chellow.computer.SupplySource(sess, hh_start, hh_start, forecast_date, era, True, caches) titles = [ "MPAN Core", "Site Code", "Site Name", "Account", "HH Start", "" ] output_line = [ ds.mpan_core, site.code, site.name, ds.supplier_account, hh_format(ds.start_date), "", ] mop_titles = ds.contract_func(era.mop_contract, "virtual_bill_titles")() titles.extend(["mop-" + t for t in mop_titles]) ds.contract_func(era.mop_contract, "virtual_bill")(ds) bill = ds.mop_bill for title in mop_titles: output_line.append(csv_make_val(bill.get(title, ""))) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, csv_make_val(bill[k])]) output_line.append("") dc_titles = ds.contract_func(era.dc_contract, "virtual_bill_titles")() titles.append("") titles.extend(["dc-" + t for t in dc_titles]) ds.contract_func(era.dc_contract, "virtual_bill")(ds) bill = ds.dc_bill for title in dc_titles: output_line.append(csv_make_val(bill.get(title, ""))) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, csv_make_val(bill[k])]) if era.imp_supplier_contract is not None: contract = era.imp_supplier_contract output_line.append("") supplier_titles = ds.contract_func(contract, "virtual_bill_titles")() titles.append("") titles.extend(["imp-supplier-" + t for t in supplier_titles]) ds.contract_func(contract, "virtual_bill")(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(csv_make_val(bill.get(title, ""))) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, csv_make_val(bill[k])]) if era.exp_supplier_contract is not None: contract = era.exp_supplier_contract ds = chellow.computer.SupplySource(sess, hh_start, hh_start, forecast_date, era, False, caches) output_line.append("") supplier_titles = ds.contract_func(contract, "virtual_bill_titles")() titles.append("") titles.extend(["exp-supplier-" + t for t in supplier_titles]) ds.contract_func(contract, "virtual_bill")(ds) bill = ds.supplier_bill for title in supplier_titles: output_line.append(csv_make_val(bill.get(title, ""))) if title in bill: del bill[title] for k in sorted(bill.keys()): output_line.extend([k, csv_make_val(bill[k])]) if titles != prev_titles: prev_titles = titles w.writerow(titles) w.writerow(output_line) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg) w.writerow([msg]) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)
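# Illustrative sketch (standalone, not part of Chellow): because each era can
# contribute different virtual-bill columns, the report above re-emits the
# header row only when the list of titles differs from the previous row's.
import csv
import io


def write_rows(f, rows_with_titles):
    w = csv.writer(f, lineterminator="\n")
    prev_titles = None
    for titles, row in rows_with_titles:
        if titles != prev_titles:
            w.writerow(titles)
            prev_titles = titles
        w.writerow(row)


if __name__ == "__main__":
    buf = io.StringIO()
    write_rows(buf, [
        (["hh_start", "net-gbp"], ["2023-06-01 00:00", 1.2]),
        (["hh_start", "net-gbp"], ["2023-06-01 00:30", 1.3]),
        (["hh_start", "net-gbp", "vat-gbp"], ["2023-06-01 01:00", 1.1, 0.2]),
    ])
    print(buf.getvalue())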
def content( start_year, start_month, start_day, finish_year, finish_month, finish_day, is_import, supply_id, sess): try: start_date = Datetime( start_year, start_month, start_day, tzinfo=pytz.utc) finish_date = Datetime( finish_year, finish_month, finish_day, tzinfo=pytz.utc) + \ relativedelta(days=1) - HH caches = {} supply = Supply.get_by_id(sess, supply_id) forecast_date = chellow.computer.forecast_date() day_start = start_date prev_bill_titles = [] while not day_start > finish_date: day_finish = day_start + relativedelta(days=1) - HH for era in supply.find_eras(sess, day_start, day_finish): if era.start_date > day_start: chunk_start = era.start_date else: chunk_start = day_start if hh_after(era.finish_date, day_finish): chunk_finish = day_finish else: chunk_finish = era.finish_date ss = chellow.computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, is_import, None, caches) sup_con = ss.supplier_contract bill_titles = chellow.computer.contract_func( caches, sup_con, 'virtual_bill_titles', None)() if bill_titles != prev_bill_titles: yield ','.join( [ 'MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To'] + bill_titles) + '\n' prev_bill_titles = bill_titles site = sess.query(Site).join(SiteEra).filter( SiteEra.era == era, SiteEra.is_physical == true()).one() yield ','.join('"' + str(value) + '"' for value in [ ss.mpan_core, site.code, site.name, ss.supplier_account, hh_format(ss.start_date), hh_format(ss.finish_date)]) chellow.computer.contract_func( caches, sup_con, 'virtual_bill', None)(ss) bill = ss.supplier_bill for title in bill_titles: if title in bill: val_raw = bill[title] if isinstance(val_raw, Datetime): val = hh_format(val_raw) else: val = str(val_raw) yield ',"' + val + '"' del bill[title] else: yield ',""' for k in sorted(bill.keys()): yield ',"' + k + '","' + str(bill[k]) + '"' yield '\n' day_start += relativedelta(days=1) except: yield traceback.format_exc()