def test_set_status(mocker):
    """After _set_status the hh dict gains a 'status' of 'A'."""
    hh_map = {utc_datetime(2012, 2, 1): {}}
    reads = [{'date': utc_datetime(2012, 1, 1)}]
    chellow.computer._set_status(hh_map, reads, utc_datetime(2012, 3, 1))
    assert hh_map == {utc_datetime(2012, 2, 1): {'status': 'A'}}
def test_find_pair(mocker):
    """Going forwards, the pair starts at the first read's date."""
    reads = [
        {'date': utc_datetime(2010, 1, 1), 'reads': {}, 'msn': 'kh'},
        {'date': utc_datetime(2010, 2, 1), 'reads': {}, 'msn': 'kh'},
    ]
    pair = chellow.computer._find_pair(True, reads)
    assert pair['start-date'] == utc_datetime(2010, 1, 1)
def test_u_months_u_start_none():
    """With months=None the first yielded month spans March 2009."""
    months_gen = u_months_u(start_year=2009, start_month=3, months=None)
    first_month = next(months_gen)
    assert first_month == (
        utc_datetime(2009, 3), utc_datetime(2009, 3, 31, 23, 30))
def test_update_era_llfc_valid_to(mocker):
    """Error raised if LLFC finishes before the era"""
    llfc = mocker.Mock()
    llfc.valid_from = utc_datetime(2000, 1, 1)
    llfc.valid_to = utc_datetime(2010, 5, 1)
    start_date = utc_datetime(2010, 1, 1)
    finish_date = utc_datetime(2011, 1, 1)
    imp_supplier_contract = mocker.Mock()
    imp_supplier_contract.start_date.return_value = utc_datetime(2000, 1, 1)
    imp_supplier_contract.finish_date.return_value = None
    era = mocker.Mock()
    era.supply.dno.dno_code = '22'
    era.supply.dno.get_llfc_by_code.return_value = llfc
    with pytest.raises(
            BadRequest,
            match="The imp line loss factor 510 is only valid until "
            "2010-05-01 00:00 but the era ends at 2011-01-01 00:00."):
        Era.update(
            era, mocker.Mock(), start_date, finish_date, mocker.Mock(),
            "A mop account", mocker.Mock(), "A dc account", "mtr001",
            mocker.Mock(), "845", mocker.Mock(), mocker.Mock(), {},
            "22 9877 3472 588", "510", imp_supplier_contract, mocker.Mock(),
            mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock(),
            mocker.Mock(), mocker.Mock(), mocker.Mock())
def test_set_status(mocker):
    # NOTE(review): same name as an earlier test_set_status in this
    # concatenation — presumably they come from different test modules.
    """The hh inside the read window is given status "A"."""
    hh_map = {utc_datetime(2012, 2, 1): {}}
    chellow.computer._set_status(
        hh_map, [{"date": utc_datetime(2012, 1, 1)}],
        utc_datetime(2012, 3, 1))
    assert hh_map == {utc_datetime(2012, 2, 1): {"status": "A"}}
def test_MTC_find_by_code(sess):
    """find_by_code locates the MTC stored as '034' when queried as '34'."""
    participant = Participant.insert(sess, "CALB", "AK Industries")
    market_role_R = MarketRole.insert(sess, "R", "Distributor")
    dno = participant.insert_party(
        sess, market_role_R, "WPD", utc_datetime(2000, 1, 1), None, "22")
    meter_type = MeterType.insert(
        sess, "C5", "COP 1-5", utc_datetime(2000, 1, 1), None)
    meter_payment_type = MeterPaymentType.insert(
        sess, "CR", "Credit", utc_datetime(1996, 1, 1), None)
    Mtc.insert(
        sess, dno, "034", "an mtc", False, False, True, meter_type,
        meter_payment_type, 1, utc_datetime(2000, 1, 1), None)
    sess.commit()
    mtc = Mtc.find_by_code(sess, dno, "34", utc_datetime(2000, 1, 1))
    assert mtc.code == "034"
def test_make_reads_backwards(mocker):
    """Going backwards, _make_reads yields the later read first."""
    msn = "k"
    earlier = {"date": utc_datetime(2018, 3, 10), "msn": msn}
    later = {"date": utc_datetime(2018, 3, 13), "msn": msn}
    result = list(
        chellow.computer._make_reads(False, iter([earlier]), iter([later])))
    assert result == [later, earlier]
def test_write_sites(mocker):
    """_write_sites queries sites and displaced eras for the year."""
    sess = mocker.Mock()
    caches = {}
    writer = mocker.Mock()
    year = 2010
    year_start = to_utc(ct_datetime(year - 1, 4, 1))
    year_finish = to_utc(ct_datetime(year, 3, 31, 23, 30))
    month_start = to_utc(utc_datetime(year, 3, 1))
    site = mocker.Mock()
    ms = mocker.patch("chellow.reports.report_181._make_sites", autospec=True)
    ms.return_value = [site]
    de = mocker.patch("chellow.computer.displaced_era", autospec=True)
    de.return_value = mocker.Mock()
    ss = mocker.patch("chellow.computer.SiteSource", autospec=True)
    ss_instance = ss.return_value
    triad_date = utc_datetime(year, 1, 12)
    ss_instance.hh_data = [{
        "start-date": month_start,
        "triad-actual-1-date": triad_date,
        "triad-actual-1-msp-kw": 12,
        "triad-actual-1-laf": 1,
        "triad-actual-1-gsp-kw": 12,
        "triad-actual-2-date": triad_date,
        "triad-actual-2-msp-kw": 12,
        "triad-actual-2-laf": 1,
        "triad-actual-2-gsp-kw": 12,
        "triad-actual-3-date": triad_date,
        "triad-actual-3-msp-kw": 12,
        "triad-actual-3-laf": 1,
        "triad-actual-3-gsp-kw": 12,
        "triad-actual-gsp-kw": 12,
        "triad-actual-rate": 10,
        "triad-actual-gbp": 120,
    }]
    ss_instance.supplier_bill_hhs = {month_start: {}}
    mocker.patch("chellow.duos.duos_vb", autospec=True)
    mocker.patch("chellow.triad.hh", autospec=True)
    forecast_date = chellow.computer.forecast_date()
    chellow.reports.report_181._write_sites(sess, caches, writer, year, None)
    ms.assert_called_once_with(
        sess, year_start, year_finish, None, ("gen", "gen-net"))
    de.assert_called_once_with(
        sess, caches, site, month_start, year_finish, forecast_date)
def test_fetch_cvs(mocker, sess):
    """fetch_cvs with a fresh CV file logs that a rate script was added."""
    market_role_Z = MarketRole.insert(sess, "Z", "Non-core")
    participant = Participant.insert(sess, "CALB", "AK Industries")
    participant.insert_party(
        sess, market_role_Z, "None core", utc_datetime(2000, 1, 1), None,
        None)
    Contract.insert_non_core(
        sess, "g_cv", "", {"enabled": True, "url": "https://example.com"},
        to_utc(ct_datetime(2014, 6, 1)), None, {})
    sess.commit()
    mock_response = mocker.Mock()
    with open("test/test_g_cv/cv.csv") as f:
        mock_response.text = f.read()
    mock_requests = mocker.patch("chellow.g_cv.requests")
    mock_requests.post = mocker.Mock(return_value=mock_response)
    messages = []
    # 'messages.append' stands in for the original local log function.
    chellow.g_cv.fetch_cvs(sess, messages.append)
    assert messages[-1] == "Added new rate script."
def test_general_import_llfc_update_valid_to_no_change(sess):
    """An LLFC update with '{no change}' fields completes without error."""
    participant = Participant.insert(sess, "CALB", "AK Industries")
    market_role_R = MarketRole.insert(sess, "R", "Distributor")
    dno = participant.insert_party(
        sess, market_role_R, "WPD", utc_datetime(2000, 1, 1), None, "10")
    insert_voltage_levels(sess)
    voltage_level = VoltageLevel.get_by_code(sess, "HV")
    dno.insert_llfc(
        sess, "328", "PC 5-8 & HH HV", voltage_level, False, True,
        to_utc(ct_datetime(2020, 4, 1)), None)
    sess.commit()
    vals = [
        "10", "328", "2020-04-01 00:00", "Reserved EHV 33kV - Import", "HV",
        "False", "{no change}", "{no change}",
    ]
    chellow.general_import.general_import_llfc(sess, "update", vals, [])
def do_get(sess):
    """Kick off the g_monthly_duration report in a background thread and
    redirect to the downloads page.
    """
    year = req_int("finish_year")
    month = req_int("finish_month")
    months = req_int("months")
    # Work back from the finish month to the start of the period.
    start_date = utc_datetime(year, month, 1) - relativedelta(
        months=months - 1)
    site_id = req_int('site_id') if 'site_id' in request.values else None
    if 'g_supply_id' in request.values:
        g_supply_id = req_int('g_supply_id')
    else:
        g_supply_id = None
    if 'compression' in request.values:
        compression = req_bool('compression')
    else:
        compression = True
    threading.Thread(
        target=content,
        args=(
            ['g_monthly_duration'], site_id, g_supply_id, g.user,
            compression, start_date, months)).start()
    return chellow_redirect("/downloads", 303)
def test_process_segment_ccd2_blank_CONS(mocker):
    """A CCD 2 segment with a blank CONS element is handled without error."""
    elements = {
        "CCDE": ["2", "ADD"],
        "TCOD": ["584867", "AAHEDC"],
        "TMOD": [],
        "MTNR": [],
        "MLOC": ["22767395756734"],
        "PRDT": [],
        "PVDT": [],
        "NDRP": [],
        "PRRD": [],
        "CONS": [""],
        "CONB": [],
        "ADJF": ["UG"],
        "CONA": [],
        "BPRI": ["974"],
        "NUCT": ["877457492", "KWH"],
        "CSDT": ["191001"],
        "CEDT": ["191101"],
        "CPPU": ["748"],
    }
    headers = {
        "reference": "hgdertk",
        "bill_type_code": "N",
        "issue_date": utc_datetime(2019, 9, 3),
    }
    chellow.bill_parser_engie_edi._process_segment(
        "CCD", elements, "", headers)
def test_process_segment_CCD1(mocker):
    """A CCD 1 segment adds a register read to the headers."""
    elements = {
        "CCDE": ["1", "f"],
        "TCOD": ["584867", "AAHEDC"],
        "TMOD": ["1", "2", "3", "4"],
        "MTNR": ["rikt8"],
        "MLOC": ["alskdj"],
        "PRDT": ["180901"],
        "PVDT": ["180801"],
        "NDRP": ["x"],
        # Note the third PRRD value is deliberately an int, not a str.
        "PRRD": ["9876", "00", 78649, "00"],
        "CONS": ["77", "kWh", "R"],
        "CONB": ["9850", "kWh", "R"],
        "ADJF": ["x", "688", "R"],
        "CONA": ["743", "kWh", "R"],
        "BPRI": ["895"],
        "NUCT": ["68349", "kWh", "R"],
        "CSDT": ["180901"],
        "CEDT": ["180801"],
        "CPPU": ["7003"],
        "CTOT": ["78679", "R"],
        "TSUP": ["jf"],
        "VATC": ["ig"],
        "VATP": ["77"],
        "MSAD": ["fk", "fdk"],
    }
    headers = {}
    chellow.bill_parser_engie_edi._process_segment(
        "CCD", elements, "", headers)
    assert headers == {
        "reads": [{
            "msn": "rikt8",
            "mpan": " al skdj ",
            "coefficient": Decimal("0.00688"),
            "units": "kWh",
            "tpr_code": "1",
            "prev_date": utc_datetime(2018, 8, 1, 22, 30),
            "prev_value": Decimal("78649"),
            "prev_type_code": "N",
            "pres_date": utc_datetime(2018, 9, 1, 22, 30),
            "pres_value": Decimal("9876"),
            "pres_type_code": "N",
        }]
    }
def __next__(self):
    """Return the next half-hourly datum parsed from the file.

    Iterates self.reader until a data line produces a datum dict with
    keys mpan_core, channel_type, start_date, value and status.  "#O"
    lines set the MPAN core, "#S" lines select the sensor/channel type,
    and "#F2" or blank lines are skipped.  Any parse problem is raised
    as a BadRequest annotated with the offending line number and text.
    """
    local_datum = None
    try:
        while local_datum is None:
            self.line_number, self.line = next(self.reader)
            lline = self.line.strip().upper()
            if lline.startswith("#O"):
                # "#O" gives the MPAN core for subsequent data lines.
                self.core = parse_mpan_core(lline[2:])
            elif lline.startswith("#S"):
                # "#S" selects a sensor, mapped to a channel type.
                sensor = int(lline[2:].strip())
                try:
                    self.channel_type = self.sensor_map[sensor]
                except KeyError:
                    raise BadRequest(
                        "The sensor number must be between 1 and 4 "
                        "inclusive.")
            elif lline.startswith("#F2") or len(lline) == 0:
                # Format marker and blank lines carry no data.
                continue
            else:
                fields = [f.strip() for f in lline.split(",")]
                if len(fields) != 4:
                    raise BadRequest(
                        "There should be 4 comma separated values, but I "
                        "found " + str(len(fields)) + ".")
                d_day, d_month, d_year = map(int, fields[0].split("/"))
                time_fields = tuple(map(int, fields[1].split(":")))
                if len(time_fields) > 2 and time_fields[2] != 0:
                    raise BadRequest(
                        "The number of seconds (if present) must always "
                        "be zero.")
                # The timestamp marks the end of the half-hour; step back
                # one HH to obtain the period's start.
                start_date = (
                    utc_datetime(
                        d_year, d_month, d_day, time_fields[0],
                        time_fields[1]) - HH)
                try:
                    value = decimal.Decimal(fields[2])
                except (ValueError, decimal.InvalidOperation):
                    # Bug fix: Decimal raises decimal.InvalidOperation (not
                    # ValueError) for a malformed literal, so the original
                    # 'except ValueError' never fired; catch both.
                    raise BadRequest(
                        "Problem parsing the value: " + fields[2])
                status = fields[3][-1]
                local_datum = {
                    "mpan_core": self.core,
                    "channel_type": self.channel_type,
                    "start_date": start_date,
                    "value": value,
                    "status": status,
                }
        return local_datum
    except BadRequest as e:
        e.description = "".join((
            "Problem at line number: ",
            str(self.line_number),
            ": ",
            self.line,
            ": ",
            e.description,
        ))
        raise e
def test_make_site_deltas(mocker):
    """With zeroed hh data, no export-site deltas are produced."""
    era_1 = mocker.Mock()
    era_1.start_date = utc_datetime(2018, 1, 1)
    era_1.finish_date = None
    filter_results = iter([[era_1], []])

    class FakeSess:
        # Chainable query stub: query/join return self, filter pops the
        # next canned result list.
        def query(self, *args):
            return self

        def join(self, *args):
            return self

        def filter(self, *args):
            return next(filter_results)

    sess = FakeSess()
    site = mocker.Mock()
    site.code = "1"
    scenario_hh = {site.code: {"used": "2019-03-01 00:00, 0"}}
    ss = mocker.patch("chellow.computer.SiteSource", autospec=True)
    ss.return_value.hh_data = [{
        "start-date": utc_datetime(2019, 3, 1),
        "used-kwh": 0,
        "export-net-kwh": 0,
        "import-net-kwh": 0,
        "msp-kwh": 0,
    }]
    se = mocker.patch("chellow.reports.report_247.SiteEra", autospec=True)
    se.site = mocker.Mock()
    sup_s = mocker.patch(
        "chellow.reports.report_247.SupplySource", autospec=True)
    sup_s.return_value.hh_data = {}
    res = _make_site_deltas(
        sess, {}, site, scenario_hh, utc_datetime(2019, 4, 1), None)
    assert len(res["supply_deltas"][False]["net"]["site"]) == 0
def test_eras(mocker):
    """_make_eras builds exactly this SQL for a year-long chunk."""
    sess = Session()
    year_start = utc_datetime(2010, 4, 1)
    year_finish = utc_datetime(2011, 3, 31, 23, 30)
    eras = chellow.reports.report_41._make_eras(
        sess, year_start, year_finish, None)
    print(eras)
    desired = (
        "SELECT era.id AS era_id, era.supply_id AS era_supply_id, "
        "era.start_date AS era_start_date, "
        "era.finish_date AS era_finish_date, "
        "era.mop_contract_id AS era_mop_contract_id, "
        "era.mop_account AS era_mop_account, "
        "era.dc_contract_id AS era_dc_contract_id, "
        "era.dc_account AS era_dc_account, "
        "era.msn AS era_msn, "
        "era.pc_id AS era_pc_id, "
        "era.mtc_id AS era_mtc_id, "
        "era.cop_id AS era_cop_id, "
        "era.ssc_id AS era_ssc_id, "
        "era.energisation_status_id AS era_energisation_status_id, "
        "era.properties AS era_properties, "
        "era.imp_mpan_core AS era_imp_mpan_core, "
        "era.imp_llfc_id AS era_imp_llfc_id, "
        "era.imp_supplier_contract_id AS era_imp_supplier_contract_id, "
        "era.imp_supplier_account AS era_imp_supplier_account, "
        "era.imp_sc AS era_imp_sc, "
        "era.exp_mpan_core AS era_exp_mpan_core, "
        "era.exp_llfc_id AS era_exp_llfc_id, "
        "era.exp_supplier_contract_id AS era_exp_supplier_contract_id, "
        "era.exp_supplier_account AS era_exp_supplier_account, "
        "era.exp_sc AS era_exp_sc \n"
        "FROM era JOIN supply ON supply.id = era.supply_id "
        "JOIN source ON source.id = supply.source_id "
        "JOIN pc ON pc.id = era.pc_id \n"
        "WHERE era.start_date <= :start_date_1 AND "
        "(era.finish_date IS NULL OR era.finish_date >= :finish_date_1) "
        "AND source.code IN ([POSTCOMPILE_code_1]) AND pc.code = :code_2 "
        "ORDER BY supply.id")
    print(desired)
    assert str(eras) == desired
def test_find_pair(mocker):
    """Going forwards, _find_pair starts the pair at the earlier read."""
    sess = mocker.Mock()
    caches = {}
    reads = [
        {
            "date": utc_datetime(2010, 1, 1),
            "reads": {},
            "msn": "kh",
            "read_type": "N",
        },
        {
            "date": utc_datetime(2010, 2, 1),
            "reads": {},
            "msn": "kh",
            "read_type": "N",
        },
    ]
    pair = chellow.computer._find_pair(sess, caches, True, reads)
    assert pair["start-date"] == utc_datetime(2010, 1, 1)
def test_make_reads_forwards_meter_change(mocker):
    """With a meter change at the same instant, the present read is first."""
    change_date = utc_datetime(2018, 3, 1)
    read_old = {"date": change_date, "msn": "a"}
    read_new = {"date": change_date, "msn": "b"}
    actual = list(
        chellow.computer._make_reads(
            True, iter([read_old]), iter([read_new])))
    assert actual == [read_new, read_old]
def test_with_scenario(mocker, sess, client):
    """POSTing /reports/247 with a scenario starts the content thread with
    the scenario's parsed properties.
    """
    mock_Thread = mocker.patch("chellow.reports.report_247.threading.Thread")
    properties = """{
        "scenario_start_year": 2009,
        "scenario_start_month": 8,
        "scenario_duration": 1,
        "era_maps": {
            2000-08-01T00:00:00Z: {
                "llfcs": {
                    "22": {
                        "new_export": "521"
                    }
                },
                "supplier_contracts": {
                    "new_export": 10
                }
            }
        },
        "hh_data": {
            "CI017": {
                "generated": "
                    2009-08-01 00:00, 40
                    2009-08-15 00:00, 40"
            }
        }
    }"""
    scenario_props = loads(properties)
    scenario = Scenario.insert(sess, "New Gen", scenario_props)
    sess.commit()
    now = utc_datetime(2020, 1, 1)
    mocker.patch(
        "chellow.reports.report_247.utc_datetime_now", return_value=now)
    site = Site.insert(sess, "CI017", "Water Works")
    data = {
        "site_id": site.id,
        "scenario_id": scenario.id,
        "compression": False,
    }
    response = client.post("/reports/247", data=data)
    match(response, 303)
    args = scenario_props, ["New Gen"], site.id, None, None, False, [], now
    mock_Thread.assert_called_with(target=content, args=args)
def test_process_CCD_1(mocker):
    """_process_CCD_1 appends a read built from the element values."""
    msn = "hgkh"
    elements = {
        "CCDE": ["1", "", "NRG"],
        "TCOD": ["NIGHT", "Night"],
        "TMOD": ["453043"],
        "MTNR": [msn],
        "CONS": [[]],
        "BPRI": ["10"],
        "PRDT": ["200301"],
        "PVDT": ["200331"],
        "MLOC": [""],
        "PRRD": ["0", "00", "1", "00"],
        "ADJF": ["", "1"],
    }
    headers = {"reads": []}
    chellow.bill_parser_haven_edi._process_CCD_1(elements, headers)
    assert headers == {
        "reads": [
            {
                "msn": msn,
                "mpan": " ",
                "coefficient": Decimal("0.00001"),
                "units": "kWh",
                "tpr_code": "453043",
                "prev_date": utc_datetime(2020, 3, 31, 22, 30),
                "prev_value": Decimal("1"),
                "prev_type_code": "N",
                "pres_date": utc_datetime(2020, 3, 1, 23, 30),
                "pres_value": Decimal("0"),
                "pres_type_code": "N",
            }
        ]
    }
def __next__(self):
    """Return the next half-hourly datum parsed from the file.

    Reads lines from self.reader: '#O' lines set the MPAN core, '#S'
    lines select the sensor/channel type, '#F2' and blank lines are
    skipped, and any other line is a 4-field CSV datum.  Parse problems
    are raised as BadRequest annotated with the line number and text.
    """
    local_datum = None
    try:
        while local_datum is None:
            self.line_number, self.line = next(self.reader)
            lline = self.line.strip().upper()
            if lline.startswith("#O"):
                self.core = parse_mpan_core(lline[2:])
            elif lline.startswith("#S"):
                sensor = int(lline[2:].strip())
                try:
                    self.channel_type = self.sensor_map[sensor]
                except KeyError:
                    raise BadRequest(
                        "The sensor number must be between 1 and 4 "
                        "inclusive.")
            elif lline.startswith("#F2") or len(lline) == 0:
                continue
            else:
                fields = [f.strip() for f in lline.split(',')]
                if len(fields) != 4:
                    raise BadRequest(
                        "There should be 4 comma separated values, but I "
                        "found " + str(len(fields)) + ".")
                d_day, d_month, d_year = map(int, fields[0].split('/'))
                time_fields = tuple(map(int, fields[1].split(':')))
                if len(time_fields) > 2 and time_fields[2] != 0:
                    raise BadRequest(
                        "The number of seconds (if present) must always "
                        "be zero.")
                # The timestamp marks the end of the half-hour; subtract
                # one HH to get the period start.
                start_date = utc_datetime(
                    d_year, d_month, d_day, time_fields[0],
                    time_fields[1]) - HH
                try:
                    value = decimal.Decimal(fields[2])
                except (ValueError, decimal.InvalidOperation):
                    # Bug fix: Decimal raises decimal.InvalidOperation (not
                    # ValueError) for a malformed literal, so the original
                    # 'except ValueError' never fired; catch both.
                    raise BadRequest(
                        "Problem parsing the value: " + fields[2])
                status = fields[3][-1]
                local_datum = {
                    'mpan_core': self.core,
                    'channel_type': self.channel_type,
                    'start_date': start_date,
                    'value': value,
                    'status': status}
        return local_datum
    except BadRequest as e:
        e.description = ''.join(
            (
                "Problem at line number: ", str(self.line_number), ": ",
                self.line, ": ", e.description))
        raise e
def test_find_hhs_empty_pairs(mocker):
    """With no pairs, _find_hhs still emits one zeroed hh for the chunk."""
    mocker.patch("chellow.computer.is_tpr", return_value=True)
    caches = {}
    sess = mocker.Mock()
    start = utc_datetime(2010, 1, 1)
    hhs = chellow.computer._find_hhs(caches, sess, [], start, start)
    assert hhs == {
        start: {
            "msp-kw": 0,
            "msp-kwh": 0,
            "hist-kwh": 0,
            "imp-msp-kvar": 0,
            "imp-msp-kvarh": 0,
            "exp-msp-kvar": 0,
            "exp-msp-kvarh": 0,
            "tpr": "00001",
        }
    }
def test_find_hhs_empty_pairs(mocker):
    """An empty pairs list still yields a single zeroed hh."""
    mocker.patch("chellow.computer.is_tpr", return_value=True)
    caches = {}
    sess = mocker.Mock()
    chunk_start = utc_datetime(2010, 1, 1)
    chunk_finish = utc_datetime(2010, 1, 1)
    hhs = chellow.computer._find_hhs(
        caches, sess, [], chunk_start, chunk_finish)
    expected_hh = {
        'msp-kw': 0,
        'msp-kwh': 0,
        'hist-kwh': 0,
        'imp-msp-kvar': 0,
        'imp-msp-kvarh': 0,
        'exp-msp-kvar': 0,
        'exp-msp-kvarh': 0,
        'tpr': '00001',
    }
    assert hhs == {utc_datetime(2010, 1, 1): expected_hh}
def test_http_supplier_batch_with_mpan_cores(mocker, client, sess):
    """Report 111 for a batch passes the mpan_cores list to the thread."""
    market_role_X = MarketRole.insert(sess, "X", "Supplier")
    participant = Participant.insert(sess, "hhak", "AK Industries")
    participant.insert_party(
        sess, market_role_X, "Fusion Ltc", utc_datetime(2000, 1, 1), None,
        None)
    supplier_contract = Contract.insert_supplier(
        sess, "Fusion Supplier 2000", participant, "", {},
        utc_datetime(2000, 1, 1), None, {})
    batch = supplier_contract.insert_batch(sess, "005", "batch 5")
    sess.commit()
    MockThread = mocker.patch("chellow.reports.report_111.threading.Thread")
    response = client.get(
        "/reports/111",
        data={"batch_id": str(batch.id), "mpan_cores": "22 1065 3921 534"})
    match(response, 303)
    MockThread.assert_called_with(
        target=chellow.reports.report_111.content,
        args=(
            batch.id, None, None, None, None, None, ["22 1065 3921 534"],
            "_batch_005"))
def vb(ds):
    """Add gas Climate Change Levy (CCL) totals to the data source.

    For un-billed data, month-end hhs (or the supply's last hh) gain
    'ccl_kwh'/'ccl_gbp' when the month's kWh exceeds THRESHOLD.  For the
    last bill generation, the totals are written onto the final hh of
    'ds' itself.
    """
    rate_set = ds.rate_sets['ccl_rate']

    # No g_era after the finish date means the supply ends with this chunk.
    if ds.g_supply.find_g_era_at(ds.sess, ds.finish_date + HH) is None:
        sup_end = ds.finish_date
    else:
        sup_end = None

    if ds.g_bill is None:
        for hh in ds.hh_data:
            if hh['utc-is-month-end'] or hh['start-date'] == sup_end:
                month_finish = hh['start-date']
                kwh = 0
                gbp = 0
                month_start = utc_datetime(
                    month_finish.year, month_finish.month, 1)
                for dsr in chellow.computer.get_data_sources(
                        ds, month_start, month_finish):
                    for datum in dsr.hh_data:
                        rate = float(
                            ds.file_rate(
                                'g_ccl', datum['start_date'],
                                'ccl_gbp_per_kwh'))
                        rate_set.add(rate)
                        kwh += datum['kwh']
                        gbp += datum['kwh'] * rate
                if kwh > THRESHOLD:
                    hh['ccl_kwh'] = kwh
                    hh['ccl_gbp'] = gbp
    elif ds.is_last_g_bill_gen:
        kwh = 0
        gbp = 0
        # Bug fix: the loop variable used to be named 'ds', shadowing the
        # parameter, so the final 'ds.hh_data[-1]' wrote the totals onto the
        # last inner data source instead of the one passed in (compare the
        # 'dsr' naming in the branch above).
        for dsr in chellow.computer.get_data_sources(
                ds, ds.g_bill_start, ds.g_bill_finish):
            for hh in dsr.hh_data:
                rate = float(
                    ds.file_rate(
                        'g_ccl', hh['start_date'], 'ccl_gbp_per_kwh'))
                rate_set.add(rate)
                kwh += hh['kwh']
                gbp += hh['kwh'] * rate
        if kwh > THRESHOLD:
            ds.hh_data[-1]['ccl_kwh'] = kwh
            ds.hh_data[-1]['ccl_gbp'] = gbp
def test_find_hhs_pairs_before_after_chunk_finish(mocker):
    """If there's a meter change, a pair can start after the end of the
    chunk. Here we test the case for a pair before and after the chunk
    finish.
    """
    mocker.patch("chellow.g_engine.find_cv", return_value=(39, 39))
    sess = mocker.Mock()
    caches = {}
    hist_g_era = mocker.Mock()
    hist_g_era.correction_factor = 1
    hist_g_era.g_unit = mocker.Mock()
    hist_g_era.g_unit.code = "M3"
    hist_g_era.g_unit.factor = 1
    pairs = [
        {"start-date": utc_datetime(2010, 1, 1), "units": 1},
        {"start-date": utc_datetime(2010, 1, 1, 0, 30), "units": 1},
    ]
    chunk_start = chunk_finish = utc_datetime(2010, 1, 1)
    hhs = chellow.g_engine._find_hhs(
        sess, caches, hist_g_era, pairs, chunk_start, chunk_finish, 0, "SW")
    assert hhs == {
        utc_datetime(2010, 1, 1): {
            "unit_code": "M3",
            "unit_factor": 1.0,
            "units_consumed": 1,
            "correction_factor": 1.0,
            "calorific_value": 39,
            "avg_cv": 39,
        }
    }
    # The pair after the chunk finish has been dropped and the survivor
    # given a finish date.
    assert pairs == [
        {
            "start-date": utc_datetime(2010, 1, 1),
            "units": 1,
            "finish-date": utc_datetime(2010, 1, 1),
        },
    ]
def test_batch_http(mocker, sess, client):
    """GET /reports/429 for a gas batch starts the content thread."""
    g_contract = GContract.insert(
        sess, "Fusion 2020", "", {}, utc_datetime(2000, 1, 1), None, {})
    g_batch = g_contract.insert_g_batch(sess, "b1", "Jan batch")
    sess.commit()
    mock_Thread = mocker.patch(
        "chellow.reports.report_429.threading.Thread", autospec=True)
    response = client.get("/reports/429", data={"g_batch_id": g_batch.id})
    match(response, 303)
    mock_Thread.assert_called_with(
        target=chellow.reports.report_429.content,
        args=(g_batch.id, None, None))
def test_parse_row(mocker):
    """_parse_row converts the period end to 2019-03-31 22:30 UTC."""
    cell_values = [
        "Power Comany Ltd.",
        "Bill Paja",
        "556",
        "BILL PAJA",
        "883",
        "1 True Way",
        "672770",
        43555.00,
        43555.00,
        "",
        "2019-03-01 - 2019-03-31",
        "",
        "",
        "",
        "Draft",
        "",
        "Product",
        "Hand Held Read -",
        43555.00,
        43555.00,
        "",
        "2299999999929",
        "",
        "",
        "",
        "785",
        "GBP",
        "INV",
        "",
        "",
    ]
    row = []
    for val in cell_values:
        cell = mocker.Mock(spec=xlrd.sheet.Cell)
        cell.value = val
        row.append(cell)
    bill = chellow.bill_parser_engie_xls._parse_row(
        row, 2, 0, ["To Date"])
    assert bill["finish_date"] == utc_datetime(2019, 3, 31, 22, 30)
def test_process_g_bill_ids(mocker):
    """_process_g_bill_ids doesn't look up a g_era for this bill."""
    sess = mocker.Mock()
    forecast_date = utc_datetime(2010, 4, 1)
    query = mocker.Mock()
    sess.query.return_value = query
    m_filter = mocker.Mock()
    query.filter.return_value = m_filter
    g_bill = mocker.Mock()
    m_filter.one.return_value = g_bill
    g_bill.g_reads = []
    m_filter.order_by.return_value = []
    g_bill.g_supply = mocker.Mock()
    g_bill.start_date = forecast_date
    g_bill.finish_date = forecast_date
    MockGBill = mocker.patch(
        "chellow.reports.report_429.GBill", autospec=True)
    MockGBill.g_supply = mocker.Mock()
    MockGBill.start_date = forecast_date
    MockGBill.finish_date = forecast_date
    find_g_era_at = g_bill.g_supply.find_g_era_at
    chellow.reports.report_429._process_g_bill_ids(
        sess, {}, [1], forecast_date, [], mocker.Mock(), [], mocker.Mock())
    find_g_era_at.assert_not_called()
def test_process_segment_CCD3_ro(mocker):
    """A CCD 3 RO segment fills in the MPAN core and bill period."""
    elements = {
        "CCDE": ["3", "ADD"],
        "TCOD": ["425779", "RO Mutualisation"],
        "TMOD": [],
        "MTNR": [],
        "MLOC": ["22767395756734"],
        "PRDT": [],
        "PVDT": [],
        "NDRP": [],
        "PRRD": [],
        "CONS": ["", ""],
        "CONB": [],
        "ADJF": ["UG"],
        "CONA": [],
        "BPRI": ["974"],
        "NUCT": ["877457492", "KWH"],
        "CSDT": ["191001"],
        "CEDT": ["191101"],
        "CPPU": ["748"],
        "CTOT": ["76981"],
    }
    issue_date = utc_datetime(2019, 9, 3)
    headers = {
        "issue_date": issue_date,
        "reference": "hgtuer8",
        "bill_type_code": "N",
    }
    chellow.bill_parser_engie_edi._process_segment(
        "CCD", elements, "", headers)
    assert headers == {
        "mpan_core": "22 7673 9575 6734",
        "bill_start_date": to_utc(ct_datetime(2019, 10, 1)),
        "bill_finish_date": to_utc(ct_datetime(2019, 10, 31, 23, 30)),
        "issue_date": issue_date,
        "reference": "hgtuer8",
        "bill_type_code": "N",
    }
def test_find_hhs_pairs_before_after_chunk_finish(mocker):
    """If there's a meter change, a pair can start after the end of the
    chunk. Here we test the case for a pair before and after the chunk
    finish.
    """
    mocker.patch("chellow.g_engine.find_cv", return_value=(39, 39))
    sess = mocker.Mock()
    caches = {}
    hist_g_era = mocker.Mock()
    hist_g_era.correction_factor = 1
    hist_g_era.g_unit = mocker.Mock()
    hist_g_era.g_unit.code = 'M3'
    hist_g_era.g_unit.factor = 1
    first_pair = {'start-date': utc_datetime(2010, 1, 1), 'units': 1}
    second_pair = {'start-date': utc_datetime(2010, 1, 1, 0, 30), 'units': 1}
    pairs = [first_pair, second_pair]
    chunk_start = utc_datetime(2010, 1, 1)
    chunk_finish = utc_datetime(2010, 1, 1)
    hhs = chellow.g_engine._find_hhs(
        sess, caches, hist_g_era, pairs, chunk_start, chunk_finish, 0, 'SW')
    assert hhs == {
        utc_datetime(2010, 1, 1): {
            'unit_code': 'M3',
            'unit_factor': 1.0,
            'units_consumed': 1,
            'correction_factor': 1.0,
            'calorific_value': 39,
            'avg_cv': 39,
        }
    }
    assert pairs == [
        {
            'start-date': utc_datetime(2010, 1, 1),
            'units': 1,
            'finish-date': utc_datetime(2010, 1, 1),
        },
    ]
def __next__(self):
    """Return the next half-hourly datum parsed from the CSV shredder.

    Each line holds an MPAN core (column 0), a d/m/y date (column 2) and
    then up to 48 half-hour values; empty value cells are skipped.  Parse
    problems surface as BadRequest annotated with the line number.
    """
    datum = None
    try:
        while datum is None:
            if self.col_idx > 50:
                # Past the last half-hour column: fetch the next line.
                self.line_number, self.values = next(self.shredder)
                if len(self.values) == 0:
                    continue
                self.col_idx = 0
            if self.col_idx == 0:
                try:
                    self.core = self.values[self.col_idx]
                except (KeyError, IndexError):
                    # Sequence indexing raises IndexError, not the KeyError
                    # the original caught; accept both to be safe.
                    raise BadRequest(
                        "There doesn't seem to be an MPAN Core at the "
                        "beginning of this line. ")
                self.core = self.mpan_map.get(self.core, self.core)
                self.core = parse_mpan_core(self.core)
            elif self.col_idx == 2:
                day, month, year = map(
                    int, self.values[self.col_idx].split("/"))
                self.date = utc_datetime(year, month, day)
            elif 2 < self.col_idx < len(self.values):
                hh_value = self.values[self.col_idx].strip()
                mins = 30 * (self.col_idx - 3)
                if len(hh_value) > 0:
                    datum = {
                        "mpan_core": self.core,
                        "channel_type": "ACTIVE",
                        "start_date": self.date + timedelta(minutes=mins),
                        "value": decimal.Decimal(hh_value),
                        "status": "A",
                    }
            self.col_idx += 1
    except BadRequest as e:
        # Bug fix: str.join takes a single iterable; the arguments were
        # previously passed separately, which raised TypeError instead of
        # annotating the error.
        e.description = "".join((
            "Problem at line number: ",
            str(self.line_number),
            ": ",
            e.description,
        ))
        raise e
    return datum
def __next__(self):
    """Return the next half-hourly datum parsed from the CSV shredder.

    Each line holds an MPAN core (column 0), a d/m/y date (column 2) and
    then up to 48 half-hour values; empty value cells are skipped.  Parse
    problems surface as BadRequest annotated with the line number.
    """
    datum = None
    try:
        while datum is None:
            if self.col_idx > 50:
                # Past the last half-hour column: fetch the next line.
                self.line_number, self.values = next(self.shredder)
                if len(self.values) == 0:
                    continue
                self.col_idx = 0
            if self.col_idx == 0:
                try:
                    self.core = self.values[self.col_idx]
                except (KeyError, IndexError):
                    # Sequence indexing raises IndexError, not the KeyError
                    # the original caught; accept both to be safe.
                    raise BadRequest(
                        "There doesn't seem to be an MPAN Core at the "
                        "beginning of this line. ")
                self.core = self.mpan_map.get(self.core, self.core)
                self.core = parse_mpan_core(self.core)
            elif self.col_idx == 2:
                day, month, year = map(
                    int, self.values[self.col_idx].split('/'))
                self.date = utc_datetime(year, month, day)
            elif 2 < self.col_idx < len(self.values):
                hh_value = self.values[self.col_idx].strip()
                mins = 30 * (self.col_idx - 3)
                if len(hh_value) > 0:
                    datum = {
                        'mpan_core': self.core,
                        'channel_type': 'ACTIVE',
                        'start_date': self.date + timedelta(minutes=mins),
                        'value': decimal.Decimal(hh_value),
                        'status': 'A'}
            self.col_idx += 1
    except BadRequest as e:
        # Bug fix: str.join takes a single iterable; the arguments were
        # previously passed separately, which raised TypeError instead of
        # annotating the error.
        e.description = ''.join(
            (
                "Problem at line number: ", str(self.line_number), ": ",
                e.description))
        raise e
    return datum
days_in_month += 1 excess_kva = max(md_kva - ds.sc, 0) if 'excess-gbp-per-kva-per-day' in tariff: rate = tariff['excess-gbp-per-kva-per-day'] ds.supplier_rate_sets['duos-excess-availability-kva'].add( excess_kva) rate = tariff['excess-gbp-per-kva-per-day'] ds.supplier_rate_sets['duos-excess-availability-rate'].add(rate) bill['duos-excess-availability-days'] += days_in_month bill['duos-excess-availability-gbp'] += rate * excess_kva * \ days_in_month CUTOFF_DATE = utc_datetime(2010, 4) def duos_vb(ds): try: dno_caches = ds.caches['dno'] except KeyError: ds.caches['dno'] = {} dno_caches = ds.caches['dno'] try: dno_cache = dno_caches[ds.dno_contract.name] except KeyError: dno_caches[ds.dno_contract.name] = {} dno_cache = dno_caches[ds.dno_contract.name]
def hh(data_source, rate_period='monthly'):
    """Add triad estimate (and, in March, triad actual) figures to each
    month-end hh of *data_source*.

    For every hh flagged 'ct-is-month-end', an estimated triad charge is
    spread over the financial year (April-March) according to
    *rate_period* ('monthly' or daily).  In March (month 3) the actual
    triad charge is also computed and the accumulated estimates are
    reversed out (the final figure is multiplied by -1).
    """
    for hh in (h for h in data_source.hh_data if h['ct-is-month-end']):
        hh_start = hh['start-date']
        month_start = utc_datetime(hh_start.year, hh_start.month)
        month_finish = month_start + relativedelta(months=1) - HH
        month_num = month_start.month
        # Get start of last financial year
        financial_year_start = month_start
        while financial_year_start.month != 4:
            financial_year_start -= relativedelta(months=1)
        last_financial_year_start = financial_year_start - \
            relativedelta(years=1)
        financial_year_finish = financial_year_start + \
            relativedelta(years=1) - HH
        # Last year's triad dates, shifted forward a year, are used as the
        # estimate of this year's triad dates.
        triad_dates = []
        earliest_triad = None
        for dt in data_source.hh_rate(
                db_id, last_financial_year_start, 'triad_dates'):
            triad_dates.append(dt + relativedelta(years=1))
            if earliest_triad is None or dt < earliest_triad:
                earliest_triad = dt
        est_triad_kws = []
        for t_date in triad_dates:
            for ds in chellow.computer.get_data_sources(
                    data_source, t_date, t_date, financial_year_start):
                chellow.duos.duos_vb(ds)
                est_triad_kws.append(ds.hh_data[0])
        if data_source.site is None:
            era = data_source.supply.find_era_at(
                data_source.sess, earliest_triad)
            if era is None or era.get_channel(
                    data_source.sess, data_source.is_import,
                    'ACTIVE') is None:
                # No half-hourly channel data: estimate the triad kW as 85%
                # of the peak half-hour's kWh doubled to a kW figure.
                est_triad_kw = 0.85 * max(
                    datum['msp-kwh'] for datum in data_source.hh_data) * 2
                for est_datum in est_triad_kws:
                    est_datum['msp-kw'] = est_triad_kw
                    est_datum['gsp-kw'] = est_datum['msp-kw'] * \
                        est_datum['laf']
        triad_calc(
            hh, 'triad-estimate', est_triad_kws, financial_year_start,
            financial_year_finish, data_source, month_start)
        est_triad_gbp = hh['triad-estimate-rate'] * hh['triad-estimate-gsp-kw']
        if rate_period == 'monthly':
            total_intervals = 12
            est_intervals = 1
            hh['triad-estimate-months'] = est_intervals
        else:
            # Daily rate period: count the days of the financial year and
            # the midnight hhs within this month.
            dt = financial_year_start
            total_intervals = 0
            while dt <= financial_year_finish:
                total_intervals += 1
                dt += relativedelta(days=1)
            est_intervals = 0
            for ds in chellow.computer.get_data_sources(
                    data_source, month_start, month_finish):
                for h in ds.hh_data:
                    if h['utc-decimal-hour'] == 0:
                        est_intervals += 1
            hh['triad-estimate-days'] = est_intervals
        hh['triad-estimate-gbp'] = est_triad_gbp / total_intervals * \
            est_intervals
        if month_num == 3:
            # March: reconcile against the actual triad dates.
            triad_kws = []
            for t_date in data_source.hh_rate(
                    db_id, month_start, 'triad_dates'):
                try:
                    ds = next(
                        iter(
                            chellow.computer.get_data_sources(
                                data_source, t_date, t_date)))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh = ds.hh_data[0]
                    else:
                        thh = {
                            'hist-start': t_date, 'msp-kw': 0,
                            'status': 'before contract',
                            'laf': 'before contract', 'gsp-kw': 0}
                except StopIteration:
                    thh = {
                        'hist-start': t_date, 'msp-kw': 0,
                        'status': 'before start of supply',
                        'laf': 'before start of supply', 'gsp-kw': 0}
                triad_kws.append(thh)
            triad_calc(
                hh, 'triad-actual', triad_kws, financial_year_start,
                financial_year_finish, data_source, month_start)
            hh['triad-actual-gbp'] = hh['triad-actual-rate'] * \
                hh['triad-actual-gsp-kw']
            # Count the intervals covered by eras of this supply, walking
            # back from month end towards the financial year start.
            era = data_source.supply.find_era_at(
                data_source.sess, month_finish)
            est_intervals = 0
            interval = relativedelta(months=1) if \
                rate_period == 'monthly' else relativedelta(days=1)
            dt = month_finish
            while era is not None and dt > financial_year_start:
                est_intervals += 1
                dt -= interval
                if hh_after(dt, era.finish_date):
                    era = data_source.supply.find_era_at(
                        data_source.sess, dt)
            if rate_period == 'monthly':
                hh['triad-all-estimates-months'] = est_intervals
            else:
                hh['triad-all-estimates-days'] = est_intervals
            # Reverse out the estimates previously charged.
            hh['triad-all-estimates-gbp'] = est_triad_gbp / \
                total_intervals * est_intervals * -1
def forecast_date():
    """Return the start of the current month (UTC) as the forecast date."""
    current = utc_datetime_now()
    return utc_datetime(current.year, current.month, 1)
def ccl(data_source, ct_month=False):
    """Add Climate Change Levy (CCL) kWh and GBP figures to
    ``data_source``'s half-hour data.

    Without a bill, charges are accumulated per calendar month (UTC, or
    CT month boundaries if ``ct_month``) and attached to each month-end
    half-hour.  With a bill, the whole bill period is accumulated and
    attached to the final half-hour.  In both cases the CCL 'de minimis'
    threshold of 1,000 kWh per month is applied.

    :param data_source: supply data source (project type).
    :param ct_month: if True, month boundaries follow clock time (CT)
        rather than UTC.
    """
    rate_set = data_source.supplier_rate_sets['ccl-rate']

    # If there's no era just after finish_date, the supply ends there; the
    # final (possibly partial) month must then also be flushed below.
    if data_source.supply.find_era_at(
            data_source.sess, data_source.finish_date + HH) is None:
        sup_end = data_source.finish_date
    else:
        sup_end = None

    # Fixed: the original used a bare 'except:' to initialise the cache,
    # which would also swallow KeyboardInterrupt / SystemExit.  Only a
    # missing key is expected, so use setdefault instead.
    cache = data_source.caches.setdefault('ccl', {})
    future_funcs = data_source.caches.setdefault('future_funcs', {})
    if ccl_contract_id not in future_funcs:
        # Default future rate function: multiplier 1, constant 0
        # (i.e. carry the latest known rate forward unchanged).
        future_funcs[ccl_contract_id] = {
            'start_date': None, 'func': create_future_func(1, 0)}

    if data_source.bill is None:
        for hh in data_source.hh_data:
            if hh['ct-is-month-end'] or hh['start-date'] == sup_end:
                finish_year = hh['start-date'].year
                finish_month = hh['start-date'].month
                kwh = 0
                gbp = 0
                if ct_month:
                    month_start = to_utc(
                        ct_datetime(finish_year, finish_month))
                    month_finish = hh['start-date']
                else:
                    month_start = utc_datetime(finish_year, finish_month)
                    month_finish = month_start + relativedelta(months=1) - HH
                for ds in chellow.computer.get_data_sources(
                        data_source, month_start, month_finish):
                    for datum in ds.hh_data:
                        try:
                            rate = cache[datum['start-date']]
                        except KeyError:
                            rate = cache[datum['start-date']] = \
                                data_source.hh_rate(
                                    ccl_contract_id, datum['start-date'],
                                    'ccl_rate')
                        rate_set.add(rate)
                        kwh += datum['msp-kwh']
                        gbp += datum['msp-kwh'] * rate

                # CCL 'de minimis': no levy below 1,000 kWh in the month.
                if kwh > 999:
                    hh['ccl-kwh'] = kwh
                    hh['ccl-gbp'] = gbp
    elif data_source.is_last_bill_gen:
        kwh = 0
        gbp = 0
        for ds in chellow.computer.get_data_sources(
                data_source, data_source.bill_start,
                data_source.bill_finish):
            for hh in ds.hh_data:
                try:
                    rate = cache[hh['start-date']]
                except KeyError:
                    rate = cache[hh['start-date']] = data_source.hh_rate(
                        ccl_contract_id, hh['start-date'], 'ccl_rate')
                rate_set.add(rate)
                kwh += hh['msp-kwh']
                gbp += hh['msp-kwh'] * rate

        # De minimis over an arbitrary bill period: compare the average
        # consumption rate (kWh per second) against 12,000 kWh per year.
        hhs = (
            data_source.bill_finish - data_source.bill_start).total_seconds()
        if (kwh / hhs) > ((1000 * 12) / (365 * 24 * 60 * 60)):
            data_source.hh_data[-1]['ccl-kwh'] = kwh
            data_source.hh_data[-1]['ccl-gbp'] = gbp
def run(self): while not self.stopped.isSet(): if self.lock.acquire(False): sess = None try: sess = Session() self.log("Starting to check bank holidays") contract = Contract.get_non_core_by_name( sess, 'bank_holidays') contract_props = contract.make_properties() if contract_props.get('enabled', False): url_str = contract_props['url'] self.log("Downloading from " + url_str + ".") res = requests.get(url_str) self.log( ' '.join( ( "Received", str(res.status_code), res.reason))) PREFIX = 'DTSTART;VALUE=DATE:' hols = collections.defaultdict(list) for line in res.text.splitlines(): if line.startswith(PREFIX): dt = utc_datetime_parse(line[-8:], "%Y%m%d") hols[dt.year].append(dt) for year in sorted(hols.keys()): year_start = utc_datetime(year, 1, 1) year_finish = year_start + \ relativedelta(years=1) - HH rs = sess.query(RateScript).filter( RateScript.contract == contract, RateScript.start_date == year_start).first() if rs is None: self.log( "Adding a new rate script starting at " + hh_format(year_start) + ".") latest_rs = sess.query(RateScript).filter( RateScript.contract == contract).\ order_by(RateScript.start_date.desc()). \ first() contract.update_rate_script( sess, latest_rs, latest_rs.start_date, year_finish, latest_rs.script) rs = contract.insert_rate_script( sess, year_start, '') script = { 'bank_holidays': [ v.strftime("%Y-%m-%d") for v in hols[year]]} self.log( "Updating rate script starting at " + hh_format(year_start) + ".") contract.update_rate_script( sess, rs, rs.start_date, rs.finish_date, json.dumps( script, indent=' ', sort_keys=True)) sess.commit() else: self.log( "The automatic importer is disabled. To " "enable it, edit the contract properties to " "set 'enabled' to True.") except: self.log("Outer problem " + traceback.format_exc()) sess.rollback() finally: if sess is not None: sess.close() self.lock.release() self.log("Finished checking bank holidays.") self.going.wait(24 * 60 * 60) self.going.clear()
def content(
        scenario_props, scenario_id, base_name, site_id, supply_id, user,
        compression):
    """Generate a scenario spreadsheet report (.ods), one site row and one
    era row per site/era per month of the scenario.

    :param scenario_props: scenario properties dict, or None to load them
        from the supplier contract with id ``scenario_id``.
    :param scenario_id: id of the scenario contract (used when
        scenario_props is None).
    :param base_name: list of filename components, appended to in place.
    :param site_id: restrict to a single site, or None for all sites.
    :param supply_id: restrict to a single supply, or None for all.
    :param user: user for whom the download files are created.
    :param compression: passed through to write_spreadsheet.
    """
    now = utc_datetime_now()
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs
    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        # Set up 'future rate' functions for each non-core (role Z)
        # contract mentioned in the scenario.
        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue

            try:
                rate_start = props['start_date']
            except KeyError:
                # NOTE(review): scenario_contract is only bound when
                # scenario_props was None above; if props were passed in
                # directly this raises NameError instead of BadRequest --
                # confirm intended.
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")

            if rate_start is not None:
                rate_start = to_utc(rate_start)

            lib = importlib.import_module('chellow.' + contract.name)
            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}

        start_date = scenario_props['scenario_start']
        if start_date is None:
            # Default: start of the current month.
            start_date = utc_datetime(now.year, now.month, 1)
        else:
            start_date = to_utc(start_date)

        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None

        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = to_utc(kwh_start)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []

        # Per-site list of kW changes parsed from the scenario CSV:
        # SITE_CODE, USED/GENERATED, DATE, MULTIPLIER.
        changes = defaultdict(list, {})

        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''

        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) +
                    " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = to_utc(Datetime.strptime(date_str.strip(), "%Y-%m-%d"))
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})

        era_header_titles = [
            'creation-date', 'imp-mpan-core', 'imp-supplier-contract',
            'exp-mpan-core', 'exp-supplier-contract', 'metering-type',
            'source', 'generator-type', 'supply-name', 'msn', 'pc',
            'site-id', 'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'creation-date', 'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh',
            'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh',
            'used-3rd-party-kwh', 'import-net-gbp', 'export-net-gbp',
            'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp',
            'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp',
            'used-3rd-party-gbp', 'billed-import-net-kwh',
            'billed-import-net-gbp']

        # Collect virtual bill column titles per contract role, across
        # every contract active in the report window.
        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr).join(Era.supply). \
                join(Source).filter(
                    Era.start_date <= finish_date, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles')
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        # Add per-TPR kwh/rate/gbp columns for supplier bills.
        tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). \
            join(Era).filter(
                Era.start_date <= finish_date, or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)
            ).order_by(Tpr.code).distinct()
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['imp-supplier'].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['exp-supplier'].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] + [None] +
            ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] +
            ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        month_start = start_date
        # NOTE(review): looks like leftover debug output -- consider
        # removing or routing through proper logging.
        print("start date", start_date, "finish date", finish_date)
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                calcs = []
                # kWh totals of unmetered import supplies, keyed by
                # half-hour start, later folded into the site figures.
                deltas = defaultdict(int)
                for era in sess.query(Era).join(SiteEra).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.start_date <= month_finish, or_(
                            Era.finish_date == null(),
                            Era.finish_date >= month_start)).options(
                        joinedload(Era.ssc),
                        joinedload(Era.hhdc_contract),
                        joinedload(Era.mop_contract),
                        joinedload(Era.imp_supplier_contract),
                        joinedload(Era.exp_supplier_contract),
                        joinedload(Era.channels),
                        joinedload(Era.imp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.exp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.cop),
                        joinedload(Era.supply).joinedload(
                            Supply.dno_contract),
                        joinedload(Era.supply).joinedload(
                            Supply.gsp_group),
                        joinedload(Era.mtc).joinedload(Mtc.meter_type),
                        joinedload(Era.pc),
                        joinedload(Era.site_eras)):

                    supply = era.supply
                    if supply.generator_type is not None:
                        site_gen_types.add(supply.generator_type.code)
                    if supply_id is not None and supply.id != supply_id:
                        continue

                    # Clip the era to the current month.
                    if era.start_date > month_start:
                        ss_start = era.start_date
                    else:
                        ss_start = month_start

                    if hh_before(era.finish_date, month_finish):
                        ss_finish = era.finish_date
                    else:
                        ss_finish = month_finish

                    if era.imp_mpan_core is None:
                        imp_ss = None
                    else:
                        imp_ss = SupplySource(
                            sess, ss_start, ss_finish, kwh_start, era, True,
                            report_context)

                    if era.exp_mpan_core is None:
                        exp_ss = None
                        measurement_type = imp_ss.measurement_type
                    else:
                        exp_ss = SupplySource(
                            sess, ss_start, ss_finish, kwh_start, era,
                            False, report_context)
                        measurement_type = exp_ss.measurement_type

                    order = meter_order[measurement_type]
                    calcs.append(
                        (
                            order, era.imp_mpan_core, era.exp_mpan_core,
                            imp_ss, exp_ss))

                    # Unmetered supply (no channels): its kWh must be
                    # added to the site's import/used figures by hand.
                    if imp_ss is not None and len(era.channels) == 0:
                        for hh in imp_ss.hh_data:
                            deltas[hh['start-date']] += hh['msp-kwh']

                # Adjustments to individual supplies arising from the
                # scenario's kW changes, keyed by half-hour start.
                imp_net_delts = defaultdict(int)
                exp_net_delts = defaultdict(int)
                imp_gen_delts = defaultdict(int)

                displaced_era = chellow.computer.displaced_era(
                    sess, report_context, site, month_start, month_finish,
                    kwh_start)

                site_ds = chellow.computer.SiteSource(
                    sess, site, month_start, month_finish, kwh_start,
                    report_context, displaced_era)
                for hh in site_ds.hh_data:
                    try:
                        delta = deltas[hh['start-date']]
                        hh['import-net-kwh'] += delta
                        hh['used-kwh'] += delta
                    except KeyError:
                        # NOTE(review): deltas is a defaultdict, so this
                        # KeyError branch never fires -- confirm.
                        pass

                # Apply the scenario's 'used'/'generated' multipliers to
                # each half-hour at/after the change date, re-deriving the
                # site's net/gen/displaced energy balance.
                for hh in site_ds.hh_data:
                    for change in site_changes:
                        if change['type'] == 'used' and \
                                change['date'] <= hh['start-date']:
                            used = change['multiplier'] * hh['used-kwh']
                            exp_net = max(
                                0,
                                hh['import-gen-kwh'] -
                                hh['export-gen-kwh'] - used)
                            exp_net_delt = exp_net - hh['export-net-kwh']
                            exp_net_delts[hh['start-date']] += exp_net_delt
                            displaced = hh['import-gen-kwh'] - \
                                hh['export-gen-kwh'] - exp_net
                            imp_net = used - displaced
                            imp_delt = imp_net - hh['import-net-kwh']
                            imp_net_delts[hh['start-date']] += imp_delt

                            hh['import-net-kwh'] = imp_net
                            hh['used-kwh'] = used
                            hh['export-net-kwh'] = exp_net
                            hh['msp-kwh'] = displaced
                        elif change['type'] == 'generated' and \
                                change['date'] <= hh['start-date']:
                            imp_gen = change['multiplier'] * \
                                hh['import-gen-kwh']
                            imp_gen_delt = imp_gen - hh['import-gen-kwh']
                            exp_net = max(
                                0,
                                imp_gen - hh['export-gen-kwh'] -
                                hh['used-kwh'])
                            exp_net_delt = exp_net - hh['export-net-kwh']
                            exp_net_delts[hh['start-date']] += exp_net_delt
                            displaced = imp_gen - hh['export-gen-kwh'] - \
                                exp_net
                            imp_net = hh['used-kwh'] - displaced
                            imp_net_delt = imp_net - hh['import-net-kwh']
                            imp_net_delts[hh['start-date']] += imp_net_delt
                            imp_gen_delts[hh['start-date']] += imp_gen_delt

                            hh['import-net-kwh'] = imp_net
                            hh['export-net-kwh'] = exp_net
                            hh['import-gen-kwh'] = imp_gen
                            hh['msp-kwh'] = displaced

                # Displaced-energy virtual bill for the site as a whole
                # (skipped when reporting a single supply).
                if displaced_era is not None and supply_id is None:
                    month_data = {}
                    for sname in (
                            'import-net', 'export-net', 'import-gen',
                            'export-gen', 'import-3rd-party',
                            'export-3rd-party', 'msp', 'used',
                            'used-3rd-party', 'billed-import-net'):
                        for xname in ('kwh', 'gbp'):
                            month_data[sname + '-' + xname] = 0

                    month_data['used-kwh'] = \
                        month_data['displaced-kwh'] = \
                        sum(hh['msp-kwh'] for hh in site_ds.hh_data)

                    disp_supplier_contract = \
                        displaced_era.imp_supplier_contract
                    disp_vb_function = chellow.computer.contract_func(
                        report_context, disp_supplier_contract,
                        'displaced_virtual_bill')
                    if disp_vb_function is None:
                        raise BadRequest(
                            "The supplier contract " +
                            disp_supplier_contract.name +
                            " doesn't have the displaced_virtual_bill() "
                            "function.")
                    disp_vb_function(site_ds)
                    disp_supplier_bill = site_ds.supplier_bill
                    try:
                        gbp = disp_supplier_bill['net-gbp']
                    except KeyError:
                        disp_supplier_bill['problem'] += \
                            'For the supply ' + \
                            site_ds.mpan_core + ' the virtual bill ' + \
                            str(disp_supplier_bill) + \
                            ' from the contract ' + \
                            disp_supplier_contract.name + \
                            ' does not contain the net-gbp key.'

                    # NOTE(review): if 'net-gbp' was missing above, this
                    # lookup still raises KeyError -- confirm intended.
                    month_data['used-gbp'] = month_data['displaced-gbp'] = \
                        site_ds.supplier_bill['net-gbp']

                    out = [
                        now, None, disp_supplier_contract.name, None, None,
                        displaced_era.make_meter_category(), 'displaced',
                        None, None, None, None, site.code, site.name, '',
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + [
                        None] * len(title_dict['mop']) + [None] + [
                        None] * len(title_dict['dc']) + [None] + \
                        make_bill_row(
                            title_dict['imp-supplier'], disp_supplier_bill)
                    era_rows.append(out)
                    for k, v in month_data.items():
                        site_month_data[k] += v

                # One era row per supply era, ordered by metering type.
                for i, (
                        order, imp_mpan_core, exp_mpan_core, imp_ss,
                        exp_ss) in enumerate(sorted(calcs, key=str)):
                    if imp_ss is None:
                        era = exp_ss.era
                    else:
                        era = imp_ss.era
                    supply = era.supply
                    source = supply.source
                    source_code = source.code
                    site_sources.add(source_code)
                    month_data = {}
                    for name in (
                            'import-net', 'export-net', 'import-gen',
                            'export-gen', 'import-3rd-party',
                            'export-3rd-party', 'displaced', 'used',
                            'used-3rd-party', 'billed-import-net'):
                        for sname in ('kwh', 'gbp'):
                            month_data[name + '-' + sname] = 0

                    # Push the scenario deltas into the first matching
                    # supply of the relevant source.
                    if source_code == 'net':
                        delts = imp_net_delts
                    elif source_code == 'gen':
                        delts = imp_gen_delts
                    else:
                        delts = []

                    if len(delts) > 0 and imp_ss is not None:
                        for hh in imp_ss.hh_data:
                            diff = hh['msp-kwh'] + delts[hh['start-date']]
                            if diff < 0:
                                # Delta would push kWh negative: clamp to
                                # zero and carry the remainder forward.
                                hh['msp-kwh'] = 0
                                hh['msp-kw'] = 0
                                delts[hh['start-date']] -= hh['msp-kwh']
                            else:
                                hh['msp-kwh'] += delts[hh['start-date']]
                                hh['msp-kw'] += hh['msp-kwh'] / 2
                                del delts[hh['start-date']]

                        # Any positive remainder is lumped onto the first
                        # half-hour.
                        left_kwh = sum(delts.values())
                        if left_kwh > 0:
                            first_hh = imp_ss.hh_data[0]
                            first_hh['msp-kwh'] += left_kwh
                            first_hh['msp-kw'] += left_kwh / 2

                    imp_supplier_contract = era.imp_supplier_contract
                    if imp_supplier_contract is not None:
                        kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data)

                        import_vb_function = contract_func(
                            report_context, imp_supplier_contract,
                            'virtual_bill')
                        if import_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                imp_supplier_contract.name +
                                " doesn't have the virtual_bill() "
                                "function.")
                        import_vb_function(imp_ss)
                        imp_supplier_bill = imp_ss.supplier_bill
                        try:
                            gbp = imp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            imp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                imp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(imp_supplier_bill) + \
                                ' from the contract ' + \
                                imp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                            month_data['import-net-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['export-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == 'gen':
                            month_data['import-gen-kwh'] += kwh

                    exp_supplier_contract = era.exp_supplier_contract
                    if exp_supplier_contract is not None:
                        kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)

                        export_vb_function = contract_func(
                            report_context, exp_supplier_contract,
                            'virtual_bill')
                        export_vb_function(exp_ss)
                        exp_supplier_bill = exp_ss.supplier_bill
                        try:
                            gbp = exp_supplier_bill['net-gbp']
                        except KeyError:
                            # NOTE(review): unlike the import branch, gbp
                            # isn't reset to 0 here, so the import
                            # supply's gbp leaks through; the message also
                            # cites imp_ss / imp_supplier_contract rather
                            # than the export ones -- looks like a
                            # copy-paste slip, confirm.
                            exp_supplier_bill['problem'] += \
                                'For the supply ' + imp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(imp_supplier_bill) + \
                                ' from the contract ' + \
                                imp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['export-net-gbp'] += gbp
                            month_data['export-net-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['import-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == 'gen':
                            month_data['export-gen-kwh'] += kwh

                    # DC and MOP virtual bills run on whichever supply
                    # source exists (import preferred).
                    sss = exp_ss if imp_ss is None else imp_ss
                    dc_contract = era.hhdc_contract
                    sss.contract_func(dc_contract, 'virtual_bill')(sss)
                    dc_bill = sss.dc_bill
                    gbp = dc_bill['net-gbp']

                    mop_contract = era.mop_contract
                    mop_bill_function = sss.contract_func(
                        mop_contract, 'virtual_bill')
                    mop_bill_function(sss)
                    mop_bill = sss.mop_bill
                    gbp += mop_bill['net-gbp']
                    if source_code in ('3rd-party', '3rd-party-reverse'):
                        month_data['import-3rd-party-gbp'] += gbp
                        month_data['used-3rd-party-gbp'] += gbp
                    else:
                        month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp

                    if source_code in ('gen', 'gen-net'):
                        generator_type = supply.generator_type.code
                        site_gen_types.add(generator_type)
                    else:
                        generator_type = None

                    # Keep the 'highest' metering category seen on site.
                    # CATEGORY_ORDER is assumed to map None lowest.
                    era_category = era.make_meter_category()
                    if CATEGORY_ORDER[site_category] < \
                            CATEGORY_ORDER[era_category]:
                        site_category = era_category

                    era_associates = {
                        s.site.code for s in era.site_eras
                        if not s.is_physical}

                    # Pro-rate actual bills overlapping this month.  The
                    # extra 30 * 60 seconds makes the durations inclusive
                    # of the final half-hour.
                    for bill in sess.query(Bill).filter(
                            Bill.supply == supply,
                            Bill.start_date <= sss.finish_date,
                            Bill.finish_date >= sss.start_date):
                        bill_start = bill.start_date
                        bill_finish = bill.finish_date
                        bill_duration = (
                            bill_finish - bill_start).total_seconds() + \
                            (30 * 60)
                        overlap_duration = (
                            min(bill_finish, sss.finish_date) -
                            max(bill_start, sss.start_date)
                            ).total_seconds() + (30 * 60)
                        overlap_proportion = \
                            overlap_duration / bill_duration
                        month_data['billed-import-net-kwh'] += \
                            overlap_proportion * float(bill.kwh)
                        month_data['billed-import-net-gbp'] += \
                            overlap_proportion * float(bill.net)

                    out = [
                        now, era.imp_mpan_core, (
                            None if imp_supplier_contract is None else
                            imp_supplier_contract.name),
                        era.exp_mpan_core, (
                            None if exp_supplier_contract is None else
                            exp_supplier_contract.name),
                        era_category, source_code, generator_type,
                        supply.name, era.msn, era.pc.code, site.code,
                        site.name,
                        ','.join(sorted(list(era_associates))),
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + \
                        make_bill_row(title_dict['mop'], mop_bill) + \
                        [None] + \
                        make_bill_row(title_dict['dc'], dc_bill)
                    if imp_supplier_contract is None:
                        out += [None] * (len(title_dict['imp-supplier']) + 1)
                    else:
                        out += [None] + make_bill_row(
                            title_dict['imp-supplier'], imp_supplier_bill)
                    if exp_supplier_contract is not None:
                        out += [None] + make_bill_row(
                            title_dict['exp-supplier'], exp_supplier_bill)

                    for k, v in month_data.items():
                        site_month_data[k] += v
                    era_rows.append(out)

                site_rows.append(
                    [
                        now, site.code, site.name, ', '.join(
                            s.code for s in site.find_linked_sites(
                                sess, month_start, month_finish)),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
            # Flush rows to the spreadsheet after each month so partial
            # output is available for long scenarios.
            write_spreadsheet(rf, compression, site_rows, era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    except:
        # NOTE(review): bare except also traps SystemExit /
        # KeyboardInterrupt -- consider 'except Exception'.
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            # Rename marks the download as finished.
            rf.close()
            os.rename(running_name, finished_name)
        except:
            # Report any failure via an error.txt download instead.
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()