def run(self):
    """Thread main loop: periodically check whether a new month of
    bmarketidx rates should be imported.

    Loops until ``self.stopped`` is set.  Each pass tries a non-blocking
    acquire of ``self.lock`` so only one check runs at a time, then sleeps
    for up to two hours (or until ``self.going`` is set).
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = self.global_alert = None
            try:
                sess = Session()
                self.log("Starting to check bmarketidx.")
                contract = Contract.get_non_core_by_name(sess, "bmarketidx")
                # The most recent rate script tells us which month was
                # imported last.
                latest_rs = (
                    sess.query(RateScript)
                    .filter(RateScript.contract_id == contract.id)
                    .order_by(RateScript.start_date.desc())
                    .first()
                )
                start_ct = to_ct(latest_rs.start_date)
                months = list(
                    c_months_u(
                        start_year=start_ct.year,
                        start_month=start_ct.month,
                        months=2,
                    )
                )
                # months[1] is the month after the latest rate script's
                # month; only process it once it has fully elapsed.
                month_start, month_finish = months[1]
                now = utc_datetime_now()
                if now > month_finish:
                    _process_month(
                        self.log,
                        sess,
                        contract,
                        latest_rs,
                        month_start,
                        month_finish,
                    )
            except BaseException:
                self.log(f"Outer problem {traceback.format_exc()}")
                # Guard: Session() itself may have failed, leaving sess as
                # None; calling rollback() on None would mask the error.
                if sess is not None:
                    sess.rollback()
                self.global_alert = (
                    "There's a problem with the "
                    "bmarketidx automatic importer."
                )
            finally:
                self.lock.release()
                self.log("Finished checking bmarketidx rates.")
                if sess is not None:
                    sess.close()
        # Sleep for up to two hours; going.set() wakes the loop early.
        self.going.wait(2 * 60 * 60)
        self.going.clear()
def content(base_name, site_id, g_supply_id, user, compression, start_date, months):
    """Write a monthly gas virtual-bill report as an ODS spreadsheet.

    Builds one row per gas era per month (``g_era_rows``) and one summary
    row per site per month (``site_rows``), covering ``months`` months from
    ``start_date``.  Optionally restricted to a single site (``site_id``)
    or a single gas supply (``g_supply_id``).  The output file name is
    assembled from ``base_name`` and registered via ``chellow.dloads``.
    """
    now = utc_datetime_now()
    report_context = {}
    sess = None
    # Pre-initialise everything the except/finally handlers touch, so an
    # early failure (e.g. in make_names or open) can't raise NameError /
    # UnboundLocalError inside the handlers and hide the real error.
    rf = None
    site_rows = []
    g_era_rows = []
    try:
        sess = Session()
        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').replace(
                '-', ''))
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)
        forecast_from = chellow.computer.forecast_date()
        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if g_supply_id is not None:
            g_supply = GSupply.get_by_id(sess, g_supply_id)
            base_name.append('g_supply')
            base_name.append(str(g_supply.id))
            sites = sites.join(SiteGEra).join(GEra).filter(
                GEra.g_supply == g_supply)
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        era_header_titles = [
            'creation_date', 'mprn', 'supply_name', 'exit_zone', 'msn',
            'unit', 'contract', 'site_id', 'site_name',
            'associated_site_ids', 'month']
        site_header_titles = [
            'creation_date', 'site_id', 'site_name', 'associated_site_ids',
            'month']
        summary_titles = ['kwh', 'gbp', 'billed_kwh', 'billed_gbp']
        # Collect the union of virtual-bill titles across all relevant
        # contracts so every era row has the same columns.
        vb_titles = []
        conts = sess.query(GContract).join(GEra).join(GSupply).filter(
            GEra.start_date <= finish_date,
            or_(GEra.finish_date == null(),
                GEra.finish_date >= start_date)).distinct().order_by(
            GContract.id)
        if g_supply_id is not None:
            conts = conts.filter(GEra.g_supply_id == g_supply_id)
        for cont in conts:
            title_func = chellow.computer.contract_func(
                report_context, cont, 'virtual_bill_titles')
            if title_func is None:
                raise Exception(
                    "For the contract " + cont.name + " there doesn't seem " +
                    "to be a 'virtual_bill_titles' function.")
            for title in title_func():
                if title not in vb_titles:
                    vb_titles.append(title)
        g_era_rows.append(era_header_titles + summary_titles + vb_titles)
        site_rows.append(site_header_titles + summary_titles)
        sites = sites.all()
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_kwh = site_gbp = site_billed_kwh = site_billed_gbp = 0
                for g_era in sess.query(GEra).join(SiteGEra).filter(
                        SiteGEra.site == site,
                        SiteGEra.is_physical == true(),
                        GEra.start_date <= month_finish,
                        or_(GEra.finish_date == null(),
                            GEra.finish_date >= month_start)).options(
                        joinedload(GEra.g_contract),
                        joinedload(GEra.g_supply),
                        joinedload(GEra.g_supply).joinedload(
                            GSupply.g_exit_zone)).order_by(GEra.id):
                    g_supply = g_era.g_supply
                    if g_supply_id is not None and \
                            g_supply.id != g_supply_id:
                        continue
                    ss_start = hh_max(g_era.start_date, month_start)
                    ss_finish = hh_min(g_era.finish_date, month_finish)
                    ss = GDataSource(
                        sess, ss_start, ss_finish, forecast_from, g_era,
                        report_context, None)
                    contract = g_era.g_contract
                    vb_function = contract_func(
                        report_context, contract, 'virtual_bill')
                    if vb_function is None:
                        raise BadRequest(
                            "The contract " + contract.name +
                            " doesn't have the virtual_bill() function.")
                    vb_function(ss)
                    bill = ss.bill
                    # Missing keys are reported in the bill's 'problem'
                    # field rather than aborting the whole report.
                    try:
                        gbp = bill['net_gbp']
                    except KeyError:
                        gbp = 0
                        bill['problem'] += 'For the supply ' + ss.mprn + \
                            ' the virtual bill ' + str(bill) + \
                            ' from the contract ' + contract.name + \
                            ' does not contain the net_gbp key.'
                    try:
                        kwh = bill['kwh']
                    except KeyError:
                        kwh = 0
                        bill['problem'] += "For the supply " + ss.mprn + \
                            " the virtual bill " + str(bill) + \
                            " from the contract " + contract.name + \
                            " does not contain the 'kwh' key."
                    billed_kwh = billed_gbp = 0
                    g_era_associates = {
                        s.site.code for s in g_era.site_g_eras
                        if not s.is_physical}
                    # Apportion each overlapping bill by the fraction of
                    # its duration that falls inside this chunk.  The
                    # 30 * 60 addend makes the duration inclusive of the
                    # final half-hour.
                    for g_bill in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= ss_finish,
                            GBill.finish_date >= ss_start):
                        bill_start = g_bill.start_date
                        bill_finish = g_bill.finish_date
                        bill_duration = (
                            bill_finish - bill_start).total_seconds() + \
                            (30 * 60)
                        overlap_duration = (
                            min(bill_finish, ss_finish) -
                            max(bill_start, ss_start)).total_seconds() + \
                            (30 * 60)
                        overlap_proportion = \
                            overlap_duration / bill_duration
                        billed_kwh += \
                            overlap_proportion * float(g_bill.kwh)
                        billed_gbp += \
                            overlap_proportion * float(g_bill.net)
                    associated_site_ids = ','.join(sorted(g_era_associates))
                    g_era_rows.append([
                        now, g_supply.mprn, g_supply.name,
                        g_supply.g_exit_zone.code, g_era.msn,
                        g_era.g_unit.code, contract.name, site.code,
                        site.name, associated_site_ids, month_finish, kwh,
                        gbp, billed_kwh, billed_gbp] + [
                        make_val(bill.get(t)) for t in vb_titles])
                    site_kwh += kwh
                    site_gbp += gbp
                    site_billed_kwh += billed_kwh
                    site_billed_gbp += billed_gbp
                linked_sites = ', '.join(
                    s.code for s in site.find_linked_sites(
                        sess, month_start, month_finish))
                site_rows.append([
                    now, site.code, site.name, linked_sites, month_finish,
                    site_kwh, site_gbp, site_billed_kwh, site_billed_gbp])
                # Release any row locks; the report only reads data.
                sess.rollback()
            # Re-write the spreadsheet after each month so partial output
            # is visible while the report runs.
            write_spreadsheet(rf, compression, site_rows, g_era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        if rf is not None:
            write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        if rf is not None:
            write_spreadsheet(rf, compression, site_rows, g_era_rows)
    finally:
        if sess is not None:
            sess.close()
        if rf is not None:
            try:
                rf.close()
                os.rename(running_name, finished_name)
            except BaseException:
                msg = traceback.format_exc()
                r_name, f_name = chellow.dloads.make_names(
                    'error.txt', user)
                ef = open(r_name, "w")
                ef.write(msg + '\n')
                ef.close()
def now_if_none(dt):
    """Return *dt* unchanged, substituting the current UTC time for None."""
    if dt is None:
        return utc_datetime_now()
    return dt
def forecast_date():
    """Return the first instant of the current month as a UTC datetime."""
    current = utc_datetime_now()
    return utc_datetime(current.year, current.month, 1)
def _parse_row(row, row_index, datemode, title_row):
    """Parse one spreadsheet row into a raw bill dict.

    Columns are located by name using the cell values of ``title_row``.
    Monetary/consumption cells are read with ``get_dec``/``get_rate``
    (helpers defined elsewhere in this module) and collected into the
    bill ``breakdown`` dict, skipping absent values.  Returns a dict in
    the shape expected by the bill importer.

    NOTE(review): ``datemode`` is accepted but never used here —
    presumably dates are handled inside ``get_date_ct``; confirm before
    removing it from the signature.
    """
    titles = [c.value for c in title_row]
    bill_start_date = get_date_ct(row, titles, "BILL START DATE")
    # HH * 47 extends the end date to the last half-hour of that day.
    bill_finish_date = get_date_ct(row, titles, "BILL END DATE") + (HH * 47)
    issue_date = utc_datetime_now()
    aahedc_kwh = get_dec(row, titles, "LVY-AAHEDC-ALL USE KWH")
    # (breakdown-key, value) pairs; values wrapped in a list (e.g.
    # "duos-availability-kva-days") are stored as single-element lists in
    # the breakdown.
    vals = [
        ("capmechob-kwh", get_dec(row, titles, "LVY-CMLOB-ALL USE KWH")),
        ("capmechob-rate", get_dec(row, titles, "LVY-CMLOB-ALL RATE P/KWH")),
        ("capmechob-gbp", get_dec(row, titles, "LVY-CMLOB-ALL COST GBP")),
        ("capmechop-kwh", get_dec(row, titles, "LVY-CMLOP-ALL USE KWH")),
        ("capmechop-rate", get_dec(row, titles, "LVY-CMLOP-ALL RATE P/KWH")),
        ("capmechop-gbp", get_dec(row, titles, "LVY-CMLOP-ALL COST GBP")),
        ("duos-fixed-days", get_dec(row, titles, "DUOS-STND-SITE USE DAY")),
        (
            "duos-availability-kva-days",
            [get_dec(row, titles, "DUOS-AVAIL-AV USE KVADAY")],
        ),
        (
            "duos-availability-rate",
            get_rate(row, titles, "DUOS-AVAIL-AV RATE P/KVADAY"),
        ),
        ("duos-availability-gbp", get_dec(row, titles, "DUOS-AVAIL-AV COST GBP")),
        ("duos-fixed-rate", get_rate(row, titles, "DUOS-STND-SITE RATE P/DAY")),
        ("duos-fixed-gbp", get_dec(row, titles, "DUOS-STND-SITE COST GBP")),
        ("duos-amber-kwh", get_dec(row, titles, "DUOS-UNIT-AMBER USE KWH")),
        ("duos-amber-rate", get_rate(row, titles, "DUOS-UNIT-AMBER RATE P/KWH")),
        ("duos-amber-gbp", get_dec(row, titles, "DUOS-UNIT-AMBER COST GBP")),
        ("duos-green-kwh", get_dec(row, titles, "DUOS-UNIT-GREEN USE KWH")),
        ("duos-green-rate", get_rate(row, titles, "DUOS-UNIT-GREEN RATE P/KWH")),
        ("duos-green-gbp", get_dec(row, titles, "DUOS-UNIT-GREEN COST GBP")),
        ("duos-red-kwh", get_dec(row, titles, "DUOS-UNIT-RED USE KWH")),
        ("duos-red-rate", get_rate(row, titles, "DUOS-UNIT-RED RATE P/KWH")),
        ("duos-red-gbp", get_dec(row, titles, "DUOS-UNIT-RED COST GBP")),
        ("aahedc-kwh", aahedc_kwh),
        ("aahedc-rate", get_rate(row, titles, "LVY-AAHEDC-ALL RATE P/KWH")),
        ("aahedc-gbp", get_dec(row, titles, "LVY-AAHEDC-ALL COST GBP")),
        ("bsuos-nbp-kwh", get_dec(row, titles, "LVY-BSUOS-ALL USE KWH")),
        ("bsuos-rate", get_rate(row, titles, "LVY-BSUOS-ALL RATE P/KWH")),
        ("bsuos-gbp", get_dec(row, titles, "LVY-BSUOS-ALL COST GBP")),
        ("ccl-kwh", get_dec(row, titles, "LVY-CCL-ALL USE KWH")),
        ("ccl-rate", get_rate(row, titles, "LVY-CCL-ALL RATE P/KWH")),
        ("ccl-gbp", get_dec(row, titles, "LVY-CCL-ALL COST GBP")),
        ("cfdob-kwh", get_dec(row, titles, "LVY-CFDOB-ALL USE KWH")),
        ("cfdob-rate", get_rate(row, titles, "LVY-CFDOB-ALL RATE P/KWH")),
        ("cfdob-gbp", get_dec(row, titles, "LVY-CFDOB-ALL COST GBP")),
        ("cfdop-kwh", get_dec(row, titles, "LVY-CFDOP-ALL USE KWH")),
        ("cfdop-rate", get_rate(row, titles, "LVY-CFDOP-ALL RATE P/KWH")),
        ("cfdop-gbp", get_dec(row, titles, "LVY-CFDOP-ALL COST GBP")),
        ("fit-kwh", get_dec(row, titles, "LVY-FIT-ALL USE KWH")),
        ("fit-rate", get_rate(row, titles, "LVY-FIT-ALL RATE P/KWH")),
        ("fit-gbp", get_dec(row, titles, "LVY-FIT-ALL COST GBP")),
        ("ro-kwh", get_dec(row, titles, "LVY-RO-ALL USE KWH")),
        ("ro-rate", get_rate(row, titles, "LVY-RO-ALL RATE P/KWH")),
        ("ro-gbp", get_dec(row, titles, "LVY-RO-ALL COST GBP")),
        ("summer-kwh", get_dec(row, titles, "NRG-UNIT-SUMMER USE KWH")),
        ("summer-rate", get_rate(row, titles, "NRG-UNIT-SUMMER RATE P/KWH")),
        ("summer-gbp", get_dec(row, titles, "NRG-UNIT-SUMMER COST GBP")),
        ("winter-kwh", get_dec(row, titles, "NRG-UNIT-WINTER USE KWH")),
        ("winter-rate", get_rate(row, titles, "NRG-UNIT-WINTER RATE P/KWH")),
        ("winter-gbp", get_dec(row, titles, "NRG-UNIT-WINTER COST GBP")),
        ("admin-months", get_dec(row, titles, "SYS-ADMIN-ALL USE MO")),
        ("admin-rate", [get_dec(row, titles, "SYS-ADMIN-ALL RATE GBP/MO")]),
        ("admin-gbp", get_dec(row, titles, "SYS-ADMIN-ALL COST GBP")),
        ("triad-days", get_dec(row, titles, "TUOS-TRIAD-SITE USE DAY")),
        ("triad-rate", [get_dec(row, titles, "TUOS-TRIAD-SITE RATE GBP/DAY")]),
        ("triad-gbp", get_dec(row, titles, "TUOS-TRIAD-SITE COST GBP")),
    ]
    bd = {
        "raw_lines": [str(title_row), str(row)],
    }
    # Only include values that are present; [None] covers list-wrapped
    # cells whose underlying cell was empty.
    for k, v in vals:
        if v is not None and v != [None]:
            bd[k] = v
    # The bill's total kWh is taken from the AAHEDC kWh column — the levy
    # applies to all consumption, so it serves as the overall usage figure.
    kwh = Decimal("0.00") if aahedc_kwh is None else aahedc_kwh
    # Net GBP is the sum of every '-gbp' entry in the breakdown; VAT is
    # not present in this format, so gross equals net.
    net_gbp = Decimal("0.00")
    net_gbp += sum(v for k, v in bd.items() if k.endswith("-gbp"))
    vat_gbp = Decimal("0.00")
    # Reference combines the import timestamp with the 1-based row number.
    reference = issue_date.strftime("%Y%m%dT%H%M") + "_" + str(row_index + 1)
    return {
        "bill_type_code": "N",
        "kwh": kwh,
        "vat": Decimal("0.00"),
        "net": net_gbp,
        "gross": net_gbp + vat_gbp,
        "reads": [],
        "breakdown": bd,
        # NOTE(review): account and mpan_core are hard-coded — presumably
        # this parser handles a single known supply; confirm.
        "account": "2007",
        "issue_date": issue_date,
        "start_date": bill_start_date,
        "finish_date": bill_finish_date,
        "mpan_core": "22 0003 0354 632",
        "reference": reference,
    }
def run_inner(self, sess):
    """Check for and import a new month of gas calorific value (CV) rates.

    Looks at the 'g_cv' non-core contract, works out the month after its
    latest rate script, and — if the importer is enabled and the month has
    elapsed — downloads LDZ CV data, and when a complete month is present,
    appends a new rate script.
    """
    self.log("Starting to check GCv rates.")
    contract = Contract.get_non_core_by_name(sess, 'g_cv')
    latest_rs = sess.query(RateScript).filter(
        RateScript.contract == contract).order_by(
        RateScript.start_date.desc()).first()
    latest_rs_id = latest_rs.id
    # The month we want data for is the one after the latest rate script.
    this_month_start = latest_rs.start_date + relativedelta(months=1)
    next_month_start = this_month_start + relativedelta(months=1)
    now = utc_datetime_now()
    props = contract.make_properties()
    if props.get('enabled', False):
        # Search a day past the month end so the final day's value is
        # definitely published before we conclude the month is complete.
        search_finish = next_month_start + relativedelta(days=1)
        if now > search_finish:
            url = props['url']
            self.log(
                "Checking to see if data is available from " +
                hh_format(this_month_start) + " to " +
                hh_format(search_finish) + " at " + url)
            res = requests.post(
                url, data={
                    'LatestValue': 'true',
                    'PublicationObjectIds':
                        '408:12265,+408:4636,+408:4637,+408:4639,'
                        '+408:4638,+408:4640,+408:4641,+408:4642,'
                        '+408:4643,+408:4644,+408:4645,+408:4646,'
                        '+408:4647,+408:4648,+408:12269,+408:12268,'
                        '+408:12270,+408:12266,+408:12267',
                    'Applicable': 'applicableFor',
                    'PublicationObjectCount': '19',
                    'FromUtcDatetime': param_format(this_month_start),
                    'ToUtcDateTime': param_format(search_finish),
                    'FileType': 'Csv'})
            self.log(
                "Received " + str(res.status_code) + " " + res.reason)
            # month_cv maps applicable date -> {LDZ code: CV Decimal}.
            month_cv = defaultdict(dict)
            cf = csv.reader(res.text.splitlines())
            row = next(cf)  # Skip title row
            for row in cf:
                applicable_for_str = row[1]
                data_item = row[2]
                value_str = row[3]
                if 'LDZ' in data_item:
                    applicable_for = to_utc(
                        Datetime.strptime(applicable_for_str, "%d/%m/%Y"))
                    cvs = month_cv[applicable_for]
                    # The LDZ code is the two characters before the final
                    # character of the data item name.
                    ldz = data_item[-3:-1]
                    cvs[ldz] = Decimal(value_str)
            if len(month_cv) == 0:
                # Guard: max() over an empty dict would raise ValueError.
                self.log("No LDZ rows were found in the response.")
            else:
                # The month is complete when every day has the same set of
                # LDZs and the data extends beyond the month's end.
                all_equal = len(set(map(len, month_cv.values()))) <= 1
                last_date = max(month_cv.keys())
                if last_date > next_month_start and all_equal:
                    # Trim to the target month only.
                    for dt in tuple(month_cv.keys()):
                        if (dt.year, dt.month) != (
                                this_month_start.year,
                                this_month_start.month):
                            del month_cv[dt]
                    self.log("The whole month's data is there.")
                    month_ion = [v for k, v in sorted(month_cv.items())]
                    script = dumps(month_ion)
                    contract = Contract.get_non_core_by_name(sess, 'g_cv')
                    rs = RateScript.get_by_id(sess, latest_rs_id)
                    # Extend the latest rate script to cover its own month
                    # only, then insert the new month's script after it.
                    contract.update_rate_script(
                        sess, rs, rs.start_date,
                        rs.start_date + relativedelta(months=2) - HH,
                        rs.script)
                    sess.flush()
                    contract.insert_rate_script(
                        sess, rs.start_date + relativedelta(months=1),
                        script)
                    sess.commit()
                    self.log("Added new rate script.")
                else:
                    self.log(
                        "There isn't a whole month there yet. The "
                        "last date is " + hh_format(last_date) + ".")
    else:
        self.log(
            "The automatic importer is disabled. To "
            "enable it, edit the contract properties to "
            "set 'enabled' to True.")
def log(self, message):
    """Prepend a timestamped message to the log, keeping the newest 100."""
    stamp = utc_datetime_now().strftime("%Y-%m-%d %H:%M:%S")
    self.messages.appendleft(stamp + " - " + message)
    # Drop the oldest entry once the bound is exceeded.
    if len(self.messages) > 100:
        self.messages.pop()
def log(self, message):
    """Record *message* at the head of the log, bounded to 100 entries."""
    timestamp = utc_datetime_now().strftime("%Y-%m-%d %H:%M:%S")
    entry = timestamp + " - " + message
    self.messages.appendleft(entry)
    if len(self.messages) > 100:
        # Discard the oldest message from the right-hand end.
        self.messages.pop()
def content(contract_id, days_hidden, user):
    """Write a CSV report of channel snags for a DC contract.

    Lists every snag under the contract that started more than
    ``days_hidden`` days ago, one row per snag, ordered by site, supply and
    channel.  The file is registered via ``chellow.dloads`` and renamed to
    its finished name on completion.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "channel_snags.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "Hidden Days",
            "Chellow Id",
            "Imp MPAN Core",
            "Exp MPAN Core",
            "Site Code",
            "Site Name",
            "Snag Description",
            "Import Related?",
            "Channel Type",
            "Start Date",
            "Finish Date",
            "Is Ignored?",
            "Days Since Snag Finished",
            "Duration Of Snag (Days)",
        )
        writer.writerow(titles)
        contract = Contract.get_dc_by_id(sess, contract_id)
        now = utc_datetime_now()
        # Snags newer than the cutoff are "hidden" from the report.
        cutoff_date = now - relativedelta(days=days_hidden)
        for snag, channel, era, supply, site_era, site in (
                sess.query(
                    Snag, Channel, Era, Supply, SiteEra, Site).join(
                    Channel, Snag.channel_id == Channel.id).join(
                    Era, Channel.era_id == Era.id).join(
                    Supply, Era.supply_id == Supply.id).join(
                    SiteEra, Era.site_eras).join(
                    Site, SiteEra.site_id == Site.id).filter(
                    SiteEra.is_physical == true(),
                    Era.dc_contract == contract,
                    Snag.start_date < cutoff_date,
                ).order_by(
                    Site.code,
                    Supply.id,
                    Channel.imp_related,
                    Channel.channel_type,
                    Snag.description,
                    Snag.start_date,
                    Snag.id,
                )):
            snag_start = snag.start_date
            snag_finish = snag.finish_date
            imp_mc = "" if era.imp_mpan_core is None else era.imp_mpan_core
            exp_mc = "" if era.exp_mpan_core is None else era.exp_mpan_core
            if snag_finish is None:
                # Ongoing snag: duration runs to now, age is undefined.
                duration = now - snag_start
                age_of_snag = None
            else:
                duration = snag_finish - snag_start
                # A snag that finished after the cutoff also has no age
                # reported.
                if hh_before(cutoff_date, snag_finish):
                    age_of_snag = None
                else:
                    delta = now - snag_finish
                    age_of_snag = delta.days
            vals = {
                "Hidden Days": days_hidden,
                "Chellow Id": snag.id,
                "Imp MPAN Core": imp_mc,
                "Exp MPAN Core": exp_mc,
                "Site Code": site.code,
                "Site Name": site.name,
                "Snag Description": snag.description,
                "Import Related?": channel.imp_related,
                "Channel Type": channel.channel_type,
                "Start Date": snag_start,
                "Finish Date": snag_finish,
                "Is Ignored?": snag.is_ignored,
                "Days Since Snag Finished": age_of_snag,
                "Duration Of Snag (Days)": duration.days,
            }
            writer.writerow(csv_make_val(vals[t]) for t in titles)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # Guard: the failure may have happened before the writer was
        # created; writing to None would raise inside the handler.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user, compression): now = utc_datetime_now() report_context = {} try: comp = report_context['computer'] except KeyError: comp = report_context['computer'] = {} try: rate_cache = comp['rates'] except KeyError: rate_cache = comp['rates'] = {} try: ind_cont = report_context['contract_names'] except KeyError: ind_cont = report_context['contract_names'] = {} sess = None try: sess = Session() if scenario_props is None: scenario_contract = Contract.get_supplier_by_id(sess, scenario_id) scenario_props = scenario_contract.make_properties() base_name.append(scenario_contract.name) start_date = scenario_props['scenario_start'] if start_date is None: start_date = utc_datetime(now.year, now.month, 1) else: start_date = to_utc(start_date) base_name.append( hh_format(start_date).replace(' ', '_').replace(':', '').replace('-', '')) months = scenario_props['scenario_duration'] base_name.append('for') base_name.append(str(months)) base_name.append('months') finish_date = start_date + relativedelta(months=months) if 'forecast_from' in scenario_props: forecast_from = scenario_props['forecast_from'] else: forecast_from = None if forecast_from is None: forecast_from = chellow.computer.forecast_date() else: forecast_from = to_utc(forecast_from) sites = sess.query(Site).distinct().order_by(Site.code) if site_id is not None: site = Site.get_by_id(sess, site_id) sites = sites.filter(Site.id == site.id) base_name.append('site') base_name.append(site.code) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) base_name.append('supply') base_name.append(str(supply.id)) sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply) running_name, finished_name = chellow.dloads.make_names( '_'.join(base_name) + '.ods', user) rf = open(running_name, "wb") site_rows = [] era_rows = [] for rate_script in get_map_list(scenario_props, 'local_rates'): contract_id = rate_script['contract_id'] try: cont_cache = 
rate_cache[contract_id] except KeyError: cont_cache = rate_cache[contract_id] = {} try: rate_script_start = rate_script['start_date'] except KeyError: raise BadRequest( "Problem in the scenario properties. Can't find the " + "'start_date' key of the contract " + str(contract_id) + " in the 'local_rates' map.") try: rate_script_start = rate_script['start_date'] except KeyError: raise BadRequest( "Problem in the scenario properties. Can't find the " + "'start_date' key of the contract " + str(contract_id) + " in the 'local_rates' map.") for dt in hh_range(report_context, rate_script_start, rate_script['finish_date']): cont_cache[dt] = PropDict('scenario properties', rate_script['script']) for rate_script in get_map_list(scenario_props, 'industry_rates'): contract_name = rate_script['contract_name'] try: cont_cache = ind_cont[contract_name] except KeyError: cont_cache = ind_cont[contract_name] = {} rfinish = rate_script['finish_date'] if rfinish is None: raise BadRequest("For the industry rate " + contract_name + " the " "finish_date can't be null.") for dt in hh_range(report_context, rate_script['start_date'], rfinish): cont_cache[dt] = PropDict('scenario properties', rate_script['script']) era_maps = scenario_props.get('era_maps', {}) scenario_hh = scenario_props.get('hh_data', {}) era_header_titles = [ 'creation-date', 'imp-mpan-core', 'imp-supplier-contract', 'exp-mpan-core', 'exp-supplier-contract', 'metering-type', 'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id', 'site-name', 'associated-site-ids', 'month' ] site_header_titles = [ 'creation-date', 'site-id', 'site-name', 'associated-site-ids', 'month', 'metering-type', 'sources', 'generator-types' ] summary_titles = [ 'import-net-kwh', 'export-net-kwh', 'import-gen-kwh', 'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh', 'import-net-gbp', 'export-net-gbp', 'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp', 
'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp', 'billed-import-net-kwh', 'billed-import-net-gbp' ] title_dict = {} for cont_type, con_attr in (('mop', Era.mop_contract), ('dc', Era.dc_contract), ('imp-supplier', Era.imp_supplier_contract), ('exp-supplier', Era.exp_supplier_contract)): titles = [] title_dict[cont_type] = titles conts = sess.query(Contract).join(con_attr).join(Era.supply). \ join(Source).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date), Source.code.in_(('net', '3rd-party')) ).distinct().order_by(Contract.id) if supply_id is not None: conts = conts.filter(Era.supply_id == supply_id) for cont in conts: title_func = chellow.computer.contract_func( report_context, cont, 'virtual_bill_titles') if title_func is None: raise Exception("For the contract " + cont.name + " there doesn't seem to be a " "'virtual_bill_titles' function.") for title in title_func(): if title not in titles: titles.append(title) tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). 
\ join(Era).filter( Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date >= start_date) ).order_by(Tpr.code).distinct() for tpr in tpr_query.filter(Era.imp_supplier_contract != null()): for suffix in ('-kwh', '-rate', '-gbp'): title_dict['imp-supplier'].append(tpr.code + suffix) for tpr in tpr_query.filter(Era.exp_supplier_contract != null()): for suffix in ('-kwh', '-rate', '-gbp'): title_dict['exp-supplier'].append(tpr.code + suffix) era_rows.append( era_header_titles + summary_titles + [None] + ['mop-' + t for t in title_dict['mop']] + [None] + ['dc-' + t for t in title_dict['dc']] + [None] + ['imp-supplier-' + t for t in title_dict['imp-supplier']] + [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']]) site_rows.append(site_header_titles + summary_titles) sites = sites.all() deltas = {} for site in sites: try: site_scenario_hh = scenario_hh[site.code] except KeyError: site_scenario_hh = scenario_hh[site.code] = {} site_deltas = deltas[site.code] = {'hhs': {}} delts = site_deltas['supply_deltas'] = {} for is_import in (True, False): delts[is_import] = {} for src in ('gen', 'net', 'gen-net', '3rd-party', '3rd-party-reverse', 'sub'): delts[is_import][src] = {'site': {}} earliest_delta = to_utc(Datetime.max) latest_delta = to_utc(Datetime.min) found_hh = False for typ in ('used', 'generated', 'parasitic', 'gen_net'): hh_str = site_scenario_hh.get(typ, '') hh_data = site_scenario_hh[typ] = {} for row in csv.reader(StringIO(hh_str)): cells = [cell.strip() for cell in row] if len(''.join(cells)) == 0: continue if len(cells) != 2: raise BadRequest( "Can't interpret the row " + str(cells) + " it should be of the form 'timestamp, kWh'") date_str, kw_str = cells ts = parse_hh_start(date_str) earliest_delta = min(ts, earliest_delta) latest_delta = max(ts, latest_delta) hh_data[ts] = float(kw_str) found_hh = True if not found_hh: continue scenario_used = site_scenario_hh['used'] scenario_generated = site_scenario_hh['generated'] 
scenario_parasitic = site_scenario_hh['parasitic'] scenario_gen_net = site_scenario_hh['gen_net'] month_start = utc_datetime(earliest_delta.year, earliest_delta.month) while month_start <= latest_delta: month_finish = month_start + relativedelta(months=1) - HH chunk_start = hh_max(month_start, earliest_delta) chunk_finish = hh_min(month_finish, latest_delta) site_ds = chellow.computer.SiteSource(sess, site, chunk_start, chunk_finish, forecast_from, report_context) hh_map = dict((h['start-date'], h) for h in site_ds.hh_data) for era in sess.query(Era).join(SiteEra).join(Pc).filter( SiteEra.site == site, SiteEra.is_physical == true(), Era.imp_mpan_core != null(), Pc.code != '00', Era.start_date <= chunk_finish, or_(Era.finish_date == null(), Era.finish_date >= chunk_start), ~Era.channels.any()): if supply_id is not None and era.supply_id != supply_id: continue ss_start = hh_max(era.start_date, chunk_start) ss_finish = hh_min(era.finish_date, chunk_finish) ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, True, report_context) for hh in ss.hh_data: sdatum = hh_map[hh['start-date']] sdatum['import-net-kwh'] += hh['msp-kwh'] sdatum['used-kwh'] += hh['msp-kwh'] for era in sess.query(Era).join(SiteEra).join(Pc).join( Supply).join(Source).filter( SiteEra.site == site, SiteEra.is_physical == true(), Era.imp_mpan_core != null(), Era.start_date <= chunk_finish, or_(Era.finish_date == null(), Era.finish_date >= chunk_start), Source.code == 'gen-net'): if supply_id is not None and era.supply_id != supply_id: continue ss_start = hh_max(era.start_date, chunk_start) ss_finish = hh_min(era.finish_date, chunk_finish) ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, False, report_context) for hh in ss.hh_data: sdatum = hh_map[hh['start-date']] try: sdatum['gen-net-kwh'] += hh['msp-kwh'] except KeyError: sdatum['gen-net-kwh'] = hh['msp-kwh'] for hh in site_ds.hh_data: hh_start = hh['start-date'] if hh_start in scenario_used: used_delt = 
scenario_used[hh_start] - hh['used-kwh'] imp_net_delt = 0 exp_net_delt = 0 if used_delt < 0: diff = hh['import-net-kwh'] + used_delt if diff < 0: imp_net_delt -= hh['import-net-kwh'] exp_net_delt -= diff else: imp_net_delt += used_delt else: diff = hh['export-net-kwh'] - used_delt if diff < 0: exp_net_delt -= hh['export-net-kwh'] imp_net_delt -= diff else: exp_net_delt -= used_delt try: delts[False]['net']['site'][hh_start] += \ exp_net_delt except KeyError: delts[False]['net']['site'][hh_start] = \ exp_net_delt try: delts[True]['net']['site'][hh_start] += \ imp_net_delt except KeyError: delts[True]['net']['site'][hh_start] = imp_net_delt hh['import-net-kwh'] += imp_net_delt hh['export-net-kwh'] += exp_net_delt hh['used-kwh'] += used_delt hh['msp-kwh'] -= exp_net_delt if hh_start in scenario_generated: imp_gen_delt = scenario_generated[hh_start] - \ hh['import-gen-kwh'] imp_net_delt = 0 exp_net_delt = 0 if imp_gen_delt < 0: diff = hh['export-net-kwh'] + imp_gen_delt if diff < 0: exp_net_delt -= hh['export-net-kwh'] imp_net_delt -= diff else: exp_net_delt += imp_gen_delt else: diff = hh['import-net-kwh'] - imp_gen_delt if diff < 0: imp_net_delt -= hh['import-net-kwh'] exp_net_delt -= diff else: imp_net_delt -= imp_gen_delt try: delts[True]['gen']['site'][hh_start] += \ imp_gen_delt except KeyError: delts[True]['gen']['site'][hh_start] = imp_gen_delt try: delts[False]['net']['site'][hh_start] += \ exp_net_delt except KeyError: delts[False]['net']['site'][hh_start] = \ exp_net_delt try: delts[True]['net']['site'][hh_start] += \ imp_net_delt except KeyError: delts[True]['net']['site'][hh_start] = imp_net_delt hh['import-net-kwh'] += imp_net_delt hh['export-net-kwh'] += exp_net_delt hh['import-gen-kwh'] += imp_gen_delt hh['msp-kwh'] -= imp_net_delt if hh_start in scenario_parasitic: exp_gen_delt = scenario_parasitic[hh_start] - \ hh['export-gen-kwh'] imp_net_delt = 0 exp_net_delt = 0 if exp_gen_delt < 0: diff = hh['import-net-kwh'] + exp_gen_delt if diff < 0: 
imp_net_delt -= hh['import-net-kwh'] exp_net_delt -= diff else: imp_net_delt += exp_gen_delt else: diff = hh['export-net-kwh'] - exp_gen_delt if diff < 0: exp_net_delt -= hh['export-net-kwh'] imp_net_delt -= diff else: exp_net_delt -= exp_gen_delt try: delts[False]['gen']['site'][hh_start] += \ imp_gen_delt except KeyError: delts[False]['gen']['site'][hh_start] = \ exp_gen_delt try: delts[False]['net']['site'][hh_start] += \ exp_net_delt except KeyError: delts[False]['net']['site'][hh_start] = \ exp_net_delt try: delts[True]['net']['site'][hh_start] += \ imp_net_delt except KeyError: delts[True]['net']['site'][hh_start] = imp_net_delt hh['import-net-kwh'] += imp_net_delt hh['export-net-kwh'] += exp_net_delt hh['export-gen-kwh'] += exp_gen_delt hh['msp-kwh'] -= imp_net_delt if hh_start in scenario_gen_net: gen_net_delt = scenario_gen_net[hh_start] - \ hh['gen-net-kwh'] try: delts[False]['gen-net']['site'][hh_start] += \ gen_net_delt except KeyError: delts[False]['gen-net']['site'][hh_start] = \ gen_net_delt hh['import-gen-kwh'] += gen_net_delt hh['export-net-kwh'] += gen_net_delt site_deltas['hhs'][hh_start] = hh month_start += relativedelta(months=1) month_start = start_date while month_start < finish_date: month_finish = month_start + relativedelta(months=1) - HH for site in sites: site_category = None site_sources = set() site_gen_types = set() site_month_data = defaultdict(int) calcs = [] for era in sess.query(Era).join(SiteEra).join(Pc).filter( SiteEra.site == site, SiteEra.is_physical == true(), Era.start_date <= month_finish, or_(Era.finish_date == null(), Era.finish_date >= month_start)).options( joinedload(Era.ssc), joinedload(Era.dc_contract), joinedload(Era.mop_contract), joinedload(Era.imp_supplier_contract), joinedload(Era.exp_supplier_contract), joinedload(Era.channels), joinedload(Era.imp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.exp_llfc).joinedload( Llfc.voltage_level), joinedload(Era.cop), joinedload(Era.supply).joinedload(Supply.dno), 
joinedload(Era.supply).joinedload( Supply.gsp_group), joinedload(Era.supply).joinedload( Supply.source), joinedload(Era.mtc).joinedload(Mtc.meter_type), joinedload(Era.pc), joinedload(Era.site_eras)).order_by(Pc.code): supply = era.supply if supply.generator_type is not None: site_gen_types.add(supply.generator_type.code) if supply_id is not None and supply.id != supply_id: continue ss_start = hh_max(era.start_date, month_start) ss_finish = hh_min(era.finish_date, month_finish) if era.imp_mpan_core is None: imp_ss = None else: sup_deltas = site_deltas['supply_deltas'][True][ supply.source.code] imp_ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, True, report_context, era_maps=era_maps, deltas=sup_deltas) if era.exp_mpan_core is None: exp_ss = None measurement_type = imp_ss.measurement_type else: sup_deltas = site_deltas['supply_deltas'][False][ supply.source.code] exp_ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era, False, report_context, era_maps=era_maps, deltas=sup_deltas) measurement_type = exp_ss.measurement_type order = meter_order[measurement_type] calcs.append((order, era.imp_mpan_core, era.exp_mpan_core, imp_ss, exp_ss)) # Check if gen deltas haven't been consumed extra_sss = set() for is_imp in (True, False): sup_deltas = site_deltas['supply_deltas'][is_imp]['gen'] if len( list(t for t in sup_deltas['site'] if month_start <= t <= month_finish)) > 0: extra_sss.add(is_imp) displaced_era = chellow.computer.displaced_era( sess, report_context, site, month_start, month_finish, forecast_from, has_scenario_generation=len(extra_sss) > 0) if len(extra_sss) > 0: if True in extra_sss: sup_deltas = site_deltas['supply_deltas'][True]['gen'] imp_ss_name = site.code + "_extra_gen_TRUE" imp_ss = ScenarioSource( sess, month_start, month_finish, True, report_context, sup_deltas, displaced_era.imp_supplier_contract, imp_ss_name) else: imp_ss_name = imp_ss = None if False in extra_sss: exp_ss_name = site.code + "_extra_gen_FALSE" sup_deltas = 
site_deltas['supply_deltas'][False]['gen'] exp_ss = ScenarioSource( sess, month_start, month_finish, False, report_context, sup_deltas, displaced_era.imp_supplier_contract, imp_ss_name) else: exp_ss_name = exp_ss = None calcs.append((0, imp_ss_name, exp_ss_name, imp_ss, exp_ss)) # Check if exp net deltas haven't been consumed sup_deltas = site_deltas['supply_deltas'][False]['net'] if len( list(t for t in sup_deltas['site'] if month_start <= t <= month_finish)) > 0: ss_name = site.code + "_extra_net_export" ss = SupplySource(sess, month_start, month_finish, forecast_from, displaced_era, False, report_context, era_maps=era_maps, deltas=sup_deltas) calcs.append((0, None, ss_name, None, ss)) site_ds = chellow.computer.SiteSource(sess, site, month_start, month_finish, forecast_from, report_context, displaced_era, deltas=site_deltas) if displaced_era is not None and supply_id is None: month_data = {} for sname in ('import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'msp', 'used', 'used-3rd-party', 'billed-import-net'): for xname in ('kwh', 'gbp'): month_data[sname + '-' + xname] = 0 month_data['used-kwh'] = month_data['displaced-kwh'] = sum( hh['msp-kwh'] for hh in site_ds.hh_data) disp_supplier_contract = \ displaced_era.imp_supplier_contract disp_vb_function = chellow.computer.contract_func( report_context, disp_supplier_contract, 'displaced_virtual_bill') if disp_vb_function is None: raise BadRequest( "The supplier contract " + disp_supplier_contract.name + " doesn't have the displaced_virtual_bill() " "function.") disp_vb_function(site_ds) disp_supplier_bill = site_ds.supplier_bill try: gbp = disp_supplier_bill['net-gbp'] except KeyError: disp_supplier_bill['problem'] += 'For the supply ' + \ site_ds.mpan_core + ' the virtual bill ' + \ str(disp_supplier_bill) + ' from the contract ' + \ disp_supplier_contract.name + \ ' does not contain the net-gbp key.' 
month_data['used-gbp'] = month_data['displaced-gbp'] = \ site_ds.supplier_bill['net-gbp'] out = [ now, None, disp_supplier_contract.name, None, None, displaced_era.meter_category, 'displaced', None, None, None, None, site.code, site.name, '', month_finish ] + [month_data[t] for t in summary_titles ] + [None] + [None] * len(title_dict['mop']) + [ None ] + [None] * len(title_dict['dc']) + [ None ] + make_bill_row(title_dict['imp-supplier'], disp_supplier_bill) era_rows.append(out) for k, v in month_data.items(): site_month_data[k] += v for i, (order, imp_mpan_core, exp_mpan_core, imp_ss, exp_ss) in enumerate(sorted(calcs, key=str)): if imp_ss is None: source_code = exp_ss.source_code supply = exp_ss.supply else: source_code = imp_ss.source_code supply = imp_ss.supply site_sources.add(source_code) month_data = {} for name in ('import-net', 'export-net', 'import-gen', 'export-gen', 'import-3rd-party', 'export-3rd-party', 'displaced', 'used', 'used-3rd-party', 'billed-import-net'): for sname in ('kwh', 'gbp'): month_data[name + '-' + sname] = 0 if imp_ss is not None: imp_supplier_contract = imp_ss.supplier_contract if imp_supplier_contract is not None: import_vb_function = contract_func( report_context, imp_supplier_contract, 'virtual_bill') if import_vb_function is None: raise BadRequest( "The supplier contract " + imp_supplier_contract.name + " doesn't have the virtual_bill() " "function.") import_vb_function(imp_ss) kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data) imp_supplier_bill = imp_ss.supplier_bill try: gbp = imp_supplier_bill['net-gbp'] except KeyError: gbp = 0 imp_supplier_bill['problem'] += \ 'For the supply ' + \ imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' 
if source_code in ('net', 'gen-net'): month_data['import-net-gbp'] += gbp month_data['import-net-kwh'] += kwh month_data['used-gbp'] += gbp month_data['used-kwh'] += kwh if source_code == 'gen-net': month_data['export-gen-kwh'] += kwh elif source_code == '3rd-party': month_data['import-3rd-party-gbp'] += gbp month_data['import-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] += gbp month_data['used-3rd-party-kwh'] += kwh month_data['used-gbp'] += gbp month_data['used-kwh'] += kwh elif source_code == '3rd-party-reverse': month_data['export-3rd-party-gbp'] += gbp month_data['export-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] -= gbp month_data['used-3rd-party-kwh'] -= kwh month_data['used-gbp'] -= gbp month_data['used-kwh'] -= kwh elif source_code == 'gen': month_data['import-gen-kwh'] += kwh if exp_ss is not None: exp_supplier_contract = exp_ss.supplier_contract if exp_supplier_contract is not None: export_vb_function = contract_func( report_context, exp_supplier_contract, 'virtual_bill') export_vb_function(exp_ss) kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data) exp_supplier_bill = exp_ss.supplier_bill try: gbp = exp_supplier_bill['net-gbp'] except KeyError: exp_supplier_bill['problem'] += \ 'For the supply ' + imp_ss.mpan_core + \ ' the virtual bill ' + \ str(imp_supplier_bill) + \ ' from the contract ' + \ imp_supplier_contract.name + \ ' does not contain the net-gbp key.' 
if source_code in ('net', 'gen-net'): month_data['export-net-gbp'] += gbp month_data['export-net-kwh'] += kwh if source_code == 'gen-net': month_data['import-gen-kwh'] += kwh elif source_code == '3rd-party': month_data['export-3rd-party-gbp'] += gbp month_data['export-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] -= gbp month_data['used-3rd-party-kwh'] -= kwh month_data['used-gbp'] -= gbp month_data['used-kwh'] -= kwh elif source_code == '3rd-party-reverse': month_data['import-3rd-party-gbp'] += gbp month_data['import-3rd-party-kwh'] += kwh month_data['used-3rd-party-gbp'] += gbp month_data['used-3rd-party-kwh'] += kwh month_data['used-gbp'] += gbp month_data['used-kwh'] += kwh elif source_code == 'gen': month_data['export-gen-kwh'] += kwh sss = exp_ss if imp_ss is None else imp_ss dc_contract = sss.dc_contract if dc_contract is not None: sss.contract_func(dc_contract, 'virtual_bill')(sss) dc_bill = sss.dc_bill gbp = dc_bill['net-gbp'] mop_contract = sss.mop_contract if mop_contract is not None: mop_bill_function = sss.contract_func( mop_contract, 'virtual_bill') mop_bill_function(sss) mop_bill = sss.mop_bill gbp += mop_bill['net-gbp'] if source_code in ('3rd-party', '3rd-party-reverse'): month_data['import-3rd-party-gbp'] += gbp month_data['used-3rd-party-gbp'] += gbp else: month_data['import-net-gbp'] += gbp month_data['used-gbp'] += gbp generator_type = sss.generator_type_code if source_code in ('gen', 'gen-net'): site_gen_types.add(generator_type) era_category = sss.measurement_type if CATEGORY_ORDER[site_category] < \ CATEGORY_ORDER[era_category]: site_category = era_category era_associates = set() if mop_contract is not None: era_associates.update({ s.site.code for s in era.site_eras if not s.is_physical }) for bill in sess.query(Bill).filter( Bill.supply == supply, Bill.start_date <= sss.finish_date, Bill.finish_date >= sss.start_date): bill_start = bill.start_date bill_finish = bill.finish_date bill_duration = ( bill_finish - 
bill_start).total_seconds() + \ (30 * 60) overlap_duration = ( min(bill_finish, sss.finish_date) - max(bill_start, sss.start_date) ).total_seconds() + (30 * 60) overlap_proportion = overlap_duration / \ bill_duration month_data['billed-import-net-kwh'] += \ overlap_proportion * float(bill.kwh) month_data['billed-import-net-gbp'] += \ overlap_proportion * float(bill.net) if imp_ss is None: imp_supplier_contract_name = None pc_code = exp_ss.pc_code else: if imp_supplier_contract is None: imp_supplier_contract_name = '' else: imp_supplier_contract_name = \ imp_supplier_contract.name pc_code = imp_ss.pc_code if exp_ss is None: exp_supplier_contract_name = None else: if exp_supplier_contract is None: exp_supplier_contract_name = '' else: exp_supplier_contract_name = \ exp_supplier_contract.name out = [ now, imp_mpan_core, imp_supplier_contract_name, exp_mpan_core, exp_supplier_contract_name, era_category, source_code, generator_type, sss.supply_name, sss.msn, pc_code, site.code, site.name, ','.join(sorted(list(era_associates))), month_finish] + [ month_data[t] for t in summary_titles] + [None] + \ make_bill_row(title_dict['mop'], mop_bill) + [None] + \ make_bill_row(title_dict['dc'], dc_bill) if imp_ss is None: out += [None] * (len(title_dict['imp-supplier']) + 1) else: out += [None] + make_bill_row( title_dict['imp-supplier'], imp_supplier_bill) if exp_ss is not None: out += [None] + make_bill_row( title_dict['exp-supplier'], exp_supplier_bill) for k, v in month_data.items(): site_month_data[k] += v era_rows.append(out) site_rows.append([ now, site.code, site.name, ', '.join( s.code for s in site.find_linked_sites( sess, month_start, month_finish)), month_finish, site_category, ', '.join(sorted(list(site_sources))), ', '.join(sorted(list(site_gen_types))) ] + [site_month_data[k] for k in summary_titles]) sess.rollback() write_spreadsheet(rf, compression, site_rows, era_rows) month_start += relativedelta(months=1) except BadRequest as e: msg = e.description + 
traceback.format_exc() sys.stderr.write(msg + '\n') site_rows.append(["Problem " + msg]) write_spreadsheet(rf, compression, site_rows, era_rows) except BaseException: msg = traceback.format_exc() sys.stderr.write(msg + '\n') site_rows.append(["Problem " + msg]) write_spreadsheet(rf, compression, site_rows, era_rows) finally: if sess is not None: sess.close() try: rf.close() os.rename(running_name, finished_name) except BaseException: msg = traceback.format_exc() r_name, f_name = chellow.dloads.make_names('error.txt', user) ef = open(r_name, "w") ef.write(msg + '\n') ef.close()
def _log(self, msg):
    """Prepend a timestamped entry to the importer's in-memory log.

    The write is serialised with ``import_lock`` so concurrent importer
    threads don't interleave their appends.
    """
    with import_lock:
        stamp = utc_datetime_now().strftime("%Y-%m-%d %H:%M:%S")
        self.log.appendleft(stamp + ' - ' + msg)
def run_inner(self, sess):
    """Check the Gas Calorific Value (GCv) importer for a new month of data.

    Looks at the latest 'g_cv' rate script, works out the next month to
    import, downloads LDZ calorific values as CSV from the configured URL,
    and — once a complete month is available — appends a new rate script.
    Progress and problems are reported via self.log.
    """
    self.log("Starting to check GCv rates.")
    contract = Contract.get_non_core_by_name(sess, 'g_cv')
    # The most recent rate script determines which month to fetch next.
    latest_rs = sess.query(RateScript).filter(
        RateScript.contract == contract).order_by(
        RateScript.start_date.desc()).first()
    latest_rs_id = latest_rs.id
    this_month_start = latest_rs.start_date + relativedelta(months=1)
    next_month_start = this_month_start + relativedelta(months=1)
    now = utc_datetime_now()
    props = contract.make_properties()
    if props.get('enabled', False):
        # Search one day past the end of the target month so the final
        # day's value (published afterwards) can be picked up.
        search_finish = next_month_start + relativedelta(days=1)
        if now > search_finish:
            url = props['url']
            self.log(
                "Checking to see if data is available from " +
                hh_format(this_month_start) + " to " +
                hh_format(search_finish) + " at " + url)
            # The PublicationObjectIds enumerate the 19 LDZ calorific-value
            # series on the data provider's portal.
            res = requests.post(
                url, data={
                    'LatestValue': 'true',
                    'PublicationObjectIds':
                        '408:12265,+408:4636,+408:4637,+408:4639,'
                        '+408:4638,+408:4640,+408:4641,+408:4642,'
                        '+408:4643,+408:4644,+408:4645,+408:4646,'
                        '+408:4647,+408:4648,+408:12269,+408:12268,'
                        '+408:12270,+408:12266,+408:12267',
                    'Applicable': 'applicableFor',
                    'PublicationObjectCount': '19',
                    'FromUtcDatetime': param_format(this_month_start),
                    'ToUtcDateTime': param_format(search_finish),
                    'FileType': 'Csv'})
            self.log("Received " + str(res.status_code) + " " + res.reason)
            # month_cv maps LDZ code -> {day -> {'cv', 'applicable_at'}}.
            month_cv = defaultdict(dict)
            cf = csv.reader(res.text.splitlines())
            row = next(cf)  # Skip title row
            last_date = to_utc(Datetime.min)
            for row in cf:
                applicable_at_str = row[0]
                applicable_for_str = row[1]
                applicable_for = to_utc(
                    Datetime.strptime(applicable_for_str, "%d/%m/%Y"))
                data_item = row[2]
                value_str = row[3]
                # Only LDZ rows falling inside the target month are wanted.
                if 'LDZ' in data_item and \
                        this_month_start <= applicable_for < \
                        next_month_start:
                    # The LDZ code is embedded near the end of the item name,
                    # e.g. "...(EA)" -> "EA".
                    ldz = data_item[-3:-1]
                    cvs = month_cv[ldz]
                    applicable_at = to_utc(
                        Datetime.strptime(
                            applicable_at_str, "%d/%m/%Y %H:%M:%S"))
                    last_date = max(last_date, applicable_at)
                    cv = Decimal(value_str)
                    try:
                        # Keep only the most recently published value for
                        # each day of the month.
                        existing = cvs[applicable_for.day]
                        if applicable_at > existing['applicable_at']:
                            existing['cv'] = cv
                            existing['applicable_at'] = applicable_at
                    except KeyError:
                        cvs[applicable_for.day] = {
                            'cv': cv,
                            'applicable_at': applicable_at}
            # Every LDZ must have the same number of days filled in.
            all_equal = len(set(map(len, month_cv.values()))) <= 1
            if last_date + Timedelta(days=1) >= next_month_start and \
                    all_equal:
                self.log("The whole month's data is there.")
                script = {'cvs': month_cv}
                contract = Contract.get_non_core_by_name(sess, 'g_cv')
                rs = RateScript.get_by_id(sess, latest_rs_id)
                # Close off the current rate script at the end of next
                # month, then append the newly imported month after it.
                contract.update_rate_script(
                    sess, rs, rs.start_date,
                    rs.start_date + relativedelta(months=2) - HH,
                    loads(rs.script))
                sess.flush()
                contract.insert_rate_script(
                    sess, rs.start_date + relativedelta(months=1), script)
                sess.commit()
                self.log("Added new rate script.")
            else:
                self.log(
                    "There isn't a whole month there yet. The "
                    "last date is " + hh_format(last_date) + ".")
    else:
        self.log(
            "The automatic importer is disabled. To "
            "enable it, edit the contract properties to "
            "set 'enabled' to True.")
def forecast_date():
    """Return the first instant of the current month (UTC)."""
    current = utc_datetime_now()
    return utc_datetime(current.year, current.month, 1)
def _log(self, msg):
    """Push *msg* onto the front of the shared log with a UTC timestamp.

    Guarded by ``import_lock`` so log writes from different threads
    stay atomic.
    """
    with import_lock:
        entry = utc_datetime_now().strftime("%Y-%m-%d %H:%M:%S") + ' - ' + msg
        self.log.appendleft(entry)
def do_post(sess):
    """Handle the POST for the monthly-duration ODS report.

    Builds the scenario properties either from a stored Scenario (when
    'scenario_id' is posted) or from the finish year/month/months form
    fields, then kicks off the report in a background thread and
    redirects to the downloads page.  On invalid input, re-renders the
    form with a 400 status.
    """
    base_name = []
    now = utc_datetime_now()
    if "scenario_id" in request.values:
        # Use a saved scenario; fill in missing start year/month with the
        # current month (in CT local time).
        scenario_id = req_int("scenario_id")
        scenario = Scenario.get_by_id(sess, scenario_id)
        scenario_props = scenario.props
        base_name.append(scenario.name)
        start_year = scenario_props["scenario_start_year"]
        start_month = scenario_props["scenario_start_month"]
        start_date_ct = ct_datetime(now.year, now.month, 1)
        if start_year is None:
            scenario_props["scenario_start_year"] = start_date_ct.year
        if start_month is None:
            scenario_props["scenario_start_month"] = start_date_ct.month
    else:
        # Ad-hoc run: derive the start month by counting back `months`
        # from the requested finish month.
        year = req_int("finish_year")
        month = req_int("finish_month")
        months = req_int("months")
        start_date, _ = next(
            c_months_c(finish_year=year, finish_month=month, months=months))
        by_hh = req_bool("by_hh")
        scenario_props = {
            "scenario_start_year": start_date.year,
            "scenario_start_month": start_date.month,
            "scenario_duration": months,
            "by_hh": by_hh,
        }
        base_name.append("monthly_duration")
    try:
        site_id = req_int("site_id") if "site_id" in request.values else None
        if "site_codes" in request.values:
            site_codes = req_str("site_codes").splitlines()
            # Check sites codes are valid
            for site_code in site_codes:
                Site.get_by_code(sess, site_code)
        else:
            site_codes = []
        if "supply_id" in request.values:
            supply_id = req_int("supply_id")
        else:
            supply_id = None
        if "compression" in request.values:
            compression = req_bool("compression")
        else:
            compression = True
        user = g.user
        # NOTE(review): this 8-element args tuple does not match the
        # 7-parameter `content(scenario_props, scenario_id, base_name, ...)`
        # defined later in this file — presumably `content` here refers to a
        # different module's function; verify which `content` is in scope.
        args = (
            scenario_props,
            base_name,
            site_id,
            supply_id,
            user,
            compression,
            site_codes,
            now,
        )
        threading.Thread(target=content, args=args).start()
        return chellow_redirect("/downloads", 303)
    except BadRequest as e:
        # Invalid form input: flash the message and redisplay the form
        # with last month's range pre-filled.
        flash(e.description)
        now = Datetime.utcnow()
        month_start = Datetime(now.year, now.month, 1) - relativedelta(months=1)
        month_finish = Datetime(now.year, now.month, 1) - HH
        return make_response(
            render_template(
                "ods_monthly_duration.html",
                month_start=month_start,
                month_finish=month_finish,
            ),
            400,
        )
def content(
        scenario_props, scenario_id, base_name, site_id, supply_id, user,
        compression):
    """Generate the scenario spreadsheet report.

    Runs month by month from the scenario start for `scenario_duration`
    months, computing per-era and per-site summary rows (kWh and GBP for
    import/export/generation/displaced/used, plus billed figures) and
    writing them to an ODS file via write_spreadsheet.  `kw_changes` rows
    in the scenario let 'used' or 'generated' kWh be scaled from a given
    date, with the knock-on deltas propagated to net import/export.

    Fixes relative to the previous revision (behaviour on the success
    path is unchanged):
      * bare `except:` clauses replaced with `except BaseException:`
      * the exp-supplier missing `net-gbp` handler no longer reuses the
        import side's stale `gbp` or references the import-side objects
      * the displaced branch no longer re-reads the missing `net-gbp`
        key after catching its KeyError
      * a stray debug print was removed
    """
    now = utc_datetime_now()
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs

    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            # Load the scenario from a supplier contract instead.
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        # Build 'future functions' that override rate scripts for any
        # non-core ('Z') contract mentioned in the scenario.
        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue

            try:
                rate_start = props['start_date']
            except KeyError:
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")

            if rate_start is not None:
                rate_start = to_utc(rate_start)

            lib = importlib.import_module('chellow.' + contract.name)
            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}

        start_date = scenario_props['scenario_start']
        if start_date is None:
            # Default to the start of the current month.
            start_date = utc_datetime(now.year, now.month, 1)
        else:
            start_date = to_utc(start_date)

        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None

        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = to_utc(kwh_start)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []

        # Parse the scenario's kW changes CSV: SITE_CODE, USED / GENERATED,
        # DATE, MULTIPLIER.
        changes = defaultdict(list, {})
        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''

        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) + " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = to_utc(Datetime.strptime(date_str.strip(), "%Y-%m-%d"))
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})

        era_header_titles = [
            'creation-date', 'imp-mpan-core', 'imp-supplier-contract',
            'exp-mpan-core', 'exp-supplier-contract', 'metering-type',
            'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'creation-date', 'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp']

        # Collect the virtual-bill column titles for each contract role.
        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr).join(Era.supply). \
                join(Source).filter(
                    Era.start_date <= finish_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles')
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        # Add TPR-specific kwh/rate/gbp columns for supplier bills.
        tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). \
            join(Era).filter(
                Era.start_date <= finish_date,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)
            ).order_by(Tpr.code).distinct()
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['imp-supplier'].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['exp-supplier'].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] +
            ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                calcs = []
                # kWh of unmetered import supplies, keyed by HH start.
                deltas = defaultdict(int)
                for era in sess.query(Era).join(SiteEra).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.start_date <= month_finish, or_(
                            Era.finish_date == null(),
                            Era.finish_date >= month_start)).options(
                        joinedload(Era.ssc),
                        joinedload(Era.hhdc_contract),
                        joinedload(Era.mop_contract),
                        joinedload(Era.imp_supplier_contract),
                        joinedload(Era.exp_supplier_contract),
                        joinedload(Era.channels),
                        joinedload(Era.imp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.exp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.cop),
                        joinedload(Era.supply).joinedload(
                            Supply.dno_contract),
                        joinedload(Era.supply).joinedload(
                            Supply.gsp_group),
                        joinedload(Era.mtc).joinedload(Mtc.meter_type),
                        joinedload(Era.pc),
                        joinedload(Era.site_eras)):
                    supply = era.supply
                    if supply.generator_type is not None:
                        site_gen_types.add(supply.generator_type.code)
                    if supply_id is not None and supply.id != supply_id:
                        continue
                    # Clip the era to the current month.
                    if era.start_date > month_start:
                        ss_start = era.start_date
                    else:
                        ss_start = month_start
                    if hh_before(era.finish_date, month_finish):
                        ss_finish = era.finish_date
                    else:
                        ss_finish = month_finish
                    if era.imp_mpan_core is None:
                        imp_ss = None
                    else:
                        imp_ss = SupplySource(
                            sess, ss_start, ss_finish, kwh_start, era, True,
                            report_context)
                    if era.exp_mpan_core is None:
                        exp_ss = None
                        measurement_type = imp_ss.measurement_type
                    else:
                        exp_ss = SupplySource(
                            sess, ss_start, ss_finish, kwh_start, era, False,
                            report_context)
                        measurement_type = exp_ss.measurement_type
                    order = meter_order[measurement_type]
                    calcs.append(
                        (
                            order, era.imp_mpan_core, era.exp_mpan_core,
                            imp_ss, exp_ss))
                    # Supplies without channels contribute estimated kWh.
                    if imp_ss is not None and len(era.channels) == 0:
                        for hh in imp_ss.hh_data:
                            deltas[hh['start-date']] += hh['msp-kwh']

                imp_net_delts = defaultdict(int)
                exp_net_delts = defaultdict(int)
                imp_gen_delts = defaultdict(int)
                displaced_era = chellow.computer.displaced_era(
                    sess, report_context, site, month_start, month_finish,
                    kwh_start)
                site_ds = chellow.computer.SiteSource(
                    sess, site, month_start, month_finish, kwh_start,
                    report_context, displaced_era)
                # Fold the unmetered-supply kWh into the site totals.
                for hh in site_ds.hh_data:
                    try:
                        delta = deltas[hh['start-date']]
                        hh['import-net-kwh'] += delta
                        hh['used-kwh'] += delta
                    except KeyError:
                        pass

                # Apply scenario kW changes, recording the per-HH deltas so
                # the affected supplies can be adjusted below.
                for hh in site_ds.hh_data:
                    for change in site_changes:
                        if change['type'] == 'used' and \
                                change['date'] <= hh['start-date']:
                            used = change['multiplier'] * hh['used-kwh']
                            exp_net = max(
                                0,
                                hh['import-gen-kwh'] -
                                hh['export-gen-kwh'] - used)
                            exp_net_delt = exp_net - hh['export-net-kwh']
                            exp_net_delts[hh['start-date']] += exp_net_delt
                            displaced = hh['import-gen-kwh'] - \
                                hh['export-gen-kwh'] - exp_net
                            imp_net = used - displaced
                            imp_delt = imp_net - hh['import-net-kwh']
                            imp_net_delts[hh['start-date']] += imp_delt
                            hh['import-net-kwh'] = imp_net
                            hh['used-kwh'] = used
                            hh['export-net-kwh'] = exp_net
                            hh['msp-kwh'] = displaced
                        elif change['type'] == 'generated' and \
                                change['date'] <= hh['start-date']:
                            imp_gen = change['multiplier'] * \
                                hh['import-gen-kwh']
                            imp_gen_delt = imp_gen - hh['import-gen-kwh']
                            exp_net = max(
                                0,
                                imp_gen - hh['export-gen-kwh'] -
                                hh['used-kwh'])
                            exp_net_delt = exp_net - hh['export-net-kwh']
                            exp_net_delts[hh['start-date']] += exp_net_delt
                            displaced = imp_gen - hh['export-gen-kwh'] - \
                                exp_net
                            imp_net = hh['used-kwh'] - displaced
                            imp_net_delt = imp_net - hh['import-net-kwh']
                            imp_net_delts[hh['start-date']] += imp_net_delt
                            imp_gen_delts[hh['start-date']] += imp_gen_delt
                            hh['import-net-kwh'] = imp_net
                            hh['export-net-kwh'] = exp_net
                            hh['import-gen-kwh'] = imp_gen
                            hh['msp-kwh'] = displaced

                # Displaced row for the site (skipped when filtering to one
                # supply, since displacement is a site-level concept).
                if displaced_era is not None and supply_id is None:
                    month_data = {}
                    for sname in (
                            'import-net', 'export-net', 'import-gen',
                            'export-gen', 'import-3rd-party',
                            'export-3rd-party', 'msp', 'used',
                            'used-3rd-party', 'billed-import-net'):
                        for xname in ('kwh', 'gbp'):
                            month_data[sname + '-' + xname] = 0

                    month_data['used-kwh'] = \
                        month_data['displaced-kwh'] = \
                        sum(hh['msp-kwh'] for hh in site_ds.hh_data)

                    disp_supplier_contract = \
                        displaced_era.imp_supplier_contract
                    disp_vb_function = chellow.computer.contract_func(
                        report_context, disp_supplier_contract,
                        'displaced_virtual_bill')
                    if disp_vb_function is None:
                        raise BadRequest(
                            "The supplier contract " +
                            disp_supplier_contract.name +
                            " doesn't have the displaced_virtual_bill() "
                            "function.")
                    disp_vb_function(site_ds)
                    disp_supplier_bill = site_ds.supplier_bill

                    try:
                        gbp = disp_supplier_bill['net-gbp']
                    except KeyError:
                        # Record the problem and carry on with zero, rather
                        # than re-reading the missing key (which previously
                        # crashed the report).
                        gbp = 0
                        disp_supplier_bill['problem'] += 'For the supply ' + \
                            site_ds.mpan_core + ' the virtual bill ' + \
                            str(disp_supplier_bill) + ' from the contract ' + \
                            disp_supplier_contract.name + \
                            ' does not contain the net-gbp key.'

                    month_data['used-gbp'] = month_data['displaced-gbp'] = gbp

                    out = [
                        now, None, disp_supplier_contract.name, None, None,
                        displaced_era.make_meter_category(), 'displaced',
                        None, None, None, None, site.code, site.name, '',
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + [
                        None] * len(title_dict['mop']) + [None] + [
                        None] * len(title_dict['dc']) + [None] + make_bill_row(
                        title_dict['imp-supplier'], disp_supplier_bill)
                    era_rows.append(out)
                    for k, v in month_data.items():
                        site_month_data[k] += v

                # One row per era, metered supplies first.
                for order, imp_mpan_core, exp_mpan_core, imp_ss, exp_ss \
                        in sorted(calcs, key=str):
                    if imp_ss is None:
                        era = exp_ss.era
                    else:
                        era = imp_ss.era
                    supply = era.supply
                    source = supply.source
                    source_code = source.code
                    site_sources.add(source_code)
                    month_data = {}
                    for name in (
                            'import-net', 'export-net', 'import-gen',
                            'export-gen', 'import-3rd-party',
                            'export-3rd-party', 'displaced', 'used',
                            'used-3rd-party', 'billed-import-net'):
                        for sname in ('kwh', 'gbp'):
                            month_data[name + '-' + sname] = 0

                    # Pick up any scenario deltas to spread over this supply.
                    if source_code == 'net':
                        delts = imp_net_delts
                    elif source_code == 'gen':
                        delts = imp_gen_delts
                    else:
                        delts = []

                    if len(delts) > 0 and imp_ss is not None:
                        for hh in imp_ss.hh_data:
                            diff = hh['msp-kwh'] + delts[hh['start-date']]
                            if diff < 0:
                                hh['msp-kwh'] = 0
                                hh['msp-kw'] = 0
                                delts[hh['start-date']] -= hh['msp-kwh']
                            else:
                                hh['msp-kwh'] += delts[hh['start-date']]
                                hh['msp-kw'] += hh['msp-kwh'] / 2
                                del delts[hh['start-date']]

                        # Any remainder goes in the first half-hour.
                        left_kwh = sum(delts.values())
                        if left_kwh > 0:
                            first_hh = imp_ss.hh_data[0]
                            first_hh['msp-kwh'] += left_kwh
                            first_hh['msp-kw'] += left_kwh / 2

                    imp_supplier_contract = era.imp_supplier_contract
                    if imp_supplier_contract is not None:
                        kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data)
                        import_vb_function = contract_func(
                            report_context, imp_supplier_contract,
                            'virtual_bill')
                        if import_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                imp_supplier_contract.name +
                                " doesn't have the virtual_bill() "
                                "function.")
                        import_vb_function(imp_ss)
                        imp_supplier_bill = imp_ss.supplier_bill
                        try:
                            gbp = imp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            imp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                imp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(imp_supplier_bill) + \
                                ' from the contract ' + \
                                imp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                            month_data['import-net-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['export-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == 'gen':
                            month_data['import-gen-kwh'] += kwh

                    exp_supplier_contract = era.exp_supplier_contract
                    if exp_supplier_contract is not None:
                        kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)
                        export_vb_function = contract_func(
                            report_context, exp_supplier_contract,
                            'virtual_bill')
                        export_vb_function(exp_ss)
                        exp_supplier_bill = exp_ss.supplier_bill
                        try:
                            gbp = exp_supplier_bill['net-gbp']
                        except KeyError:
                            # Previously this branch left `gbp` holding the
                            # import bill's value and named the import-side
                            # objects in the message; use the export side.
                            gbp = 0
                            exp_supplier_bill['problem'] += \
                                'For the supply ' + exp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(exp_supplier_bill) + \
                                ' from the contract ' + \
                                exp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['export-net-gbp'] += gbp
                            month_data['export-net-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['import-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == 'gen':
                            month_data['export-gen-kwh'] += kwh

                    # MOP and DC charges are computed on whichever supply
                    # source exists, and attributed to the import side.
                    sss = exp_ss if imp_ss is None else imp_ss
                    dc_contract = era.hhdc_contract
                    sss.contract_func(dc_contract, 'virtual_bill')(sss)
                    dc_bill = sss.dc_bill
                    gbp = dc_bill['net-gbp']

                    mop_contract = era.mop_contract
                    mop_bill_function = sss.contract_func(
                        mop_contract, 'virtual_bill')
                    mop_bill_function(sss)
                    mop_bill = sss.mop_bill
                    gbp += mop_bill['net-gbp']
                    if source_code in ('3rd-party', '3rd-party-reverse'):
                        month_data['import-3rd-party-gbp'] += gbp
                        month_data['used-3rd-party-gbp'] += gbp
                    else:
                        month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp

                    if source_code in ('gen', 'gen-net'):
                        generator_type = supply.generator_type.code
                        site_gen_types.add(generator_type)
                    else:
                        generator_type = None

                    era_category = era.make_meter_category()
                    if CATEGORY_ORDER[site_category] < \
                            CATEGORY_ORDER[era_category]:
                        site_category = era_category

                    era_associates = {
                        s.site.code for s in era.site_eras
                        if not s.is_physical}

                    # Apportion actual bills overlapping this month.
                    for bill in sess.query(Bill).filter(
                            Bill.supply == supply,
                            Bill.start_date <= sss.finish_date,
                            Bill.finish_date >= sss.start_date):
                        bill_start = bill.start_date
                        bill_finish = bill.finish_date
                        bill_duration = (
                            bill_finish - bill_start).total_seconds() + \
                            (30 * 60)
                        overlap_duration = (
                            min(bill_finish, sss.finish_date) -
                            max(bill_start, sss.start_date)
                        ).total_seconds() + (30 * 60)
                        overlap_proportion = overlap_duration / bill_duration
                        month_data['billed-import-net-kwh'] += \
                            overlap_proportion * float(bill.kwh)
                        month_data['billed-import-net-gbp'] += \
                            overlap_proportion * float(bill.net)

                    out = [
                        now, era.imp_mpan_core,
                        (
                            None if imp_supplier_contract is None
                            else imp_supplier_contract.name),
                        era.exp_mpan_core,
                        (
                            None if exp_supplier_contract is None
                            else exp_supplier_contract.name),
                        era_category, source_code, generator_type,
                        supply.name, era.msn, era.pc.code, site.code,
                        site.name, ','.join(sorted(list(era_associates))),
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + \
                        make_bill_row(title_dict['mop'], mop_bill) + \
                        [None] + \
                        make_bill_row(title_dict['dc'], dc_bill)
                    if imp_supplier_contract is None:
                        out += [None] * (len(title_dict['imp-supplier']) + 1)
                    else:
                        out += [None] + make_bill_row(
                            title_dict['imp-supplier'], imp_supplier_bill)
                    if exp_supplier_contract is not None:
                        out += [None] + make_bill_row(
                            title_dict['exp-supplier'], exp_supplier_bill)
                    for k, v in month_data.items():
                        site_month_data[k] += v
                    era_rows.append(out)

                site_rows.append(
                    [
                        now, site.code, site.name,
                        ', '.join(
                            s.code for s in site.find_linked_sites(
                                sess, month_start, month_finish)),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
            # Flush progress to the spreadsheet after every month.
            write_spreadsheet(rf, compression, site_rows, era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
def run(self):
    """Thread loop that keeps the 'rcrc' rate scripts up to date.

    Every two hours (unless stopped) this looks at the latest rate script
    of the 'rcrc' non-core contract, and once the following month has
    completely elapsed it downloads the RCRC file from the Elexon Portal
    and appends a new rate script covering that month.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: if another import is already running,
        # skip this pass rather than queue behind it.
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check RCRCs.")
                contract = Contract.get_non_core_by_name(sess, "rcrc")
                # The most recent rate script tells us which month to
                # import next.
                latest_rs = (
                    sess.query(RateScript)
                    .filter(RateScript.contract_id == contract.id)
                    .order_by(RateScript.start_date.desc())
                    .first()
                )
                latest_rs_id = latest_rs.id
                latest_rs_start = latest_rs.start_date
                month_start = latest_rs_start + relativedelta(months=1)
                month_finish = month_start + relativedelta(months=1) - HH
                now = utc_datetime_now()
                # Only attempt the download once the month has fully
                # elapsed, so a complete data set can exist.
                if now > month_finish:
                    self.log(
                        "Checking to see if data is available from "
                        + hh_format(month_start)
                        + " to "
                        + hh_format(month_finish)
                        + " on Elexon Portal."
                    )
                    config = Contract.get_non_core_by_name(
                        sess, "configuration"
                    )
                    props = config.make_properties()
                    scripting_key = props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY
                    )
                    if scripting_key is None:
                        raise BadRequest(
                            "The property "
                            + ELEXON_PORTAL_SCRIPTING_KEY_KEY
                            + " cannot be found in the configuration "
                            "properties."
                        )
                    contract_props = contract.make_properties()
                    url_str = "".join(
                        (
                            contract_props["url"],
                            "file/download/RCRC_FILE?key=",
                            scripting_key,
                        )
                    )
                    r = requests.get(url_str, timeout=60)
                    parser = csv.reader(
                        (x.decode() for x in r.iter_lines()),
                        delimiter=",",
                        quotechar='"',
                    )
                    # Skip the two header rows of the CSV.
                    next(parser)
                    next(parser)
                    month_rcrcs = {}
                    for values in parser:
                        hh_date = utc_datetime_parse(values[0], "%d/%m/%Y")
                        # Column 2 is the settlement period number; each
                        # period is half an hour.
                        hh_date += relativedelta(minutes=30 * int(values[2]))
                        if month_start <= hh_date <= month_finish:
                            month_rcrcs[key_format(hh_date)] = Decimal(
                                values[3]
                            )
                    # If the final half-hour of the month is present,
                    # assume the whole month's data has been published.
                    if key_format(month_finish) in month_rcrcs:
                        self.log("The whole month's data is there.")
                        script = {"rates": month_rcrcs}
                        contract = Contract.get_non_core_by_name(
                            sess, "rcrc"
                        )
                        rs = RateScript.get_by_id(sess, latest_rs_id)
                        # Close off the latest rate script at the end of
                        # the new month, then insert the new script.
                        contract.update_rate_script(
                            sess, rs, rs.start_date, month_finish,
                            loads(rs.script)
                        )
                        contract.insert_rate_script(
                            sess, month_start, script
                        )
                        sess.commit()
                        self.log(
                            "Added a new rate script starting at "
                            + hh_format(month_start)
                            + "."
                        )
                    else:
                        msg = "There isn't a whole month there yet."
                        # Guard: sorted([])[-1] would raise IndexError.
                        if len(month_rcrcs) > 0:
                            msg += (
                                " The last date is "
                                + sorted(month_rcrcs.keys())[-1]
                            )
                        self.log(msg)
            except BaseException:
                # Catch everything so one bad pass can't kill the
                # importer thread; log and roll back instead.
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                self.lock.release()
                self.log("Finished checking RCRC rates.")
                if sess is not None:
                    sess.close()
        # Sleep for two hours (or until woken) before the next pass.
        self.going.wait(2 * 60 * 60)
        self.going.clear()
def run(self):
    """Thread loop that keeps the 'bsuos' rate scripts up to date.

    Each pass finds the latest BSUoS rate script and, once the following
    month has completely elapsed, downloads the spreadsheet of
    half-hourly BSUoS prices and inserts a rate script covering that
    month.  The pass is skipped if the importer lock is already held or
    the contract property 'enabled' isn't set to True.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: if another import is already running,
        # skip this pass rather than queue behind it.
        if self.lock.acquire(False):
            sess = book = sheet = None
            try:
                sess = Session()
                self.log("Starting to check BSUoS rates.")
                contract = Contract.get_non_core_by_name(sess, 'bsuos')
                latest_rs = sess.query(RateScript).filter(
                    RateScript.contract == contract).order_by(
                    RateScript.start_date.desc()).first()
                latest_rs_id = latest_rs.id
                # The month after the latest rate script is the one to
                # import next.
                this_month_start = latest_rs.start_date + \
                    relativedelta(months=1)
                next_month_start = this_month_start + \
                    relativedelta(months=1)
                now = utc_datetime_now()
                props = contract.make_properties()
                if props.get('enabled', False):
                    # Only fetch once the month has fully elapsed.
                    if now > next_month_start:
                        url = props['url']
                        self.log(
                            "Checking to see if data is available from " +
                            str(this_month_start) + " to " +
                            str(next_month_start - HH) + " at " + url)
                        # Fix: a timeout stops a dead server hanging the
                        # importer thread indefinitely (matches the RCRC
                        # importer).
                        res = requests.get(url, timeout=60)
                        self.log(
                            "Received " + str(res.status_code) + " " +
                            res.reason)
                        book = xlrd.open_workbook(
                            file_contents=res.content)
                        sheet = book.sheet_by_index(0)
                        month_bsuos = {}
                        for row_index in range(1, sheet.nrows):
                            row = sheet.row(row_index)
                            raw_date = Datetime(
                                *xlrd.xldate_as_tuple(
                                    row[0].value, book.datemode))
                            hh_date_ct = to_ct(raw_date)
                            hh_date = to_utc(hh_date_ct)
                            # Column 1 holds the settlement period
                            # number; each period is half an hour.
                            hh_date += relativedelta(
                                minutes=30*int(row[1].value))
                            if not hh_date < this_month_start and \
                                    hh_date < next_month_start:
                                month_bsuos[key_format(hh_date)] = \
                                    row[2].value
                        # If the final half-hour of the month is present,
                        # assume the whole month has been published.
                        if key_format(next_month_start - HH) in \
                                month_bsuos:
                            self.log("The whole month's data is there.")
                            script = "def rates_gbp_per_mwh():\n " \
                                "return {\n" + ',\n'.join(
                                    "'" + k + "': " + str(month_bsuos[k])
                                    for k in sorted(
                                        month_bsuos.keys())) + "}"
                            contract = Contract.get_non_core_by_name(
                                sess, 'bsuos')
                            rs = RateScript.get_by_id(sess, latest_rs_id)
                            # Close off the latest rate script at the end
                            # of the new month, then add the new script.
                            contract.update_rate_script(
                                sess, rs, rs.start_date,
                                rs.start_date +
                                relativedelta(months=2) - HH, rs.script)
                            sess.flush()
                            contract.insert_rate_script(
                                sess,
                                rs.start_date + relativedelta(months=1),
                                script)
                            sess.commit()
                            self.log("Added new rate script.")
                        else:
                            msg = "There isn't a whole month there yet."
                            # Fix: guard against an empty dict, otherwise
                            # sorted(...)[-1] raises IndexError (the RCRC
                            # and TLM importers already guard this way).
                            if len(month_bsuos) > 0:
                                msg += " The last date is " + \
                                    sorted(month_bsuos.keys())[-1]
                            self.log(msg)
                else:
                    self.log(
                        "The automatic importer is disabled. To "
                        "enable it, edit the contract properties to "
                        "set 'enabled' to True.")
            except BaseException:
                # Catch everything so one bad pass can't kill the
                # importer thread; log and roll back instead.
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                book = sheet = None
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking BSUoS rates.")
        # Sleep for half an hour (or until woken) before the next pass.
        self.going.wait(30 * 60)
        self.going.clear()
def https_handler(sess, log_f, properties, contract, now=None):
    """Download half-hourly data over HTTPS for each era of a DC contract.

    For every era attached to `contract` that overlaps the download
    window (the last `download_days` days, ending yesterday), renders the
    configured URL template per MPAN core, fetches the JSON data points
    and inserts them as ACTIVE half-hourly data.

    :param sess: SQLAlchemy session.
    :param log_f: callable taking a single string, used for logging.
    :param properties: contract properties; must contain 'url_template'
        and 'download_days', may contain 'url_values' keyed by MPAN core.
    :param contract: the DC contract whose eras are processed.
    :param now: override for the current time (used in tests); defaults
        to the current UTC time.
    :return: False, indicating the importer has no more work to do.
    :raises BadRequest: if the URL template can't be rendered or the
        response isn't a JSON object or list.
    """
    url_template_str = properties["url_template"]
    url_values = properties.get("url_values", {})
    download_days = properties["download_days"]
    if now is None:
        now = utc_datetime_now()
    # Window runs up to the last half-hour of yesterday.
    window_finish = utc_datetime(now.year, now.month, now.day) - HH
    window_start = utc_datetime(now.year, now.month, now.day) - Timedelta(
        days=download_days)
    log_f(f"Window start: {hh_format(window_start)}")
    log_f(f"Window finish: {hh_format(window_finish)}")
    env = jinja2.Environment(
        autoescape=True, undefined=jinja2.StrictUndefined)
    url_template = env.from_string(url_template_str)
    for era in (sess.query(Era).filter(
            Era.dc_contract == contract,
            Era.start_date <= window_finish,
            or_(Era.finish_date == null(), Era.finish_date >= window_start),
            ).distinct()):
        # Clamp the window to the era's own lifetime.
        chunk_start = hh_max(era.start_date, window_start)
        chunk_finish = hh_min(era.finish_date, window_finish)
        for mpan_core in (era.imp_mpan_core, era.exp_mpan_core):
            if mpan_core is None:
                continue
            log_f(f"Looking at MPAN core {mpan_core}.")
            vals = {"chunk_start": chunk_start, "chunk_finish": chunk_finish}
            vals.update(url_values.get(mpan_core, {}))
            try:
                url = url_template.render(vals)
            except jinja2.exceptions.UndefinedError as e:
                raise BadRequest(
                    f"Problem rendering the URL template: "
                    f"{url_template_str}. "
                    f"The problem is: {e}. This can be fixed by editing the "
                    f"properties of this contract.")
            log_f(f"Retrieving data from {url}.")
            sess.rollback()  # Avoid long transactions
            res = requests.get(url, timeout=120)
            res.raise_for_status()
            # Fix: reuse the response already fetched instead of
            # requesting the same URL a second time, which doubled load
            # and risked parsing a different response from the one whose
            # status was checked.
            result = res.json()
            if isinstance(result, dict):
                result_data = result["DataPoints"]
            elif isinstance(result, list):
                result_data = result
            else:
                raise BadRequest(
                    f"Expecting a JSON object at the top level, but "
                    f"instead got "
                    f"{result}")
            raw_data = []
            for jdatum in result_data:
                raw_data.append(
                    dict(
                        mpan_core=mpan_core,
                        # 'Time' is in 100-nanosecond ticks since year 1.
                        start_date=utc_datetime(1, 1, 1)
                        + Timedelta(seconds=jdatum["Time"] / 10000000),
                        channel_type="ACTIVE",
                        value=jdatum["Value"],
                        status="A",
                    ))
            HhDatum.insert(sess, raw_data, contract)
            sess.commit()
    log_f("Finished loading.")
    return False
def https_handler(self, sess, properties, contract):
    """Download half-hourly data over HTTPS for each era of a DC contract.

    For every era attached to `contract` that overlaps the download
    window (the last `download_days` days, ending yesterday), renders the
    configured URL template per MPAN core, fetches the JSON data points
    and inserts them as ACTIVE half-hourly data.

    :param sess: SQLAlchemy session.
    :param properties: contract properties; must contain 'url_template'
        and 'download_days', may contain 'url_values' keyed by MPAN core.
    :param contract: the DC contract whose eras are processed.
    :return: False, indicating the importer has no more work to do.
    :raises BadRequest: if the URL template can't be rendered or the
        response isn't a JSON object or list.
    """
    url_template_str = properties['url_template']
    url_values = properties.get('url_values', {})
    download_days = properties['download_days']
    now = utc_datetime_now()
    # Window runs up to the last half-hour of yesterday.
    window_finish = utc_datetime(now.year, now.month, now.day) - HH
    window_start = utc_datetime(now.year, now.month, now.day) - \
        Timedelta(days=download_days)
    self.log("Window start: " + hh_format(window_start))
    self.log("Window finish: " + hh_format(window_finish))
    env = jinja2.Environment(
        autoescape=True, undefined=jinja2.StrictUndefined)
    url_template = env.from_string(url_template_str)
    for era in sess.query(Era).filter(
            Era.dc_contract == contract,
            Era.start_date <= window_finish,
            or_(Era.finish_date == null(),
                Era.finish_date >= window_start)).distinct():
        # Clamp the window to the era's own lifetime.
        chunk_start = hh_max(era.start_date, window_start)
        chunk_finish = hh_min(era.finish_date, window_finish)
        for mpan_core in (era.imp_mpan_core, era.exp_mpan_core):
            if mpan_core is None:
                continue
            self.log("Looking at MPAN core {mpan_core}.".format(
                mpan_core=mpan_core))
            vals = {
                'chunk_start': chunk_start,
                'chunk_finish': chunk_finish
            }
            vals.update(url_values.get(mpan_core, {}))
            try:
                url = url_template.render(vals)
            except jinja2.exceptions.UndefinedError as e:
                raise BadRequest(
                    "Problem rendering the URL template: " +
                    url_template_str + ". The problem is: " +
                    str(e) + ". This can be fixed by " +
                    "editing the properties of this contract.")
            self.log("Retrieving data from {url}.".format(url=url))
            # Fix: fetch once and reuse the response. Previously the URL
            # was requested a second time for the JSON body, doubling
            # load and risking parsing a different response from the one
            # whose status was checked. Also add a timeout so a dead
            # server can't hang the importer thread.
            res = requests.get(url, timeout=120)
            res.raise_for_status()
            result = res.json()
            if isinstance(result, dict):
                result_data = result['DataPoints']
            elif isinstance(result, list):
                result_data = result
            else:
                raise BadRequest(
                    "Expecting a JSON object at the top level, but "
                    "instead got " + str(result))
            raw_data = []
            for jdatum in result_data:
                raw_data.append(
                    dict(mpan_core=mpan_core,
                         # 'Time' is in 100-nanosecond ticks since
                         # year 1.
                         start_date=utc_datetime(1, 1, 1) + Timedelta(
                             seconds=jdatum['Time'] / 10000000),
                         channel_type='ACTIVE', value=jdatum['Value'],
                         status='A'))
            HhDatum.insert(sess, raw_data, contract)
            sess.commit()
    self.log("Finished loading.")
    return False
def run(self):
    """Thread loop that keeps the 'tlms' rate scripts up to date.

    Each pass finds the latest TLM rate script and, once the following
    month has completely elapsed, downloads the TLM file from the Elexon
    Portal and inserts a rate script covering that month.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: skip this pass if an import is running.
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check TLMs.")
                contract = Contract.get_non_core_by_name(sess, 'tlms')
                latest_rs = sess.query(RateScript).filter(
                    RateScript.contract_id == contract.id).order_by(
                    RateScript.start_date.desc()).first()
                latest_rs_id = latest_rs.id
                # The month after the latest rate script is the one to
                # import next.
                next_month_start = latest_rs.start_date + \
                    relativedelta(months=1)
                next_month_finish = latest_rs.start_date + \
                    relativedelta(months=2) - HH
                now = utc_datetime_now()
                # Only fetch once the month has fully elapsed.
                if now > next_month_start:
                    self.log(
                        "Checking to see if data is available from " +
                        str(next_month_start) + " to " +
                        str(next_month_finish) + " on Elexon Portal.")
                    config = Contract.get_non_core_by_name(
                        sess, 'configuration')
                    props = config.make_properties()
                    scripting_key = props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                    if scripting_key is None:
                        raise BadRequest(
                            "The property " +
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                            " cannot be found in the configuration " +
                            "properties.")
                    contract_props = contract.make_properties()
                    url_str = ''.join(
                        (
                            contract_props['url'],
                            'file/download/TLM_FILE?key=',
                            scripting_key))
                    # Fix: a timeout stops a dead server hanging the
                    # importer thread indefinitely (matches the RCRC
                    # importer).
                    r = requests.get(url_str, timeout=60)
                    parser = csv.reader(
                        (l.decode() for l in r.iter_lines()),
                        delimiter=',', quotechar='"')
                    self.log("Opened " + url_str + ".")
                    next(parser, None)  # Skip the title row.
                    month_tlms = {}
                    for values in parser:
                        hh_date_ct = to_ct(
                            Datetime.strptime(values[0], "%d/%m/%Y"))
                        hh_date = to_utc(hh_date_ct)
                        # Column 2 holds the settlement period number;
                        # each period is half an hour.
                        hh_date += relativedelta(
                            minutes=30*int(values[2]))
                        if next_month_start <= hh_date <= \
                                next_month_finish:
                            month_tlms[key_format(hh_date)] = {
                                'off-taking': values[3],
                                'delivering': values[4]}
                    # If the final half-hour of the month is present,
                    # assume the whole month has been published.
                    if key_format(next_month_finish) in month_tlms:
                        self.log("The whole month's data is there.")
                        script = "def tlms():\n return {\n" + \
                            ',\n'.join(
                                "'" + k + "': " +
                                month_tlms[k]['off-taking']
                                for k in sorted(
                                    month_tlms.keys())) + "}"
                        contract = Contract.get_non_core_by_name(
                            sess, 'tlms')
                        rs = RateScript.get_by_id(sess, latest_rs_id)
                        # Close off the latest rate script at the end of
                        # the new month, then add the new script.
                        contract.update_rate_script(
                            sess, rs, rs.start_date,
                            rs.start_date +
                            relativedelta(months=2) - HH, rs.script)
                        sess.flush()
                        contract.insert_rate_script(
                            sess,
                            rs.start_date + relativedelta(months=1),
                            script)
                        sess.commit()
                        self.log("Added new rate script.")
                    else:
                        msg = "There isn't a whole month there yet."
                        if len(month_tlms) > 0:
                            # Fix: leading space so the two sentences
                            # don't run together in the log.
                            msg += " The last date is " + \
                                sorted(month_tlms.keys())[-1]
                        self.log(msg)
            except BaseException:
                # Catch everything so one bad pass can't kill the
                # importer thread; log and roll back instead.
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking TLM rates.")
        # Sleep for half an hour (or until woken) before the next pass.
        self.going.wait(30 * 60)
        self.going.clear()
def fetch_cvs(sess, log_f):
    """Import the next month of gas calorific values (CVs) per LDZ.

    Looks at the 'g_cv' non-core contract, works out the month after its
    latest rate script and, once that month (plus a day's margin) has
    elapsed, downloads daily LDZ calorific values from the configured URL
    and appends a new rate script covering the month.

    :param sess: SQLAlchemy session.
    :param log_f: callable taking a single string, used for logging.
    """
    log_f("Starting to check GCv rates.")
    contract = Contract.get_non_core_by_name(sess, "g_cv")
    latest_rs = (sess.query(RateScript).filter(
        RateScript.contract == contract).order_by(
        RateScript.start_date.desc()).first())
    latest_rs_id = latest_rs.id
    latest_rs_start_date_ct = to_ct(latest_rs.start_date)
    # The second of the two months starting at the latest rate script is
    # the month to import next.
    month_pairs = list(
        c_months_u(
            start_year=latest_rs_start_date_ct.year,
            start_month=latest_rs_start_date_ct.month,
            months=2,
        ))
    month_start, month_finish = month_pairs[1]
    now = utc_datetime_now()
    props = contract.make_properties()
    if not props.get("enabled", False):
        log_f("The automatic importer is disabled. To enable it, edit the "
              "contract properties to set 'enabled' to true.")
        return
    # Search a day either side of the month to be sure of covering it,
    # and wait until the whole search window has elapsed.
    search_start = month_start - relativedelta(days=1)
    search_finish = month_finish + relativedelta(days=1)
    if now <= search_finish:
        return
    url = props["url"]
    log_f(f"Checking to see if data is available "
          f"from {hh_format(search_start)} to "
          f"{hh_format(search_finish)} at {url}")
    res = requests.post(
        url,
        data={
            "LatestValue": "true",
            "PublicationObjectIds": "408:28,+408:5328,+408:5320,+408:5291,"
            "+408:5366,+408:5312,+408:5346,+408:5324,+408:5316,+408:5308,"
            "+408:5336,+408:5333,+408:5342,+408:5354,+408:82,+408:70,"
            "+408:59,+408:38,+408:49",
            "PublicationObjectStagingIds": "PUBOBJ1660,PUBOB4507,PUBOB4508,"
            "PUBOB4510,PUBOB4509,PUBOB4511,PUBOB4512,PUBOB4513,PUBOB4514,"
            "PUBOB4515,PUBOB4516,PUBOB4517,PUBOB4518,PUBOB4519,PUBOB4521,"
            "PUBOB4520,PUBOB4522,PUBOBJ1661,PUBOBJ1662",
            "Applicable": "applicableFor",
            "PublicationObjectCount": "19",
            "FromUtcDatetime": param_format(search_start),
            "ToUtcDateTime": param_format(search_finish),
            "FileType": "Csv",
        },
        # Fix: a timeout stops a dead server hanging the importer
        # indefinitely.
        timeout=60,
    )
    log_f(f"Received {res.status_code} {res.reason}")
    month_cv = defaultdict(dict)
    cf = csv.reader(res.text.splitlines())
    next(cf)  # Skip title row (binding the row was unnecessary).
    last_date = utc_datetime(1900, 1, 1)
    for row in cf:
        applicable_at_str = row[0]
        applicable_for_str = row[1]
        applicable_for = to_utc(
            to_ct(Datetime.strptime(applicable_for_str, "%d/%m/%Y")))
        data_item = row[2]
        value_str = row[3]
        if "LDZ" in data_item and \
                month_start <= applicable_for < month_finish:
            # Data item names end with the LDZ code in brackets,
            # e.g. '...(SE)': slice it out.
            ldz = data_item[-3:-1]
            cvs = month_cv[ldz]
            applicable_at = to_utc(
                to_ct(Datetime.strptime(
                    applicable_at_str, "%d/%m/%Y %H:%M:%S")))
            last_date = max(last_date, applicable_at)
            cv = Decimal(value_str)
            # Keep only the most recently published value for each day.
            try:
                existing = cvs[applicable_for.day]
                if applicable_at > existing["applicable_at"]:
                    existing["cv"] = cv
                    existing["applicable_at"] = applicable_at
            except KeyError:
                cvs[applicable_for.day] = {
                    "cv": cv,
                    "applicable_at": applicable_at
                }
    # Every LDZ should have the same number of days of data; a mismatch
    # means the month is still incomplete.
    all_equal = len(set(map(len, month_cv.values()))) <= 1
    if last_date + Timedelta(days=1) > month_finish and all_equal:
        log_f("The whole month's data is there.")
        script = {"cvs": month_cv}
        contract = Contract.get_non_core_by_name(sess, "g_cv")
        rs = RateScript.get_by_id(sess, latest_rs_id)
        # Close off the latest rate script at the end of the new month,
        # then insert the new script.
        contract.update_rate_script(
            sess, rs, rs.start_date, month_finish, loads(rs.script))
        sess.flush()
        contract.insert_rate_script(sess, month_start, script)
        sess.commit()
        log_f("Added new rate script.")
    else:
        log_f(f"There isn't a whole month there yet. The "
              f"last date is {hh_format(last_date)}.")