def run(self):
    """Background thread loop for the BSUoS rate importer.

    On each pass: if the inter-thread lock is free, open a DB session,
    read the 'bsuos' non-core contract's properties and, when enabled,
    process each configured (and optionally discovered) URL.  Any
    failure is logged, sets a global alert and rolls the session back.
    Sleeps 24 hours between passes.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: skip this pass if another run is active.
        if self.lock.acquire(False):
            sess = self.global_alert = None
            try:
                sess = Session()
                self.log("Starting to check BSUoS rates.")
                contract = Contract.get_non_core_by_name(sess, 'bsuos')
                props = contract.make_properties()
                if props.get('enabled', False):
                    # Merge statically configured URLs with discovered ones.
                    urls = set(props.get('urls', []))
                    if props.get('discover_urls', False):
                        urls.update(_discover_urls(self.log))
                    url_list = sorted(urls)
                    self.log("List of URLs to process: " + str(url_list))
                    for url in url_list:
                        self.process_url(sess, url, contract)
                else:
                    self.log("The automatic importer is disabled. To "
                             "enable it, edit the contract properties to "
                             "set 'enabled' to True.")
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                self.global_alert = \
                    "There's a problem with the BSUoS automatic importer."
                # NOTE(review): if Session() itself failed, sess is None
                # here and this raises AttributeError — confirm acceptable.
                sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking BSUoS rates.")
        # Sleep between passes; clear() re-arms the event for the next wait.
        self.going.wait(60 * 60 * 24)
        self.going.clear()
def run(self):
    """Parse the uploaded file and attach its raw bills to the gas batch.

    Each raw bill is inserted in its own nested try so one bad bill is
    rolled back and recorded in ``self.failed_bills`` without aborting
    the rest.  Successful bills accumulate in ``self.successful_bills``.
    """
    sess = None
    try:
        self._log(
            "Starting to parse the file with '" + self.parser_name + "'.")
        sess = Session()
        g_batch = GBatch.get_by_id(sess, self.g_batch_id)
        raw_bills = self.parser.make_raw_bills()
        self._log(
            "Successfully parsed the file, and now I'm starting to "
            "insert the raw bills.")
        # bill_num is stored on self so progress is visible to other threads.
        for self.bill_num, raw_bill in enumerate(raw_bills):
            try:
                bill_type = BillType.get_by_code(
                    sess, raw_bill['bill_type_code'])
                g_bill = g_batch.insert_g_bill(
                    sess, bill_type, raw_bill['mprn'],
                    raw_bill['reference'], raw_bill['account'],
                    raw_bill['issue_date'], raw_bill['start_date'],
                    raw_bill['finish_date'], raw_bill['kwh'],
                    raw_bill['net_gbp'], raw_bill['vat_gbp'],
                    raw_bill['gross_gbp'], raw_bill['raw_lines'],
                    raw_bill['breakdown'])
                sess.flush()
                for raw_read in raw_bill['reads']:
                    prev_type = GReadType.get_by_code(
                        sess, raw_read['prev_type_code'])
                    pres_type = GReadType.get_by_code(
                        sess, raw_read['pres_type_code'])
                    g_units = GUnits.get_by_code(sess, raw_read['units'])
                    g_read = g_bill.insert_g_read(
                        sess, raw_read['msn'], raw_read['prev_value'],
                        raw_read['prev_date'], prev_type,
                        raw_read['pres_value'], raw_read['pres_date'],
                        pres_type, g_units, raw_read['correction_factor'],
                        raw_read['calorific_value'])
                    # Detach from the session to keep the identity map small.
                    sess.expunge(g_read)
                sess.commit()
                self.successful_bills.append(raw_bill)
                sess.expunge(g_bill)
            except BadRequest as e:
                # One bad bill: undo it and carry on with the next.
                sess.rollback()
                raw_bill['error'] = e.description
                self.failed_bills.append(raw_bill)
        if len(self.failed_bills) == 0:
            self._log(
                "All the bills have been successfully loaded and attached "
                "to the batch.")
        else:
            self._log(
                "The import has finished, but " +
                str(len(self.failed_bills)) + " bills failed to load.")
    # Was a bare ``except:``, which also silently caught SystemExit and
    # KeyboardInterrupt; catch BaseException explicitly instead, matching
    # the other importer threads in this file.
    except BaseException:
        self._log("I've encountered a problem: " + traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
def _import_tlms(log_func):
    """Download the Elexon Portal TLM file and import its lines.

    Reads the 'tlms' non-core contract's properties; when enabled,
    fetches the CSV using the scripting key from the 'configuration'
    contract and feeds each data line to _process_line, saving the
    accumulated cache at the end.  log_func is a callable taking a
    single message string.
    """
    sess = None
    cache = {}
    try:
        sess = Session()
        log_func("Starting to check TLMs.")
        contract = Contract.get_non_core_by_name(sess, "tlms")
        contract_props = contract.make_properties()
        if contract_props.get("enabled", False):
            config = Contract.get_non_core_by_name(sess, "configuration")
            props = config.make_properties()
            scripting_key = props.get(ELEXON_PORTAL_SCRIPTING_KEY_KEY)
            if scripting_key is None:
                raise BadRequest(
                    "The property " + ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                    " cannot be found in the configuration properties.")
            url_str = "".join(
                (contract_props["url"], "file/download/TLM_FILE?key=",
                 scripting_key))
            # Stream the response line by line through the CSV parser.
            r = requests.get(url_str)
            parser = csv.reader(
                (x.decode() for x in r.iter_lines()), delimiter=",",
                quotechar='"')
            log_func("Opened " + url_str + ".")
            # Skip the header row.
            next(parser, None)
            for i, values in enumerate(parser):
                if values[3] == "":
                    # Blank zone field: apply the line to every GSP group.
                    for zone in GSP_GROUP_LOOKUP.keys():
                        values[3] = zone
                        _process_line(
                            cache, sess, contract, log_func, values)
                else:
                    _process_line(cache, sess, contract, log_func, values)
            _save_cache(sess, cache)
        else:
            log_func("The importer is disabled. Set 'enabled' to "
                     "'true' in the properties to enable it.")
    except BadRequest as e:
        log_func("Problem: " + e.description)
        # NOTE(review): sess is None here if Session() failed — confirm.
        sess.rollback()
    except BaseException:
        log_func("Outer problem " + traceback.format_exc())
        sess.rollback()
    finally:
        if sess is not None:
            sess.close()
        log_func("Finished checking TLM rates.")
def run(self):
    """Background thread loop for the bmarketidx rate importer.

    Finds the latest rate script for the 'bmarketidx' contract and, once
    the month after its start has fully elapsed, hands that month to
    _process_month.  Failures are logged, rolled back and raise a global
    alert.  Sleeps two hours between passes.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: skip this pass if another run is active.
        if self.lock.acquire(False):
            sess = self.global_alert = None
            try:
                sess = Session()
                self.log("Starting to check bmarketidx.")
                contract = Contract.get_non_core_by_name(sess, "bmarketidx")
                latest_rs = (
                    sess.query(RateScript)
                    .filter(RateScript.contract_id == contract.id)
                    .order_by(RateScript.start_date.desc())
                    .first()
                )
                start_ct = to_ct(latest_rs.start_date)
                # months[1] is the month after the latest script's month.
                months = list(
                    c_months_u(
                        start_year=start_ct.year,
                        start_month=start_ct.month,
                        months=2,
                    )
                )
                month_start, month_finish = months[1]
                now = utc_datetime_now()
                # Only import once the whole month has passed.
                if now > month_finish:
                    _process_month(
                        self.log,
                        sess,
                        contract,
                        latest_rs,
                        month_start,
                        month_finish,
                    )
            except BaseException:
                self.log(f"Outer problem {traceback.format_exc()}")
                sess.rollback()
                self.global_alert = (
                    "There's a problem with the "
                    "bmarketidx automatic importer."
                )
            finally:
                self.lock.release()
                self.log("Finished checking bmarketidx rates.")
                if sess is not None:
                    sess.close()
        self.going.wait(2 * 60 * 60)
        self.going.clear()
def run(self):
    """Poll loop: when the lock is free, run one GCv check pass.

    Opens a fresh session for run_inner each pass, rolling back and
    logging on any failure; always closes the session, releases the
    lock and then sleeps for half an hour before the next pass.
    """
    while not self.stopped.isSet():
        got_lock = self.lock.acquire(False)
        if got_lock:
            session = None
            try:
                session = Session()
                self.run_inner(session)
            except BaseException:
                self.log(f"Outer problem {traceback.format_exc()}")
                session.rollback()
            finally:
                if session is not None:
                    session.close()
                self.lock.release()
                self.log("Finished checking GCv rates.")
        self.going.wait(30 * 60)
        self.going.clear()
def run(self):
    """Background thread loop for the GCv importer.

    When the lock is free, opens a session and delegates the pass to
    run_inner; any failure is logged and rolled back.  The session is
    always closed and the lock released.  Sleeps 30 minutes between
    passes.
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.run_inner(sess)
            # Was a bare ``except:``, which also silently caught SystemExit
            # and KeyboardInterrupt; use BaseException explicitly to match
            # the otherwise-identical sibling loop in this file.
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                # Guard: Session() itself may have failed, leaving sess None.
                if sess is not None:
                    sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking GCv rates.")
        self.going.wait(30 * 60)
        self.going.clear()
def run(self):
    """Process each uploaded zip file, recording progress and errors.

    Iterates self.zips, updating self.progress['file_number'] so other
    threads can observe how far along the run is.  On failure the
    session is rolled back and the message is stored in
    self.error_message under self.rd_lock.
    """
    sess = None
    try:
        sess = Session()
        for file_number, zfile in enumerate(self.zips):
            self.progress["file_number"] = file_number
            _process(sess, self.progress, zfile)
    except BadRequest as e:
        # Guard the rollback: if Session() itself raised, sess is still
        # None and calling rollback() would mask the real error with an
        # AttributeError escaping the thread.
        if sess is not None:
            sess.rollback()
        with self.rd_lock:
            self.error_message = e.description
    except BaseException:
        if sess is not None:
            sess.rollback()
        with self.rd_lock:
            self.error_message = traceback.format_exc()
    finally:
        if sess is not None:
            sess.close()
def run(self):
    """Background thread loop for the g_cv (gas calorific value) importer.

    When the lock is free, opens a session and runs fetch_cvs; any
    failure is logged, sets a global alert and rolls the session back.
    Sleeps 30 minutes between passes.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: skip this pass if another run is active.
        if self.lock.acquire(False):
            sess = self.global_alert = None
            try:
                sess = Session()
                fetch_cvs(sess, self.log)
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                self.global_alert = (
                    "There's a problem with the g_cv automatic importer.")
                # NOTE(review): sess is None here if Session() failed —
                # confirm this path is acceptable.
                sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking GCv rates.")
        self.going.wait(30 * 60)
        self.going.clear()
def run(self):
    """Import half-hourly data from self.istream via a format parser.

    Selects a parser module named after the file extension (e.g.
    'chellow.hh_parser_<ext>'), builds a converter over the input
    stream, then bulk-inserts the HH data for the DC contract.  Errors
    are appended to self.messages rather than raised.
    """
    sess = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, self.dc_contract_id)
        # Rollback here ends the read transaction early, avoiding holding
        # locks while the (possibly slow) parser import happens below.
        sess.rollback()
        properties = contract.make_properties()
        mpan_map = properties.get("mpan_map", {})
        # conv_ext[0] looks like a '.ext' suffix; assumed — TODO confirm
        # against the caller that sets self.conv_ext.
        parser_module = importlib.import_module(
            "chellow.hh_parser_" + self.conv_ext[0][1:].replace(".", "_"))
        self.converter = parser_module.create_parser(
            self.istream, mpan_map)
        sess.rollback()
        HhDatum.insert(sess, self.converter, contract)
        sess.commit()
    except BadRequest as e:
        self.messages.append(e.description)
    except BaseException:
        self.messages.append("Outer problem " + traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
def run(self):
    """Background thread loop for the Elexon System Price importer.

    When enabled in the 'system_price' contract properties, works out
    the first half-hour still needing data (fill_start), downloads the
    BESTVIEWPRICES spreadsheet from the Elexon portal, extracts SBP/SSP
    values per half-hour grouped by month, and creates/updates one rate
    script per complete month.  Sleeps 24 hours between passes.
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = book = sbp_sheet = ssp_sheet = None
            try:
                sess = Session()
                self.log("Starting to check System Prices.")
                # ct_tz = pytz.timezone('Europe/London')
                contract = Contract.get_non_core_by_name(
                    sess, 'system_price')
                contract_props = contract.make_properties()
                if contract_props.get('enabled', False):
                    # Walk rate scripts newest-first to find where the
                    # final ('DF' run) data stops.
                    # NOTE(review): if no script matches either branch,
                    # fill_start is never bound and the log call below
                    # raises NameError — confirm data guarantees this
                    # can't happen.
                    for rscript in sess.query(RateScript).filter(
                            RateScript.contract == contract).order_by(
                            RateScript.start_date.desc()):
                        ns = json.loads(rscript.script)
                        rates = ns['gbp_per_nbp_mwh']
                        if len(rates) == 0:
                            fill_start = rscript.start_date
                            break
                        elif rates[
                                key_format(
                                    rscript.finish_date)]['run'] == 'DF':
                            fill_start = rscript.finish_date + HH
                            break
                    config = Contract.get_non_core_by_name(
                        sess, 'configuration')
                    config_props = config.make_properties()
                    scripting_key = config_props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                    if scripting_key is None:
                        raise BadRequest(
                            "The property " +
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                            " cannot be found in the configuration "
                            "properties.")
                    url_str = contract_props['url'] + \
                        'file/download/BESTVIEWPRICES_FILE?key=' + \
                        scripting_key
                    self.log(
                        "Downloading from " + url_str +
                        " and extracting data from " +
                        hh_format(fill_start))
                    url = urllib.parse.urlparse(url_str)
                    if url.scheme == 'https':
                        conn = http.client.HTTPSConnection(
                            url.hostname, url.port)
                    else:
                        conn = http.client.HTTPConnection(
                            url.hostname, url.port)
                    conn.request("GET", url.path + '?' + url.query)
                    res = conn.getresponse()
                    self.log(
                        "Received " + str(res.status) + " " + res.reason)
                    data = res.read()
                    # Close the connection once the body is fully read;
                    # previously it was left open until GC.
                    conn.close()
                    book = xlrd.open_workbook(file_contents=data)
                    sbp_sheet = book.sheet_by_index(1)
                    ssp_sheet = book.sheet_by_index(2)

                    # Each spreadsheet row covers one day; columns 2..51
                    # are the (up to 50) half-hours of that day.
                    sp_months = []
                    sp_month = None
                    for row_index in range(1, sbp_sheet.nrows):
                        sbp_row = sbp_sheet.row(row_index)
                        ssp_row = ssp_sheet.row(row_index)
                        raw_date = datetime.datetime(
                            *xlrd.xldate_as_tuple(
                                sbp_row[0].value, book.datemode))
                        hh_date_ct = to_ct(raw_date)
                        hh_date = to_utc(hh_date_ct)
                        run_code = sbp_row[1].value
                        for col_idx in range(2, 52):
                            if hh_date >= fill_start:
                                sbp_val = sbp_row[col_idx].value
                                if sbp_val != '':
                                    # First HH of a calendar month opens
                                    # a new month bucket.
                                    if hh_date.day == 1 and \
                                            hh_date.hour == 0 and \
                                            hh_date.minute == 0:
                                        sp_month = {}
                                        sp_months.append(sp_month)
                                    ssp_val = ssp_row[col_idx].value
                                    sp_month[hh_date] = {
                                        'run': run_code,
                                        'sbp': sbp_val,
                                        'ssp': ssp_val}
                            hh_date += HH
                    self.log("Successfully extracted data.")
                    # Drop the trailing month if it's incomplete.
                    last_date = sorted(sp_months[-1].keys())[-1]
                    if last_date.month == (last_date + HH).month:
                        del sp_months[-1]
                    if 'limit' in contract_props:
                        sp_months = sp_months[0:1]
                    for sp_month in sp_months:
                        sorted_keys = sorted(sp_month.keys())
                        month_start = sorted_keys[0]
                        month_finish = sorted_keys[-1]
                        rs = sess.query(RateScript).filter(
                            RateScript.contract == contract,
                            RateScript.start_date == month_start).first()
                        if rs is None:
                            self.log(
                                "Adding a new rate script starting at " +
                                hh_format(month_start) + ".")
                            latest_rs = sess.query(RateScript).filter(
                                RateScript.contract == contract).order_by(
                                RateScript.start_date.desc()).first()
                            contract.update_rate_script(
                                sess, latest_rs, latest_rs.start_date,
                                month_finish, latest_rs.script)
                            rs = contract.insert_rate_script(
                                sess, month_start, '')
                            sess.flush()
                        script = {
                            'gbp_per_nbp_mwh': dict(
                                (key_format(k), v)
                                for k, v in sp_month.items())}
                        self.log(
                            "Updating rate script starting at " +
                            hh_format(month_start) + ".")
                        # NOTE(review): indent=' ' may originally have
                        # been several spaces (whitespace collapsed in
                        # transit) — verify against version control.
                        contract.update_rate_script(
                            sess, rs, rs.start_date, rs.finish_date,
                            json.dumps(
                                script, indent=' ', sort_keys=True))
                        sess.commit()
                else:
                    self.log(
                        "The automatic importer is disabled. To "
                        "enable it, edit the contract properties to "
                        "set 'enabled' to True.")
            # Was a bare ``except:``; catch BaseException explicitly,
            # matching the other importer threads in this file.
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                # Drop spreadsheet references promptly to free memory.
                book = sbp_sheet = ssp_sheet = None
                self.lock.release()
                self.log("Finished checking System Price rates.")
                if sess is not None:
                    sess.close()
        self.going.wait(24 * 60 * 60)
        self.going.clear()
def none_content(site_id, start_date, finish_date, user, file_name):
    """Write a zip of per-site daily 'used' kWh CSVs for a date range.

    For every physical site with an era overlapping the range, builds a
    CSV (one row per day, 48 half-hour columns of used kWh) and adds it
    to a zip in the downloads area, renaming it to its finished name at
    the end.
    """
    sess = zf = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            file_name, user)
        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            SiteEra.is_physical == true(),
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date)
        zf = zipfile.ZipFile(running_name, 'w')
        start_date_str = hh_format(start_date)
        finish_date_str = hh_format(finish_date)
        for site in sites:
            buf = StringIO()
            writer = csv.writer(buf, lineterminator='\n')
            writer.writerow([
                "Site Code", "Site Name", "Associated Site Codes",
                "Sources", "Generator Types", "From", "To", "Type",
                "Date"] + list(map(str, range(1, 49))))
            associates = ' '.join(
                s.code for s in site.find_linked_sites(
                    sess, start_date, finish_date))
            source_codes = set()
            gen_types = set()
            for supply in sess.query(Supply).join(Era).join(
                    SiteEra).filter(
                    SiteEra.is_physical == true(), SiteEra.site == site,
                    Era.start_date <= finish_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date)).distinct().options(
                    joinedload(Supply.source),
                    joinedload(Supply.generator_type)):
                source_codes.add(supply.source.code)
                gen_type = supply.generator_type
                if gen_type is not None:
                    gen_types.add(gen_type.code)
            source_codes_str = ', '.join(sorted(source_codes))
            gen_types_str = ', '.join(sorted(gen_types))
            # Accumulate one CSV row per day; a new row starts at the
            # first half-hour of each day (00:00).
            row = None
            for hh in site.hh_data(sess, start_date, finish_date):
                hh_start = hh['start_date']
                if hh_start.hour == 0 and hh_start.minute == 0:
                    if row is not None:
                        writer.writerow(row)
                    row = [
                        site.code, site.name, associates,
                        source_codes_str, gen_types_str, start_date_str,
                        finish_date_str, 'used',
                        hh_start.strftime('%Y-%m-%d')]
                used_gen_kwh = hh['imp_gen'] - hh['exp_net'] - hh['exp_gen']
                used_3p_kwh = hh['imp_3p'] - hh['exp_3p']
                used_kwh = hh['imp_net'] + used_gen_kwh + used_3p_kwh
                row.append(str(round(used_kwh, 2)))
            if row is not None:
                writer.writerow(row)
            # NOTE(review): '%Y%m%d%M%H' puts minute before hour — looks
            # like it was meant to be '%H%M'.  Left unchanged because the
            # generated filenames may be relied upon downstream; confirm.
            zf.writestr(
                site.code + '_' +
                finish_date.strftime('%Y%m%d%M%H') + '.csv',
                buf.getvalue())
            # Avoid long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # Bug fix: was ``zf.write(msg)`` — ZipFile.write() expects a
        # *filename* argument, so the handler itself crashed.  Store the
        # traceback in the archive as a text member instead, guarding
        # against zf being None if the failure preceded its creation.
        if zf is not None:
            zf.writestr('error.txt', msg)
    finally:
        if sess is not None:
            sess.close()
        if zf is not None:
            zf.close()
            os.rename(running_name, finished_name)
def content(year, supply_id, user):
    """Write the CRC report CSV for the year starting 1 April ``year``.

    For each net/gen-net import supply (or just ``supply_id`` if given),
    totals kWh and 'normal days' per meter category (hh, amr, nhh,
    unmetered) across the CRC year and writes one row per supply.  NHH
    consumption is estimated from pairs of actual register reads; HH/AMR
    gaps are filled pro-rata from the year's average.
    """
    f = sess = None
    try:
        sess = Session()
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        # Read type codes counted as 'actual' (not estimated) reads.
        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        w.writerow(
            (
                'Chellow Supply Id', 'Report Start', 'Report Finish',
                'MPAN Core', 'Site Id', 'Site Name', 'From', 'To',
                'NHH Breakdown', 'Actual HH Normal Days',
                'Actual AMR Normal Days', 'Actual NHH Normal Days',
                'Actual Unmetered Normal Days', 'Max HH Normal Days',
                'Max AMR Normal Days', 'Max NHH Normal Days',
                'Max Unmetered Normal Days', 'Total Actual Normal Days',
                'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh',
                'NHH kWh', 'Unmetered kwh', 'HH Filled kWh',
                'AMR Filled kWh', 'Total kWh', 'Note'))
        # CRC year runs 1 April to 31 March.
        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH
        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')),
            Era.imp_mpan_core != null(),
            Era.start_date <= year_finish,
            or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(
            Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
        meter_types = ('hh', 'amr', 'nhh', 'unmetered')
        for supply in supplies:
            # Per-category accumulators for this supply.
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])
            breakdown = ''
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).order_by(
                Era.start_date).all()
            supply_from = hh_max(eras[0].start_date, year_start)
            supply_to = hh_min(eras[-1].finish_date, year_finish)
            for era in eras:
                meter_type = era.meter_category
                # Clamp the era to the report year.
                period_start = hh_max(era.start_date, year_start)
                period_finish = hh_min(era.finish_date, year_finish)
                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)
                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()
                if meter_type == 'nhh':
                    # Build a chronological list of actual reads around
                    # the period (backwards first, then forwards), then
                    # derive consumption-rate 'pairs' between reads.
                    read_list = []
                    read_keys = {}
                    pairs = []
                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))
                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads
                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            # Fetch the next usable 'present' read,
                            # skipping duplicates and reads whose bill
                            # has been superseded.
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break
                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join(
                                    [str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue
                                pres_bill = sess.query(Bill).join(
                                    BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(
                                        RegisterRead).filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill == pres_bill,
                                        RegisterRead.present_date ==
                                        pres_date,
                                        RegisterRead.msn == pres_msn))
                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            # Likewise for the next 'previous' read.
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break
                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join(
                                    [str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill).join(
                                    BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(
                                        RegisterRead).filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id ==
                                        prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))
                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None
                            # Merge the two streams in date order.
                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                            # Turn the two most recent compatible reads
                            # (same meter, same TPRs) into a consumption
                            # pair with a per-HH rate for each TPR.
                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]
                                if aft_read['msn'] == fore_read['msn'] \
                                        and set(
                                            aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']
                                    num_hh = (
                                        (
                                            pair_finish_date + HH -
                                            pair_start_date
                                        ).total_seconds()) / (30 * 60)
                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].items():
                                        end_val = fore_read['reads'][
                                            tpr_code]
                                        kwh = end_val - initial_val
                                        # Negative delta: assume the
                                        # register rolled over.
                                        if kwh < 0:
                                            digits = int(
                                                math.log10(
                                                    initial_val)) + 1
                                            kwh = 10 ** digits + kwh
                                        tprs[tpr_code] = kwh / num_hh
                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date':
                                            pair_finish_date,
                                            'tprs': tprs})
                            # Stop once the period is covered.
                            if len(pairs) > 0 and (
                                    not is_forwards or (
                                        is_forwards and
                                        read_list[-1]['date'] >
                                        period_finish)):
                                break
                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        # No usable reads: one zero-rate pair as filler.
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        # Count days actually covered by read pairs.
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                block_start = hh_max(
                                    pair_start, period_start)
                                block_finish = hh_min(
                                    pair_finish, period_finish)
                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (
                                            block_finish - block_start
                                        ).total_seconds() +
                                        60 * 30) / (60 * 60 * 24)
                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = \
                            pairs[i]['start-date'] - HH
                    # stretch
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]
                    # squash
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    for pair in pairs:
                        pair_hhs = (
                            (
                                pair['finish-date'] - pair['start-date']
                            ).total_seconds() + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in pair['tprs'].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs
                    breakdown += 'pairs - \n' + str(pairs)
                elif meter_type in ('hh', 'amr'):
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish).order_by(
                            HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish).order_by(
                            HhDatum.id))
                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (
                        period_finish + HH - period_start
                    ).total_seconds() / (60 * 30)
                    # Fill missing half-hours pro-rata from the year's
                    # average half-hourly consumption.
                    if year_len_kwhs > 0:
                        filled_kwh[meter_type] += year_sum_kwhs / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
                    # 'A' (actual) status half-hours count as normal days.
                    normal_days[meter_type] += sess.query(
                        func.count(HhDatum.value)).join(Channel). \
                        filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0] / 48
                elif meter_type == 'unmetered':
                    year_seconds = (
                        year_finish - year_start).total_seconds() + 60 * 30
                    period_seconds = (
                        period_finish - period_start).total_seconds() + \
                        60 * 30
                    total_kwh[meter_type] += era.imp_sc * \
                        period_seconds / year_seconds
                    normal_days[meter_type] += period_seconds / \
                        (60 * 60 * 24)
            # for full year 183
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            # 'Actual' if at least half (183/365) of possible days have
            # actual data.
            is_normal = total_normal_days / total_max_normal_days >= \
                183 / 365
            w.writerow(
                [
                    supply.id, hh_format(year_start),
                    hh_format(year_finish), mpan_core, site.code,
                    site.name, hh_format(supply_from),
                    hh_format(supply_to), breakdown] +
                [normal_days[t] for t in meter_types] +
                [max_normal_days[t] for t in meter_types] +
                [
                    total_normal_days, total_max_normal_days,
                    "Actual" if is_normal else "Estimated"] +
                [total_kwh[t] for t in meter_types] +
                [filled_kwh[t] for t in ('hh', 'amr')] +
                [sum(total_kwh.values()) + sum(filled_kwh.values()), ''])
            # avoid a long running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        # NOTE(review): f is None here if open() was never reached.
        f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def make_raw_bills(self):
    """Parse the DC invoice spreadsheet into a list of raw bill dicts.

    Reads the issue date from row 6, treats row 10 as titles, and
    processes data rows from row 11 until a blank MPAN cell.  Each row
    yields one bill dict with a breakdown of CoP, ad-hoc and annual
    visit charges.  A BadRequest from parsing is re-raised with the
    offending row number prepended.
    """
    row_index = None
    sess = None
    try:
        sess = Session()
        bills = []
        title_row = self.sheet.row(10)
        # Issue date cell carries a prefix before the actual timestamp;
        # assumed 6 characters long — TODO confirm against a sample file.
        issue_date_str = get_str(self.sheet.row(6), 0)
        issue_date = Datetime.strptime(
            issue_date_str[6:], "%d/%m/%Y %H:%M:%S")
        for row_index in range(11, self.sheet.nrows):
            row = self.sheet.row(row_index)
            val = get_value(row, 1)
            # Blank MPAN cell marks the end of the data rows.
            if val is None or val == '':
                break
            self._set_last_line(row_index, val)
            mpan_core = parse_mpan_core(str(get_int(row, 1)))
            start_date = get_date(row, 3, self.book.datemode)
            # Finish date cell is a day; extend to the day's last HH.
            finish_date = get_date(row, 4, self.book.datemode) + \
                Timedelta(days=1) - HH
            # Find the era covering the bill period, falling back to the
            # supply's latest era, to derive the DC account.
            era = sess.query(Era).filter(
                or_(
                    Era.imp_mpan_core == mpan_core,
                    Era.exp_mpan_core == mpan_core),
                Era.start_date <= finish_date,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date > start_date)).order_by(
                Era.start_date).first()
            if era is None:
                era = sess.query(Era).filter(
                    or_(
                        Era.imp_mpan_core == mpan_core,
                        Era.exp_mpan_core == mpan_core)).order_by(
                    Era.start_date.desc()).first()
            if era is None:
                account = mpan_core + '/DC'
            else:
                account = era.dc_account
            net = round(get_dec(row, 31), 2)
            cop_3_meters = get_int(row, 6)
            cop_3_rate = get_dec(row, 7)
            cop_3_gbp = get_dec(row, 8)
            # Cop 5 meters
            get_int(row, 9)
            cop_5_rate = get_dec(row, 10)
            cop_5_gbp = get_dec(row, 11)
            ad_hoc_visits = get_dec(row, 21)
            ad_hoc_rate = get_dec(row, 22)
            ad_hoc_gbp = get_dec(row, 23)
            annual_visits = get_int(row, 27)
            annual_rate = get_dec(row, 28)
            annual_gbp = get_dec(row, 29)
            annual_date = hh_format(
                get_date(row, 30, self.book.datemode))
            # Rows bill either CoP 3 or CoP 5 meters, never both.
            if cop_3_meters > 0:
                cop = '3'
                mpan_rate = cop_3_rate
                mpan_gbp = cop_3_gbp
            else:
                cop = '5'
                mpan_rate = cop_5_rate
                mpan_gbp = cop_5_gbp
            breakdown = {
                'raw_lines': [str(title_row)], 'cop': [cop],
                'settlement-status': ['settlement'],
                'mpan-rate': [mpan_rate], 'mpan-gbp': mpan_gbp,
                'ad-hoc-visits': ad_hoc_visits,
                'ad-hoc-rate': [ad_hoc_rate], 'ad-hoc-gbp': ad_hoc_gbp,
                'annual-visits-count': annual_visits,
                'annual-visits-rate': [annual_rate],
                'annual-visits-gbp': annual_gbp,
                'annual-visits-date': [annual_date]
            }
            bills.append({
                'bill_type_code': 'N', 'kwh': Decimal(0),
                'vat': Decimal('0.00'), 'net': net, 'gross': net,
                'reads': [], 'breakdown': breakdown, 'account': account,
                'issue_date': issue_date, 'start_date': start_date,
                'finish_date': finish_date, 'mpans': [mpan_core],
                'reference': '_'.join(
                    (
                        start_date.strftime('%Y%m%d'),
                        finish_date.strftime('%Y%m%d'),
                        issue_date.strftime('%Y%m%d'), mpan_core))
            })
        # Read-only pass: discard the transaction.
        sess.rollback()
    except BadRequest as e:
        raise BadRequest(
            "Row number: " + str(row_index) + " " + e.description)
    finally:
        if sess is not None:
            sess.close()
    return bills
def run(self):
    """Background thread loop for the RCRC rate importer.

    Once the month following the latest 'rcrc' rate script has fully
    elapsed, downloads the RCRC file from the Elexon Portal, collects
    that month's half-hourly rates and, if the month is complete,
    closes the latest script at month end and inserts a new one.
    Sleeps 30 minutes between passes.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: skip this pass if another run is active.
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check RCRCs.")
                contract = Contract.get_non_core_by_name(sess, "rcrc")
                latest_rs = (
                    sess.query(RateScript)
                    .filter(RateScript.contract_id == contract.id)
                    .order_by(RateScript.start_date.desc())
                    .first()
                )
                latest_rs_id = latest_rs.id
                latest_rs_start = latest_rs.start_date
                # Target month is the one after the latest script's start.
                month_start = latest_rs_start + relativedelta(months=1)
                month_finish = month_start + relativedelta(months=1) - HH
                now = utc_datetime_now()
                if now > month_finish:
                    self.log(
                        "Checking to see if data is available from "
                        + hh_format(month_start)
                        + " to "
                        + hh_format(month_finish)
                        + " on Elexon Portal."
                    )
                    config = Contract.get_non_core_by_name(
                        sess, "configuration")
                    props = config.make_properties()
                    scripting_key = props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                    if scripting_key is None:
                        raise BadRequest(
                            "The property "
                            + ELEXON_PORTAL_SCRIPTING_KEY_KEY
                            + " cannot be found in the configuration "
                            "properties."
                        )
                    contract_props = contract.make_properties()
                    url_str = "".join(
                        (
                            contract_props["url"],
                            "file/download/RCRC_FILE?key=",
                            scripting_key,
                        )
                    )
                    r = requests.get(url_str, timeout=60)
                    parser = csv.reader(
                        (x.decode() for x in r.iter_lines()),
                        delimiter=",",
                        quotechar='"',
                    )
                    # Skip the two header lines.
                    next(parser)
                    next(parser)
                    month_rcrcs = {}
                    for values in parser:
                        # Columns: date, ?, settlement period, rate.
                        hh_date = utc_datetime_parse(
                            values[0], "%d/%m/%Y")
                        hh_date += relativedelta(
                            minutes=30 * int(values[2]))
                        if month_start <= hh_date <= month_finish:
                            month_rcrcs[key_format(hh_date)] = Decimal(
                                values[3])
                    # The month is complete when its final HH is present.
                    if key_format(month_finish) in month_rcrcs:
                        self.log("The whole month's data is there.")
                        script = {"rates": month_rcrcs}
                        contract = Contract.get_non_core_by_name(
                            sess, "rcrc")
                        rs = RateScript.get_by_id(sess, latest_rs_id)
                        contract.update_rate_script(
                            sess, rs, rs.start_date, month_finish,
                            loads(rs.script)
                        )
                        contract.insert_rate_script(
                            sess, month_start, script)
                        sess.commit()
                        self.log(
                            "Added a new rate script starting at "
                            + hh_format(month_start)
                            + "."
                        )
                    else:
                        msg = "There isn't a whole month there yet."
                        if len(month_rcrcs) > 0:
                            msg += (
                                " The last date is "
                                + sorted(month_rcrcs.keys())[-1]
                            )
                        self.log(msg)
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                self.lock.release()
                self.log("Finished checking RCRC rates.")
                if sess is not None:
                    sess.close()
        self.going.wait(30 * 60)
        self.going.clear()
def content(site_id, g_supply_id, user, compression, finish_year,
            finish_month, months, now=None):
    """Write the g_monthly_duration ODS report to the downloads area.

    For each month in the range and each physical site (optionally
    narrowed to one site and/or one gas supply), evaluates each gas
    era's virtual bill and apportions real bills by overlap, producing
    one sheet of per-era rows and one of per-site totals.
    """
    if now is None:
        now = ct_datetime_now()
    report_context = {}
    sess = None
    month_list = list(
        c_months_u(
            finish_year=finish_year, finish_month=finish_month,
            months=months))
    start_date, finish_date = month_list[0][0], month_list[-1][-1]
    try:
        sess = Session()
        base_name = [
            "g_monthly_duration",
            hh_format(start_date).replace(" ", "_").replace(
                ":", "").replace("-", ""),
            "for",
            str(months),
            "months",
        ]
        forecast_from = chellow.computer.forecast_date()
        sites = (sess.query(Site).join(SiteGEra).join(GEra).filter(
            SiteGEra.is_physical == true()).distinct().order_by(
            Site.code))
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append("site")
            base_name.append(site.code)
        if g_supply_id is not None:
            g_supply = GSupply.get_by_id(sess, g_supply_id)
            base_name.append("g_supply")
            base_name.append(str(g_supply.id))
            sites = sites.filter(GEra.g_supply == g_supply)
        running_name, finished_name = chellow.dloads.make_names(
            "_".join(base_name) + ".ods", user)
        rf = open(running_name, "wb")
        site_rows = []
        g_era_rows = []
        era_header_titles = [
            "creation_date",
            "mprn",
            "supply_name",
            "exit_zone",
            "msn",
            "unit",
            "contract",
            "site_id",
            "site_name",
            "associated_site_ids",
            "month",
        ]
        site_header_titles = [
            "creation_date",
            "site_id",
            "site_name",
            "associated_site_ids",
            "month",
        ]
        summary_titles = ["kwh", "gbp", "billed_kwh", "billed_gbp"]
        # Union of virtual-bill titles across all relevant contracts.
        vb_titles = []
        conts = (sess.query(GContract).join(GEra).join(GSupply).filter(
            GEra.start_date <= finish_date,
            or_(GEra.finish_date == null(),
                GEra.finish_date >= start_date),
        ).distinct().order_by(GContract.id))
        if g_supply_id is not None:
            conts = conts.filter(GEra.g_supply_id == g_supply_id)
        for cont in conts:
            title_func = chellow.computer.contract_func(
                report_context, cont, "virtual_bill_titles")
            if title_func is None:
                raise Exception(
                    "For the contract " + cont.name +
                    " there doesn't seem " +
                    "to be a 'virtual_bill_titles' function.")
            for title in title_func():
                if title not in vb_titles:
                    vb_titles.append(title)
        g_era_rows.append(
            era_header_titles + summary_titles + vb_titles)
        site_rows.append(site_header_titles + summary_titles)
        for month_start, month_finish in month_list:
            for site in sites.filter(
                    GEra.start_date <= month_finish,
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >= month_start),
            ):
                site_kwh = site_gbp = site_billed_kwh = \
                    site_billed_gbp = 0
                for g_era in (sess.query(GEra).join(SiteGEra).filter(
                        SiteGEra.site == site,
                        SiteGEra.is_physical == true(),
                        GEra.start_date <= month_finish,
                        or_(
                            GEra.finish_date == null(),
                            GEra.finish_date >= month_start),
                ).options(
                        joinedload(GEra.g_contract),
                        joinedload(GEra.g_supply),
                        joinedload(GEra.g_supply).joinedload(
                            GSupply.g_exit_zone),
                ).order_by(GEra.id)):
                    g_supply = g_era.g_supply
                    if g_supply_id is not None and \
                            g_supply.id != g_supply_id:
                        continue
                    # Clamp the era to the month being reported.
                    ss_start = hh_max(g_era.start_date, month_start)
                    ss_finish = hh_min(g_era.finish_date, month_finish)
                    ss = GDataSource(
                        sess,
                        ss_start,
                        ss_finish,
                        forecast_from,
                        g_era,
                        report_context,
                        None,
                    )
                    contract = g_era.g_contract
                    vb_function = contract_func(
                        report_context, contract, "virtual_bill")
                    if vb_function is None:
                        raise BadRequest(
                            "The contract " + contract.name +
                            " doesn't have the virtual_bill() function.")
                    vb_function(ss)
                    bill = ss.bill
                    # Missing keys become zero and are reported in the
                    # bill's 'problem' field rather than aborting.
                    try:
                        gbp = bill["net_gbp"]
                    except KeyError:
                        gbp = 0
                        bill["problem"] += (
                            "For the supply " + ss.mprn +
                            " the virtual bill " + str(bill) +
                            " from the contract " + contract.name +
                            " does not contain the net_gbp key.")
                    try:
                        kwh = bill["kwh"]
                    except KeyError:
                        kwh = 0
                        bill["problem"] += (
                            "For the supply " + ss.mprn +
                            " the virtual bill " + str(bill) +
                            " from the contract " + contract.name +
                            " does not contain the 'kwh' key.")
                    # Apportion overlapping real bills by time overlap.
                    billed_kwh = billed_gbp = 0
                    g_era_associates = {
                        s.site.code for s in g_era.site_g_eras
                        if not s.is_physical
                    }
                    for g_bill in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= ss_finish,
                            GBill.finish_date >= ss_start,
                    ):
                        bill_start = g_bill.start_date
                        bill_finish = g_bill.finish_date
                        bill_duration = (
                            bill_finish - bill_start
                        ).total_seconds() + (30 * 60)
                        overlap_duration = (
                            min(bill_finish, ss_finish) -
                            max(bill_start, ss_start)
                        ).total_seconds() + (30 * 60)
                        overlap_proportion = \
                            overlap_duration / bill_duration
                        billed_kwh += overlap_proportion * float(
                            g_bill.kwh)
                        billed_gbp += overlap_proportion * float(
                            g_bill.net)
                    associated_site_ids = ",".join(
                        sorted(g_era_associates))
                    g_era_rows.append([
                        make_val(v) for v in [
                            now,
                            g_supply.mprn,
                            g_supply.name,
                            g_supply.g_exit_zone.code,
                            g_era.msn,
                            g_era.g_unit.code,
                            contract.name,
                            site.code,
                            site.name,
                            associated_site_ids,
                            month_finish,
                            kwh,
                            gbp,
                            billed_kwh,
                            billed_gbp,
                        ]
                    ] + [make_val(bill.get(t)) for t in vb_titles])
                    site_kwh += kwh
                    site_gbp += gbp
                    site_billed_kwh += billed_kwh
                    site_billed_gbp += billed_gbp
                linked_sites = ", ".join(
                    s.code for s in site.find_linked_sites(
                        sess, month_start, month_finish))
                site_rows.append([
                    make_val(v) for v in [
                        now,
                        site.code,
                        site.name,
                        linked_sites,
                        month_finish,
                        site_kwh,
                        site_gbp,
                        site_billed_kwh,
                        site_billed_gbp,
                    ]
                ])
        # Read-only report: end the transaction before writing the file.
        sess.rollback()
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BadRequest as e:
        site_rows.append(["Problem " + e.description])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    finally:
        if sess is not None:
            sess.close()
        # If finishing the download file itself fails, record the
        # traceback in an error file instead of losing it.
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names("error.txt", user)
            ef = open(r_name, "w")
            ef.write(msg + "\n")
            ef.close()
def content(year, supply_id, user):
    """Write the supplies_triad.csv report: one row per era with import and
    export TRIAD figures for the TRIAD year ending in March of `year`.

    year -- calendar year of the March in which the TRIAD season ends
    supply_id -- restrict the report to this supply, or None for all
    user -- used to name the download file

    The finished file is renamed into place only after writing completes.
    """
    caches = {}
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "supplies_triad.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        # The TRIAD season runs from November of the previous year to the
        # end of March of `year`; the bill is evaluated over March alone.
        march_start = to_utc(ct_datetime(year, 3, 1))
        march_finish = to_utc(ct_datetime(year, 4, 1)) - HH
        nov_start = to_utc(ct_datetime(year - 1, 11, 1))

        # Names whose half-hourly values are copied straight into the bill
        # (scalars) versus collected into sets (rates).
        scalar_names = {
            "triad-actual-gsp-kw",
            "triad-actual-gbp",
            "triad-estimate-gsp-kw",
            "triad-estimate-months",
            "triad-estimate-gbp",
            "triad-all-estimates-months",
            "triad-all-estimates-gbp",
        }
        rate_names = {"triad-actual-rate", "triad-estimate-rate"}
        for i in range(1, 4):
            for p in ("triad-actual-", "triad-estimate-"):
                act_pref = p + str(i) + "-"
                for suf in ("msp-kw", "gsp-kw"):
                    scalar_names.add(act_pref + suf)
                for suf in ("date", "status", "laf"):
                    rate_names.add(act_pref + suf)

        def triad_csv(supply_source):
            # Return the 19 TRIAD columns for one direction of a supply, or
            # blanks when there's no supply source (or it's a '99' pseudo
            # MPAN core).
            if supply_source is None or supply_source.mpan_core.startswith(
                    "99"):
                return [""] * 19
            chellow.duos.duos_vb(supply_source)
            chellow.triad.hh(supply_source)
            for hh in supply_source.hh_data:
                bill_hh = supply_source.supplier_bill_hhs[hh["start-date"]]
                for k in scalar_names & hh.keys():
                    bill_hh[k] = hh[k]
                for k in rate_names & hh.keys():
                    bill_hh[k] = {hh[k]}
            bill = reduce_bill_hhs(supply_source.supplier_bill_hhs)
            values = [supply_source.mpan_core]
            for i in range(1, 4):
                triad_prefix = "triad-actual-" + str(i)
                for suffix in [
                        "-date", "-msp-kw", "-status", "-laf", "-gsp-kw"]:
                    values.append(csv_make_val(bill[triad_prefix + suffix]))
            suffixes = ["gsp-kw", "rate", "gbp"]
            values += [
                csv_make_val(bill["triad-actual-" + suf]) for suf in suffixes
            ]
            return values

        writer.writerow((
            "Site Code", "Site Name", "Supply Name", "Source",
            "Generator Type", "Import MPAN Core", "Import T1 Date",
            "Import T1 MSP kW", "Import T1 Status", "Import T1 LAF",
            "Import T1 GSP kW", "Import T2 Date", "Import T2 MSP kW",
            "Import T2 Status", "Import T2 LAF", "Import T2 GSP kW",
            "Import T3 Date", "Import T3 MSP kW", "Import T3 Status",
            "Import T3 LAF", "Import T3 GSP kW", "Import GSP kW",
            "Import Rate GBP / kW", "Import GBP", "Export MPAN Core",
            "Export T1 Date", "Export T1 MSP kW", "Export T1 Status",
            "Export T1 LAF", "Export T1 GSP kW", "Export T2 Date",
            "Export T2 MSP kW", "Export T2 Status", "Export T2 LAF",
            "Export T2 GSP kW", "Export T3 Date", "Export T3 MSP kW",
            "Export T3 Status", "Export T3 LAF", "Export T3 GSP kW",
            "Export GSP kW", "Export Rate GBP / kW", "Export GBP",
        ))
        forecast_date = chellow.computer.forecast_date()
        eras = _make_eras(sess, nov_start, march_finish, supply_id)
        for era in eras:
            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.is_physical == true(),
                SiteEra.era == era).one())
            supply = era.supply
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_supply_source = None
            else:
                imp_supply_source = chellow.computer.SupplySource(
                    sess, march_start, march_finish, forecast_date, era,
                    True, caches)
            exp_mpan_core = era.exp_mpan_core
            if exp_mpan_core is None:
                exp_supply_source = None
            else:
                exp_supply_source = chellow.computer.SupplySource(
                    sess, march_start, march_finish, forecast_date, era,
                    False, caches)
            gen_type = supply.generator_type
            gen_type = "" if gen_type is None else gen_type.code
            vals = []
            for value in ([
                    site.code, site.name, supply.name, supply.source.code,
                    gen_type
            ] + triad_csv(imp_supply_source) + triad_csv(exp_supply_source)):
                if isinstance(value, Datetime):
                    vals.append(hh_format(value))
                else:
                    vals.append(str(value))
            writer.writerow(vals)

            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # The writer may not exist yet if the failure happened during
        # set-up; stderr above already has the traceback in that case.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, site_id, user):
    """Write a CSV of displaced-TRIAD figures for generator sites.

    For each site with generation ('gen' / 'gen-net' sources) active during
    March of `year`, compute the displaced era's TRIAD bill and emit one row.

    year -- calendar year of the March being reported
    site_id -- restrict to a single site, or None for all qualifying sites
    user -- used to name the download file
    """
    caches = {}
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'output.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        # NOTE(review): "Displaced TRIAD LAF" is missing the "1" that the
        # other columns carry; kept as-is since consumers may rely on it.
        writer.writerow(
            ("Site Code", "Site Name", "Displaced TRIAD 1 Date",
             "Displaced TRIAD 1 MSP kW", "Displaced TRIAD LAF",
             "Displaced TRIAD 1 GSP kW", "Displaced TRIAD 2 Date",
             "Displaced TRIAD 2 MSP kW", "Displaced TRIAD 2 LAF",
             "Displaced TRIAD 2 GSP kW", "Displaced TRIAD 3 Date",
             "Displaced TRIAD 3 MSP kW", "Displaced TRIAD 3 LAF",
             "Displaced TRIAD 3 GSP kW", "Displaced GSP kW",
             "Displaced Rate GBP / kW", "GBP"))
        march_finish = datetime.datetime(year, 4, 1, tzinfo=pytz.utc) - HH
        march_start = datetime.datetime(year, 3, 1, tzinfo=pytz.utc)
        forecast_date = chellow.computer.forecast_date()
        if site_id is None:
            # All sites with a generation era overlapping March.
            sites = sess.query(Site).join(SiteEra).join(Era).join(
                Supply).join(Source).filter(
                Source.code.in_(('gen', 'gen-net')),
                Era.start_date <= march_finish,
                or_(Era.finish_date == null(),
                    Era.finish_date >= march_start)).distinct()
        else:
            site = Site.get_by_id(sess, site_id)
            sites = sess.query(Site).filter(Site.id == site.id)
        for site in sites.order_by(Site.code):
            displaced_era = chellow.computer.displaced_era(
                sess, caches, site, march_start, march_finish,
                forecast_date)
            if displaced_era is None:
                continue
            site_ds = chellow.computer.SiteSource(
                sess, site, march_start, march_finish, forecast_date,
                caches, displaced_era)
            chellow.duos.duos_vb(site_ds)
            chellow.triad.hh(site_ds)
            chellow.triad.bill(site_ds)
            bill = site_ds.supplier_bill
            # Collapse single-valued rate sets into plain values.
            for rname, rset in site_ds.supplier_rate_sets.items():
                if len(rset) == 1:
                    bill[rname] = rset.pop()
            values = [site.code, site.name]
            for i in range(1, 4):
                triad_prefix = 'triad-actual-' + str(i)
                values.append(hh_format(bill[triad_prefix + '-date']))
                for suffix in ['-msp-kw', '-laf', '-gsp-kw']:
                    values.append(bill[triad_prefix + suffix])
            values += [
                str(bill['triad-actual-' + suf])
                for suf in ['gsp-kw', 'rate', 'gbp']
            ]
            writer.writerow(values)

            # Avoid long-running transaction
            sess.rollback()
    except BadRequest as e:
        # The writer may not exist yet if set-up failed; fall back to
        # stderr so the error isn't lost behind an AttributeError.
        if writer is None:
            sys.stderr.write(e.description + "\n")
        else:
            writer.writerow([e.description])
    except BaseException:
        msg = traceback.format_exc()
        if writer is None:
            sys.stderr.write(msg)
        else:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def run(self):
    """Import every file attached to the batch, inserting one bill (plus
    its register reads) per raw bill.

    Each bill is inserted inside a savepoint (`begin_nested`) so a failing
    bill only rolls back itself; the whole import commits only when every
    bill succeeded, otherwise everything is rolled back.  Successes and
    failures are accumulated on self.successful_bills / self.failed_bills.
    """
    sess = None
    try:
        sess = Session()
        batch = Batch.get_by_id(sess, self.batch_id)
        # Memoized lookups so repeated codes hit the database only once.
        bill_types = keydefaultdict(
            lambda k: BillType.get_by_code(sess, k))
        tprs = keydefaultdict(
            lambda k: None if k is None else Tpr.get_by_code(sess, k))
        read_types = keydefaultdict(
            lambda k: ReadType.get_by_code(sess, k))
        for bf in (sess.query(BatchFile).filter(
                BatchFile.batch == batch).order_by(
                BatchFile.upload_timestamp)):
            self.parser = _process_batch_file(sess, bf, self._log)
            for self.bill_num, raw_bill in enumerate(
                    self.parser.make_raw_bills()):
                if "error" in raw_bill:
                    # The parser already flagged this bill as bad.
                    self.failed_bills.append(raw_bill)
                else:
                    try:
                        mpan_core = raw_bill["mpan_core"]
                        supply = Supply.get_by_mpan_core(sess, mpan_core)
                        # Savepoint: a failure inside only undoes this bill.
                        with sess.begin_nested():
                            bill = batch.insert_bill(
                                sess,
                                raw_bill["account"],
                                raw_bill["reference"],
                                raw_bill["issue_date"],
                                raw_bill["start_date"],
                                raw_bill["finish_date"],
                                raw_bill["kwh"],
                                raw_bill["net"],
                                raw_bill["vat"],
                                raw_bill["gross"],
                                bill_types[raw_bill["bill_type_code"]],
                                raw_bill["breakdown"],
                                supply,
                            )
                            for raw_read in raw_bill["reads"]:
                                bill.insert_read(
                                    sess,
                                    tprs[raw_read["tpr_code"]],
                                    raw_read["coefficient"],
                                    raw_read["units"],
                                    raw_read["msn"],
                                    raw_read["mpan"],
                                    raw_read["prev_date"],
                                    raw_read["prev_value"],
                                    read_types[raw_read["prev_type_code"]],
                                    raw_read["pres_date"],
                                    raw_read["pres_value"],
                                    read_types[raw_read["pres_type_code"]],
                                )
                        self.successful_bills.append(raw_bill)
                    except KeyError as e:
                        err = raw_bill.get("error", "")
                        raw_bill["error"] = err + " " + str(e)
                        self.failed_bills.append(raw_bill)
                    except BadRequest as e:
                        raw_bill["error"] = str(e.description)
                        self.failed_bills.append(raw_bill)
        if len(self.failed_bills) == 0:
            sess.commit()
            self._log(
                "All the bills have been successfully loaded and attached "
                "to the batch.")
        else:
            sess.rollback()
            self._log(f"The import has finished, but there were "
                      f"{len(self.failed_bills)} failures, and so the "
                      f"whole import has been rolled back.")
    except BadRequest as e:
        # Guard: Session() itself may have failed, leaving sess as None.
        if sess is not None:
            sess.rollback()
        self._log(f"Problem: {e.description}")
    except BaseException:
        if sess is not None:
            sess.rollback()
        self._log(f"I've encountered a problem: {traceback.format_exc()}")
    finally:
        if sess is not None:
            sess.close()
def content(supply_id, start_date, finish_date, user):
    """Write the supplies_duration.csv report: one row per era overlapping
    [start_date, finish_date], with site, contract, read-count and
    import/export consumption columns.

    supply_id -- restrict to one supply, or None for all
    start_date, finish_date -- inclusive reporting window
    user -- used to name the download file
    """
    forecast_date = to_utc(Datetime.max)
    caches = {}
    f = sess = era = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        w.writerow(
            (
                "Era Start", "Era Finish", "Supply Id", "Supply Name",
                "Source", "Generator Type", "Site Code", "Site Name",
                "Associated Site Codes", "From", "To", "PC", "MTC", "CoP",
                "SSC", "Properties", "MOP Contract", "MOP Account",
                "DC Contract", "DC Account", "Normal Reads", "Type",
                "Supply Start", "Supply Finish", "Import LLFC",
                "Import MPAN Core", "Import Supply Capacity",
                "Import Supplier", "Import Total MSP kWh",
                "Import Non-actual MSP kWh", "Import Total GSP kWh",
                "Import MD / kW", "Import MD Date", "Import MD / kVA",
                "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                "Export Supply Capacity", "Export Supplier",
                "Export Total MSP kWh", "Export Non-actual MSP kWh",
                "Export GSP kWh", "Export MD / kW", "Export MD Date",
                "Export MD / kVA", "Export Bad HHs"))
        # Eagerly load everything each row needs, to avoid per-era queries.
        eras = sess.query(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(
            Era.supply_id, Era.start_date).options(
            joinedload(Era.supply),
            joinedload(Era.supply).joinedload(Supply.source),
            joinedload(Era.supply).joinedload(Supply.generator_type),
            joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.imp_llfc), joinedload(Era.exp_llfc),
            joinedload(Era.mop_contract), joinedload(Era.dc_contract),
            joinedload(Era.channels),
            joinedload(Era.site_eras).joinedload(SiteEra.site),
            joinedload(Era.pc), joinedload(Era.cop),
            joinedload(Era.mtc).joinedload(Mtc.meter_type),
            joinedload(Era.imp_supplier_contract),
            joinedload(Era.exp_supplier_contract),
            joinedload(Era.ssc), joinedload(Era.site_eras))
        if supply_id is not None:
            eras = eras.filter(
                Era.supply == Supply.get_by_id(sess, supply_id))
        for era in eras:
            supply = era.supply
            site_codes = set()
            site = None
            for site_era in era.site_eras:
                if site_era.is_physical:
                    site = site_era.site
                else:
                    site_codes.add(site_era.site.code)
            sup_eras = sess.query(Era).filter(
                Era.supply == supply).order_by(Era.start_date).all()
            supply_start = sup_eras[0].start_date
            supply_finish = sup_eras[-1].finish_date
            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code
            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code
            # A read is "prime" if its bill is the latest-issued bill
            # covering that period that has any reads; others are ignored.
            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.previous_date >= start_date,
                        RegisterRead.previous_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES)).options(
                        joinedload(RegisterRead.bill)),
                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.present_date >= start_date,
                        RegisterRead.present_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES)).options(
                        joinedload(RegisterRead.bill))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(str(rdate) + "_" + read.msn)
            supply_type = era.meter_category
            # Clamp the era to the reporting window, counting half-hours.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            num_hh = int(
                (chunk_finish + HH - chunk_start).total_seconds() /
                (30 * 60))
            w.writerow(
                [
                    hh_format(era.start_date),
                    hh_format(era.finish_date, ongoing_str=''),
                    supply.id, supply.name, supply.source.code,
                    generator_type, site.code, site.name,
                    '| '.join(sorted(site_codes)),
                    hh_format(start_date), hh_format(finish_date),
                    era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                    era.properties, era.mop_contract.name, era.mop_account,
                    era.dc_contract.name, era.dc_account,
                    len(prime_reads), supply_type,
                    hh_format(supply_start),
                    hh_format(supply_finish, ongoing_str='')] + mpan_bit(
                    sess, supply, True, num_hh, era, chunk_start,
                    chunk_finish, forecast_date, caches) + mpan_bit(
                    sess, supply, False, num_hh, era, chunk_start,
                    chunk_finish, forecast_date, caches))

            # Avoid a long-running transaction
            sess.rollback()
    except BadRequest as e:
        if era is None:
            pref = "Problem: "
        else:
            pref = "Problem with era " + chellow.utils.url_root + \
                "eras/" + str(era.id) + "/edit : "
        # Fall back to stderr if the output file never got opened.
        if f is None:
            sys.stderr.write(pref + e.description)
        else:
            f.write(pref + e.description)
    except BaseException as e:
        if era is None:
            pref = "Problem: "
        else:
            pref = "Problem with era " + str(era.id) + ": "
        msg = pref + str(e) + traceback.format_exc()
        if f is None:
            sys.stderr.write(msg)
        else:
            f.write(msg)
    finally:
        # Guard each resource: an early failure leaves sess/f as None.
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, month, months, supply_id, user):
    """Write the register_reads.csv report: one row per register read whose
    previous or present date falls within the `months`-month window ending
    in `year`/`month`.

    supply_id -- restrict to one supply, or None for all
    user -- used to name the download file
    """
    sess = f = w = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'register_reads.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        w.writerow(
            ('Duration Start', 'Duration Finish', 'Supply Id',
             'Import MPAN Core', 'Export MPAN Core', 'Batch Reference',
             'Bill Id', 'Bill Reference', 'Bill Issue Date', 'Bill Type',
             'Register Read Id', 'TPR', 'Coefficient',
             'Previous Read Date', 'Previous Read Value',
             'Previous Read Type', 'Present Read Date',
             'Present Read Value', 'Present Read Type'))
        # Window covers `months` whole months ending with year/month.
        finish_date = utc_datetime(year, month, 1) + \
            relativedelta(months=1) - HH
        start_date = utc_datetime(year, month, 1) - \
            relativedelta(months=months-1)
        reads = sess.query(RegisterRead).filter(
            or_(
                and_(
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date),
                and_(
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date))) \
            .join(Bill).order_by(Bill.supply_id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            reads = reads.filter(Bill.supply == supply)
        for read in reads:
            bill = read.bill
            supply = bill.supply
            batch = bill.batch
            era = supply.find_era_at(sess, bill.start_date)
            if era is None:
                # No era covers the bill start; fall back to the nearest
                # era (first or last) of the supply.
                eras = sess.query(Era).filter(
                    Era.supply == supply).order_by(Era.start_date).all()
                if bill.start_date < eras[0].start_date:
                    era = eras[0]
                else:
                    era = eras[-1]
            w.writerow(('' if val is None else val) for val in [
                hh_format(start_date), hh_format(finish_date), supply.id,
                era.imp_mpan_core, era.exp_mpan_core, batch.reference,
                bill.id, bill.reference, hh_format(bill.issue_date),
                bill.bill_type.code, read.id,
                'md' if read.tpr is None else read.tpr.code,
                read.coefficient, hh_format(read.previous_date),
                read.previous_value, read.previous_type.code,
                hh_format(read.present_date), read.present_value,
                read.present_type.code
            ])

        # Avoid a long-running transaction
        sess.rollback()
    except BadRequest as e:
        # w is initialized to None above, so an early failure can't raise
        # a NameError here; fall back to stderr instead.
        if w is None:
            sys.stderr.write(e.description + "\n")
        else:
            w.writerow([e.description])
    except BaseException:
        msg = traceback.format_exc()
        if f is None:
            sys.stderr.write(msg)
        else:
            f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def make_raw_bills(self): row_index = None sess = None try: sess = Session() bills = [] title_row = self.sheet.row(0) for row_index in range(1, self.sheet.nrows): row = self.sheet.row(row_index) val = get_value(title_row, row, 'mpan ref') if val is None or val == '': break self._set_last_line(row_index, val) msn = str(get_value(title_row, row, 'meter')).strip() mpan_core = parse_mpan_core( str(get_int(title_row, row, 'mpan ref'))) start_date = get_date( title_row, row, 'start', self.book.datemode) issue_date = start_date finish_date = get_date( title_row, row, 'end', self.book.datemode) + Timedelta( days=1) - HH check = get_str(title_row, row, 'check') if check != 'Billed': continue era = sess.query(Era).filter( or_( Era.imp_mpan_core == mpan_core, Era.exp_mpan_core == mpan_core), Era.start_date <= finish_date, or_( Era.finish_date == null(), Era.finish_date > start_date)).order_by( Era.start_date).first() if era is None: era = sess.query(Era).filter( or_( Era.imp_mpan_core == mpan_core, Era.exp_mpan_core == mpan_core)).order_by( Era.start_date.desc()).first() if era is None: account = mpan_core + '/DC' else: account = era.dc_account net = METER_RATE / 12 breakdown = { 'raw_lines': [str(title_row)], 'cop': ['5'], 'settlement-status': ['non_settlement'], 'msn': [msn], 'meter-rate': [METER_RATE], 'meter-gbp': net, } bills.append( { 'bill_type_code': 'N', 'kwh': Decimal(0), 'vat': Decimal('0.00'), 'net': net, 'gross': net, 'reads': [], 'breakdown': breakdown, 'account': account, 'issue_date': issue_date, 'start_date': start_date, 'finish_date': finish_date, 'mpans': [mpan_core], 'reference': '_'.join( ( start_date.strftime('%Y%m%d'), finish_date.strftime('%Y%m%d'), issue_date.strftime('%Y%m%d'), mpan_core ) ) } ) sess.rollback() except BadRequest as e: raise BadRequest( "Row number: " + str(row_index) + " " + e.description) finally: if sess is not None: sess.close() return bills
def content(running_name, finished_name, date, supply_id, mpan_cores):
    """Write a snapshot CSV of every era active on `date`, one row per era.

    running_name / finished_name -- temp and final paths for the download
    date -- the snapshot instant
    supply_id -- restrict to one supply, or None
    mpan_cores -- restrict to these MPAN cores, or None
    """
    sess = f = writer = None
    try:
        sess = Session()
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "Date", "Import MPAN Core", "Export MPAN Core",
            "Physical Site Id", "Physical Site Name", "Other Site Ids",
            "Other Site Names", "Supply Id", "Source", "Generator Type",
            "GSP Group", "DNO Name", "Voltage Level", "Is Substations",
            "Metering Type", "Mandatory HH", "PC", "MTC", "CoP",
            "SSC Code", "SSC Description", "Energisation Status",
            "Number Of Registers", "MOP Contract", "Mop Account",
            "DC Contract", "DC Account", "Meter Serial Number",
            "Meter Installation Date", "Latest Normal Meter Read Date",
            "Latest Normal Meter Read Type", "Latest DC Bill Date",
            "Latest MOP Bill Date", "Supply Start Date",
            "Supply Finish Date", "Properties", "Import ACTIVE?",
            "Import REACTIVE_IMPORT?", "Import REACTIVE_EXPORT?",
            "Export ACTIVE?", "Export REACTIVE_IMPORT?",
            "Export REACTIVE_EXPORT?",
            "Import Agreed Supply Capacity (kVA)", "Import LLFC Code",
            "Import LLFC Description", "Import Supplier Contract",
            "Import Supplier Account", "Import Mandatory kW",
            "Latest Import Supplier Bill Date",
            "Export Agreed Supply Capacity (kVA)", "Export LLFC Code",
            "Export LLFC Description", "Export Supplier Contract",
            "Export Supplier Account", "Export Mandatory kW",
            "Latest Export Supplier Bill Date",
        )
        writer.writerow(titles)
        NORMAL_READ_TYPES = ("N", "C", "N3")
        # Year-long window ending at `date`, used for the mandatory-HH MD
        # calculation below.
        year_start = date + HH - relativedelta(years=1)
        era_ids = (sess.query(Era.id).filter(
            Era.start_date <= date,
            or_(Era.finish_date == null(), Era.finish_date >= date),
        ).order_by(Era.supply_id))
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            era_ids = era_ids.filter(Era.supply == supply)
        if mpan_cores is not None:
            era_ids = era_ids.filter(
                or_(Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
        # Fetch ids first, then load each era fully, so memory stays flat.
        for (era_id, ) in era_ids:
            era, supply, generator_type = (sess.query(
                Era, Supply, GeneratorType).join(
                Supply, Era.supply_id == Supply.id).outerjoin(
                GeneratorType,
                Supply.generator_type_id == GeneratorType.id).filter(
                Era.id == era_id).options(
                joinedload(Era.channels),
                joinedload(Era.cop),
                joinedload(Era.dc_contract),
                joinedload(Era.exp_llfc),
                joinedload(Era.exp_supplier_contract),
                joinedload(Era.imp_llfc),
                joinedload(Era.imp_supplier_contract),
                joinedload(Era.mop_contract),
                joinedload(Era.mtc),
                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                joinedload(Era.pc),
                joinedload(Era.site_eras).joinedload(SiteEra.site),
                joinedload(Era.ssc),
                joinedload(Era.energisation_status),
                joinedload(Era.supply).joinedload(Supply.source),
                joinedload(Era.supply).joinedload(Supply.gsp_group),
                joinedload(Era.supply).joinedload(Supply.dno),
            ).one())
            site_codes = []
            site_names = []
            for site_era in era.site_eras:
                if site_era.is_physical:
                    physical_site = site_era.site
                else:
                    site = site_era.site
                    site_codes.append(site.code)
                    site_names.append(site.name)
            sup_eras = (sess.query(Era).filter(
                Era.supply == supply).order_by(Era.start_date).all())
            supply_start_date = sup_eras[0].start_date
            supply_finish_date = sup_eras[-1].finish_date
            # Voltage / substation details come from whichever LLFC side
            # the era actually has.
            if era.imp_mpan_core is None:
                voltage_level_code = era.exp_llfc.voltage_level.code
                is_substation = era.exp_llfc.is_substation
            else:
                voltage_level_code = era.imp_llfc.voltage_level.code
                is_substation = era.imp_llfc.is_substation
            if generator_type is None:
                generator_type_str = ""
            else:
                generator_type_str = generator_type.code
            metering_type = era.meter_category
            if metering_type in ("nhh", "amr"):
                # Find the most recent 'normal' read on either side of a
                # register read, then pick whichever is later.
                latest_prev_normal_read = (
                    sess.query(RegisterRead).join(Bill).join(
                        RegisterRead.previous_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.previous_date <= date,
                        Bill.supply_id == supply.id,
                    ).order_by(
                        RegisterRead.previous_date.desc()).options(
                        joinedload(RegisterRead.previous_type)).first())
                latest_pres_normal_read = (
                    sess.query(RegisterRead).join(Bill).join(
                        RegisterRead.present_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.present_date <= date,
                        Bill.supply == supply,
                    ).order_by(
                        RegisterRead.present_date.desc()).options(
                        joinedload(RegisterRead.present_type)).first())
                if latest_prev_normal_read is None and \
                        latest_pres_normal_read is None:
                    latest_normal_read_date = None
                    latest_normal_read_type = None
                elif (latest_pres_normal_read is not None
                        and latest_prev_normal_read is None):
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                elif (latest_pres_normal_read is None
                        and latest_prev_normal_read is not None):
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                elif (latest_pres_normal_read.present_date >
                        latest_prev_normal_read.previous_date):
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                else:
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                if latest_normal_read_date is not None:
                    latest_normal_read_date = hh_format(
                        latest_normal_read_date)
            else:
                # For HH metering the 'read date' column carries the
                # metering type instead of a date.
                latest_normal_read_date = metering_type
                latest_normal_read_type = None
            mop_contract = era.mop_contract
            mop_contract_name = mop_contract.name
            mop_account = era.mop_account
            latest_mop_bill_date = (sess.query(
                Bill.finish_date).join(Batch).filter(
                Bill.start_date <= date, Bill.supply == supply,
                Batch.contract == mop_contract,
            ).order_by(Bill.finish_date.desc()).first())
            if latest_mop_bill_date is not None:
                latest_mop_bill_date = hh_format(latest_mop_bill_date[0])
            dc_contract = era.dc_contract
            dc_contract_name = dc_contract.name
            dc_account = era.dc_account
            latest_dc_bill_date = (sess.query(
                Bill.finish_date).join(Batch).filter(
                Bill.start_date <= date, Bill.supply == supply,
                Batch.contract == dc_contract,
            ).order_by(Bill.finish_date.desc()).first())
            if latest_dc_bill_date is not None:
                latest_dc_bill_date = hh_format(latest_dc_bill_date[0])
            # One true/false column per (direction, channel type).
            channel_values = []
            for imp_related in [True, False]:
                for channel_type in CHANNEL_TYPES:
                    if era.find_channel(
                            sess, imp_related, channel_type) is None:
                        channel_values.append("false")
                    else:
                        channel_values.append("true")
            # Average of the top three monthly maximum demands (kW) over
            # the last year, per direction; used for the mandatory-HH test.
            imp_avg_months = None
            exp_avg_months = None
            for is_import in [True, False]:
                if metering_type == "nhh":
                    continue
                params = {
                    "supply_id": supply.id,
                    "year_start": year_start,
                    "year_finish": date,
                    "is_import": is_import,
                }
                # max(hh value) * 2 converts a half-hourly kWh to kW.
                month_mds = tuple(md[0] * 2 for md in sess.execute(
                    """
select max(hh_datum.value) as md
from hh_datum join channel on (hh_datum.channel_id = channel.id)
    join era on (channel.era_id = era.id)
where era.supply_id = :supply_id
    and hh_datum.start_date >= :year_start
    and hh_datum.start_date <= :year_finish
    and channel.channel_type = 'ACTIVE'
    and channel.imp_related = :is_import
group by extract(month from (hh_datum.start_date at time zone 'utc'))
order by md desc
limit 3
""",
                    params=params,
                ))
                avg_months = sum(month_mds)
                if len(month_mds) > 0:
                    avg_months /= len(month_mds)
                if is_import:
                    imp_avg_months = avg_months
                else:
                    exp_avg_months = avg_months
            if (imp_avg_months is not None and imp_avg_months > 100) or (
                    exp_avg_months is not None and exp_avg_months > 100):
                mandatory_hh = "yes"
            else:
                mandatory_hh = "no"
            # Latest supplier bill per direction: walk eras backwards and
            # stop at the first era with a bill (or no MPAN on that side).
            imp_latest_supplier_bill_date = None
            exp_latest_supplier_bill_date = None
            for is_import in (True, False):
                for er in (sess.query(Era).filter(
                        Era.supply == era.supply,
                        Era.start_date <= date).order_by(
                        Era.start_date.desc())):
                    if is_import:
                        if er.imp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.imp_supplier_contract
                    else:
                        if er.exp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.exp_supplier_contract
                    latest_bill_date = (sess.query(
                        Bill.finish_date).join(Batch).filter(
                        Bill.finish_date >= er.start_date,
                        Bill.finish_date <= hh_min(er.finish_date, date),
                        Bill.supply == supply,
                        Batch.contract == supplier_contract,
                    ).order_by(Bill.finish_date.desc()).first())
                    if latest_bill_date is not None:
                        latest_bill_date = hh_format(latest_bill_date[0])
                        if is_import:
                            imp_latest_supplier_bill_date = \
                                latest_bill_date
                        else:
                            exp_latest_supplier_bill_date = \
                                latest_bill_date
                        break
            # Earliest era start with this meter serial number.
            meter_installation_date = (sess.query(func.min(
                Era.start_date)).filter(
                Era.supply == era.supply, Era.msn == era.msn).one()[0])
            ssc = era.ssc
            if ssc is None:
                ssc_code = ssc_description = num_registers = None
            else:
                ssc_code, ssc_description = ssc.code, ssc.description
                num_registers = (sess.query(
                    MeasurementRequirement).filter(
                    MeasurementRequirement.ssc == ssc).count())
            vals = ([
                date, era.imp_mpan_core, era.exp_mpan_core,
                physical_site.code, physical_site.name,
                ", ".join(site_codes), ", ".join(site_names), supply.id,
                supply.source.code, generator_type_str,
                supply.gsp_group.code, supply.dno.dno_code,
                voltage_level_code, is_substation, metering_type,
                mandatory_hh, era.pc.code, era.mtc.code, era.cop.code,
                ssc_code, ssc_description, era.energisation_status.code,
                num_registers, mop_contract_name, mop_account,
                dc_contract_name, dc_account, era.msn,
                meter_installation_date, latest_normal_read_date,
                latest_normal_read_type, latest_dc_bill_date,
                latest_mop_bill_date, supply_start_date,
                supply_finish_date, era.properties,
            ] + channel_values + [
                era.imp_sc,
                None if era.imp_llfc is None else era.imp_llfc.code,
                None if era.imp_llfc is None else
                era.imp_llfc.description,
                None if era.imp_supplier_contract is None else
                era.imp_supplier_contract.name,
                era.imp_supplier_account, imp_avg_months,
                imp_latest_supplier_bill_date,
            ] + [
                era.exp_sc,
                None if era.exp_llfc is None else era.exp_llfc.code,
                None if era.exp_llfc is None else
                era.exp_llfc.description,
                None if era.exp_supplier_contract is None else
                era.exp_supplier_contract.name,
                era.exp_supplier_account, exp_avg_months,
                exp_latest_supplier_bill_date,
            ])
            writer.writerow([csv_make_val(v) for v in vals])

            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # The writer may not exist yet if set-up failed; stderr above
        # already carries the traceback in that case.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def run(self):
    """Importer thread: periodically download Elexon System Prices and
    fold them into the 'system_price' contract's rate scripts.

    Loops until self.stopped is set, waking every 24 hours; the lock
    ensures only one import runs at a time.
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = book = sbp_sheet = ssp_sheet = None
            try:
                sess = Session()
                self.log("Starting to check System Prices.")
                # ct_tz = pytz.timezone('Europe/London')
                contract = Contract.get_non_core_by_name(
                    sess, 'system_price')
                contract_props = contract.make_properties()
                if contract_props.get('enabled', False):
                    # Walk rate scripts newest-first to find where the
                    # final ('DF' run) data stops; that's where filling
                    # resumes from.
                    # NOTE(review): if no script matches either branch,
                    # fill_start stays unbound and the NameError below is
                    # caught by the outer handler — confirm intended.
                    for rscript in sess.query(RateScript).filter(
                            RateScript.contract == contract).order_by(
                            RateScript.start_date.desc()):
                        ns = loads(rscript.script)
                        rates = ns['gbp_per_nbp_mwh']
                        if len(rates) == 0:
                            fill_start = rscript.start_date
                            break
                        elif rates[key_format(
                                rscript.finish_date)]['run'] == 'DF':
                            fill_start = rscript.finish_date + HH
                            break
                    config = Contract.get_non_core_by_name(
                        sess, 'configuration')
                    config_props = config.make_properties()
                    scripting_key = config_props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                    if scripting_key is None:
                        raise BadRequest(
                            "The property " +
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                            " cannot be found in the configuration "
                            "properties.")
                    url_str = contract_props['url'] + \
                        'file/download/BESTVIEWPRICES_FILE?key=' + \
                        scripting_key
                    self.log(
                        "Downloading from " + url_str +
                        " and extracting data from " +
                        hh_format(fill_start))
                    url = urllib.parse.urlparse(url_str)
                    if url.scheme == 'https':
                        conn = http.client.HTTPSConnection(
                            url.hostname, url.port)
                    else:
                        conn = http.client.HTTPConnection(
                            url.hostname, url.port)
                    conn.request("GET", url.path + '?' + url.query)
                    res = conn.getresponse()
                    self.log(
                        "Received " + str(res.status) + " " + res.reason)
                    data = res.read()
                    book = xlrd.open_workbook(file_contents=data)
                    # Sheet 1 holds SBP values, sheet 2 SSP values; rows
                    # are days, columns 2-51 are the day's half-hours.
                    sbp_sheet = book.sheet_by_index(1)
                    ssp_sheet = book.sheet_by_index(2)
                    sp_months = []
                    sp_month = None
                    for row_index in range(1, sbp_sheet.nrows):
                        sbp_row = sbp_sheet.row(row_index)
                        ssp_row = ssp_sheet.row(row_index)
                        raw_date = datetime.datetime(
                            *xlrd.xldate_as_tuple(
                                sbp_row[0].value, book.datemode))
                        hh_date_ct = to_ct(raw_date)
                        hh_date = to_utc(hh_date_ct)
                        run_code = sbp_row[1].value
                        for col_idx in range(2, 52):
                            if hh_date >= fill_start:
                                sbp_val = sbp_row[col_idx].value
                                if sbp_val != '':
                                    # Start a new month bucket at the
                                    # first HH of each calendar month.
                                    if hh_date.day == 1 and \
                                            hh_date.hour == 0 and \
                                            hh_date.minute == 0:
                                        sp_month = {}
                                        sp_months.append(sp_month)
                                    ssp_val = ssp_row[col_idx].value
                                    sp_month[hh_date] = {
                                        'run': run_code,
                                        'sbp': sbp_val,
                                        'ssp': ssp_val
                                    }
                            hh_date += HH
                    self.log("Successfully extracted data.")
                    # Drop a trailing partial month.
                    last_date = sorted(sp_months[-1].keys())[-1]
                    if last_date.month == (last_date + HH).month:
                        del sp_months[-1]
                    # 'limit' caps the import to one month (used in
                    # testing, presumably — confirm).
                    if 'limit' in contract_props:
                        sp_months = sp_months[0:1]
                    for sp_month in sp_months:
                        sorted_keys = sorted(sp_month.keys())
                        month_start = sorted_keys[0]
                        month_finish = sorted_keys[-1]
                        rs = sess.query(RateScript).filter(
                            RateScript.contract == contract,
                            RateScript.start_date == month_start).first()
                        if rs is None:
                            # No script for this month yet: truncate the
                            # latest script and append a new empty one.
                            self.log(
                                "Adding a new rate script starting at " +
                                hh_format(month_start) + ".")
                            latest_rs = sess.query(RateScript).filter(
                                RateScript.contract ==
                                contract).order_by(
                                RateScript.start_date.desc()).first()
                            contract.update_rate_script(
                                sess, latest_rs, latest_rs.start_date,
                                month_finish, loads(latest_rs.script))
                            rs = contract.insert_rate_script(
                                sess, month_start, {})
                            sess.flush()
                        script = {
                            'gbp_per_nbp_mwh': dict(
                                (key_format(k), v)
                                for k, v in sp_month.items())
                        }
                        self.log(
                            "Updating rate script starting at " +
                            hh_format(month_start) + ".")
                        contract.update_rate_script(
                            sess, rs, rs.start_date, rs.finish_date,
                            script)
                        sess.commit()
                else:
                    self.log(
                        "The automatic importer is disabled. To "
                        "enable it, edit the contract properties to "
                        "set 'enabled' to True.")
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                # Drop workbook references so the memory is reclaimable.
                book = sbp_sheet = ssp_sheet = None
                self.lock.release()
                self.log("Finished checking System Price rates.")
                if sess is not None:
                    sess.close()
        # Sleep for a day (or until woken) before the next check.
        self.going.wait(24 * 60 * 60)
        self.going.clear()
def run(self):
    """Import a batch of electricity supplier bills parsed from a file.

    Parses the batch file with ``self.parser``, then inserts each raw bill
    (and its register reads) inside a savepoint.  Bills that raise
    ``BadRequest`` are recorded in ``self.failed_bills``; if any bill fails,
    the whole batch is rolled back, otherwise it's committed.
    """
    sess = None
    try:
        sess = Session()
        self._log(
            "Starting to parse the file with '" + self.parser_name + "'.")
        # Memoizing lookups: each code is fetched from the DB at most once.
        bill_types = keydefaultdict(
            lambda k: BillType.get_by_code(sess, k))
        # A None TPR code maps to None rather than attempting a lookup.
        tprs = keydefaultdict(
            lambda k: None if k is None else Tpr.get_by_code(sess, k))
        read_types = keydefaultdict(
            lambda k: ReadType.get_by_code(sess, k))
        batch = Batch.get_by_id(sess, self.batch_id)
        contract = batch.contract
        raw_bills = self.parser.make_raw_bills()
        self._log(
            "Successfully parsed the file, and now I'm starting to "
            "insert the raw bills.")
        for self.bill_num, raw_bill in enumerate(raw_bills):
            try:
                account = raw_bill['account']
                # Find the supply whose era references this contract under
                # any of the four contract roles with a matching account.
                supply = sess.query(Supply).join(Era).filter(
                    or_(
                        and_(
                            Era.imp_supplier_contract == contract,
                            Era.imp_supplier_account == account),
                        and_(
                            Era.exp_supplier_contract == contract,
                            Era.exp_supplier_account == account),
                        and_(
                            Era.mop_contract == contract,
                            Era.mop_account == account),
                        and_(
                            Era.hhdc_contract == contract,
                            Era.hhdc_account == account))
                ).distinct().order_by(Supply.id).first()
                if supply is None:
                    raise BadRequest(
                        "Can't find an era with contract '" + contract.name +
                        "' and account '" + account + "'.")
                # Savepoint: a failing bill undoes only its own inserts,
                # leaving earlier successful bills pending.
                with sess.begin_nested():
                    bill = batch.insert_bill(
                        sess, account, raw_bill['reference'],
                        raw_bill['issue_date'], raw_bill['start_date'],
                        raw_bill['finish_date'], raw_bill['kwh'],
                        raw_bill['net'], raw_bill['vat'], raw_bill['gross'],
                        bill_types[raw_bill['bill_type_code']],
                        raw_bill['breakdown'], supply)
                    for raw_read in raw_bill['reads']:
                        bill.insert_read(
                            sess, tprs[raw_read['tpr_code']],
                            raw_read['coefficient'], raw_read['units'],
                            raw_read['msn'], raw_read['mpan'],
                            raw_read['prev_date'], raw_read['prev_value'],
                            read_types[raw_read['prev_type_code']],
                            raw_read['pres_date'], raw_read['pres_value'],
                            read_types[raw_read['pres_type_code']])
                self.successful_bills.append(raw_bill)
            except BadRequest as e:
                raw_bill['error'] = str(e.description)
                self.failed_bills.append(raw_bill)
        # All-or-nothing: commit only if every bill loaded cleanly.
        if len(self.failed_bills) == 0:
            sess.commit()
            self._log(
                "All the bills have been successfully loaded and attached "
                "to the batch.")
        else:
            sess.rollback()
            self._log(
                "The import has finished, but there were " +
                str(len(self.failed_bills)) + " failures, and so the "
                "whole import has been rolled back.")
    # NOTE(review): bare except also catches SystemExit/KeyboardInterrupt,
    # and if Session() itself raised, sess is None here and sess.rollback()
    # raises AttributeError, masking the original error — consider guarding.
    except:
        sess.rollback()
        self._log("I've encountered a problem: " + traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
def content(base_name, site_id, g_supply_id, user, compression, start_date,
            months):
    """Write a gas monthly-duration spreadsheet (.ods) download.

    Produces, per month in the requested range, one row per gas era
    (virtual-bill values plus billed kWh/GBP apportioned from overlapping
    bills) and one summary row per site.  Optionally restricted to a single
    site and/or a single gas supply.  The file is written incrementally
    after each month so partial output survives a crash.
    """
    now = utc_datetime_now()
    report_context = {}
    sess = None
    try:
        sess = Session()
        # Build up the download file name from the report parameters.
        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').replace(
                '-', ''))
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)
        forecast_from = chellow.computer.forecast_date()
        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if g_supply_id is not None:
            g_supply = GSupply.get_by_id(sess, g_supply_id)
            base_name.append('g_supply')
            base_name.append(str(g_supply.id))
            sites = sites.join(SiteGEra).join(GEra).filter(
                GEra.g_supply == g_supply)
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        site_rows = []
        g_era_rows = []
        era_header_titles = [
            'creation_date', 'mprn', 'supply_name', 'exit_zone', 'msn',
            'unit', 'contract', 'site_id', 'site_name',
            'associated_site_ids', 'month'
        ]
        site_header_titles = [
            'creation_date', 'site_id', 'site_name', 'associated_site_ids',
            'month'
        ]
        summary_titles = ['kwh', 'gbp', 'billed_kwh', 'billed_gbp']
        # Union of virtual-bill column titles over every relevant contract,
        # preserving first-seen order.
        vb_titles = []
        conts = sess.query(GContract).join(GEra).join(GSupply).filter(
            GEra.start_date <= finish_date,
            or_(GEra.finish_date == null(),
                GEra.finish_date >= start_date)).distinct().order_by(
            GContract.id)
        if g_supply_id is not None:
            conts = conts.filter(GEra.g_supply_id == g_supply_id)
        for cont in conts:
            title_func = chellow.computer.contract_func(
                report_context, cont, 'virtual_bill_titles')
            if title_func is None:
                raise Exception(
                    "For the contract " + cont.name + " there doesn't seem " +
                    "to be a 'virtual_bill_titles' function.")
            for title in title_func():
                if title not in vb_titles:
                    vb_titles.append(title)
        g_era_rows.append(era_header_titles + summary_titles + vb_titles)
        site_rows.append(site_header_titles + summary_titles)
        sites = sites.all()
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_kwh = site_gbp = site_billed_kwh = site_billed_gbp = 0
                # Physical gas eras overlapping this month at this site.
                for g_era in sess.query(GEra).join(SiteGEra).filter(
                        SiteGEra.site == site,
                        SiteGEra.is_physical == true(),
                        GEra.start_date <= month_finish,
                        or_(GEra.finish_date == null(),
                            GEra.finish_date >= month_start)).options(
                        joinedload(GEra.g_contract),
                        joinedload(GEra.g_supply),
                        joinedload(GEra.g_supply).joinedload(
                            GSupply.g_exit_zone)).order_by(GEra.id):
                    g_supply = g_era.g_supply
                    if g_supply_id is not None and g_supply.id != g_supply_id:
                        continue
                    # Clamp the era to the month being reported.
                    ss_start = hh_max(g_era.start_date, month_start)
                    ss_finish = hh_min(g_era.finish_date, month_finish)
                    ss = GDataSource(
                        sess, ss_start, ss_finish, forecast_from, g_era,
                        report_context, None)
                    contract = g_era.g_contract
                    vb_function = contract_func(
                        report_context, contract, 'virtual_bill')
                    if vb_function is None:
                        raise BadRequest(
                            "The contract " + contract.name +
                            " doesn't have the virtual_bill() function.")
                    vb_function(ss)
                    bill = ss.bill
                    # Missing keys are reported in the bill's 'problem'
                    # field rather than aborting the report.
                    try:
                        gbp = bill['net_gbp']
                    except KeyError:
                        gbp = 0
                        bill['problem'] += 'For the supply ' + ss.mprn + \
                            ' the virtual bill ' + str(bill) + \
                            ' from the contract ' + contract.name + \
                            ' does not contain the net_gbp key.'
                    try:
                        kwh = bill['kwh']
                    except KeyError:
                        kwh = 0
                        bill['problem'] += "For the supply " + ss.mprn + \
                            " the virtual bill " + str(bill) + \
                            " from the contract " + contract.name + \
                            " does not contain the 'kwh' key."
                    billed_kwh = billed_gbp = 0
                    g_era_associates = {
                        s.site.code for s in g_era.site_g_eras
                        if not s.is_physical}
                    # Apportion each overlapping bill by the fraction of its
                    # duration that falls inside this month's era chunk.
                    for g_bill in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= ss_finish,
                            GBill.finish_date >= ss_start):
                        bill_start = g_bill.start_date
                        bill_finish = g_bill.finish_date
                        # + 30 min because finish dates are HH-inclusive.
                        bill_duration = (
                            bill_finish - bill_start).total_seconds() + \
                            (30 * 60)
                        overlap_duration = (
                            min(bill_finish, ss_finish) -
                            max(bill_start, ss_start)).total_seconds() + \
                            (30 * 60)
                        overlap_proportion = overlap_duration / bill_duration
                        billed_kwh += overlap_proportion * float(g_bill.kwh)
                        billed_gbp += overlap_proportion * float(g_bill.net)
                    associated_site_ids = ','.join(sorted(g_era_associates))
                    g_era_rows.append([
                        now, g_supply.mprn, g_supply.name,
                        g_supply.g_exit_zone.code, g_era.msn,
                        g_era.g_unit.code, contract.name, site.code,
                        site.name, associated_site_ids, month_finish, kwh,
                        gbp, billed_kwh, billed_gbp
                    ] + [make_val(bill.get(t)) for t in vb_titles])
                    site_kwh += kwh
                    site_gbp += gbp
                    site_billed_kwh += billed_kwh
                    site_billed_gbp += billed_gbp
                linked_sites = ', '.join(
                    s.code for s in site.find_linked_sites(
                        sess, month_start, month_finish))
                site_rows.append([
                    now, site.code, site.name, linked_sites, month_finish,
                    site_kwh, site_gbp, site_billed_kwh, site_billed_gbp
                ])
                # Keep the read-only transaction short.
                sess.rollback()
            # Rewrite the spreadsheet after each month so partial results
            # are available if a later month fails.
            write_spreadsheet(rf, compression, site_rows, g_era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            # NOTE(review): the error file is written to its 'running' name
            # but never renamed to f_name, so it stays marked as running —
            # verify whether the rename was omitted deliberately.
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
def content(year, supply_id, user):
    """Write the CRC (Carbon Reduction Commitment) CSV report.

    For each net/gen-net import supply, over the CRC year running 1 April
    ``year`` to 31 March ``year + 1``, works out per meter category
    ('hh', 'amr', 'nhh', 'unmetered') how many days are covered by actual
    data ("normal days") versus the maximum possible, estimates kWh, and
    classifies the supply as "Actual" or "Estimated".
    """
    f = sess = None
    try:
        sess = Session()
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, "w")
        # Read-type codes counted as actual (non-estimated) reads.
        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        f.write(
            ','.join(
                (
                    'Chellow Supply Id', 'MPAN Core', 'Site Id', 'Site Name',
                    'From', 'To', 'NHH Breakdown', 'Actual HH Normal Days',
                    'Actual AMR Normal Days', 'Actual NHH Normal Days',
                    'Actual Unmetered Normal Days', 'Max HH Normal Days',
                    'Max AMR Normal Days', 'Max NHH Normal Days',
                    'Max Unmetered Normal Days', 'Total Actual Normal Days',
                    'Total Max Normal Days', 'Data Type', 'HH kWh',
                    'AMR kWh', 'NHH kWh', 'Unmetered kwh', 'HH Filled kWh',
                    'AMR Filled kWh', 'Total kWh', 'Note')) + '\n')
        # CRC year: 1 April to 31 March (inclusive of the last half hour).
        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH
        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(),
            Era.start_date <= year_finish,
            or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
        meter_types = ('hh', 'amr', 'nhh', 'unmetered')
        for supply in supplies:
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])
            breakdown = ''
            for era in sess.query(Era).filter(
                    Era.supply_id == supply.id,
                    Era.start_date <= year_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= year_start)):
                meter_type = era.make_meter_category()
                # Clamp the era to the CRC year.
                era_start = era.start_date
                period_start = era_start \
                    if era_start > year_start else year_start
                era_finish = era.finish_date
                if hh_after(era_finish, year_finish):
                    period_finish = year_finish
                else:
                    period_finish = era_finish
                # + 30 min: finish dates are half-hour inclusive.
                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)
                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()
                if meter_type == 'nhh':
                    # NHH: reconstruct consumption from pairs of consecutive
                    # actual register reads.  Reads are walked backwards from
                    # the period start, then forwards from it, deduplicated
                    # by (date, meter serial number).
                    read_list = []
                    read_keys = {}
                    pairs = []
                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))
                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads
                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            # Pull the next candidate 'present' read; skip
                            # duplicates and reads whose bill isn't the
                            # authoritative (latest-issued) one.
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break
                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join([str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue
                                pres_bill = sess.query(Bill).join(BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead).
                                    filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill == pres_bill,
                                        RegisterRead.present_date ==
                                        pres_date,
                                        RegisterRead.msn == pres_msn))
                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            # Same for the next candidate 'previous' read.
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break
                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join([str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill).join(BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead).
                                    filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id == prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))
                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None
                            # Merge the two streams in date order (ascending
                            # forwards, descending backwards).
                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                            # Try to form a consumption-rate pair from the
                            # two newest reads on the same meter with the
                            # same set of TPRs.
                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]
                                if aft_read['msn'] == fore_read['msn'] and \
                                        set(aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']
                                    num_hh = (
                                        (
                                            pair_finish_date + HH -
                                            pair_start_date).total_seconds()
                                    ) / (30 * 60)
                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].items():
                                        end_val = fore_read['reads'][tpr_code]
                                        kwh = end_val - initial_val
                                        # Negative difference implies the
                                        # register rolled over; add a full
                                        # register's worth of units.
                                        if kwh < 0:
                                            digits = int(
                                                math.log10(initial_val)) + 1
                                            kwh = 10 ** digits + kwh
                                        tprs[tpr_code] = kwh / num_hh
                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date': pair_finish_date,
                                            'tprs': tprs})
                                    # Stop once the period is bracketed.
                                    if len(pairs) > 0 and \
                                            (not is_forwards or (
                                                is_forwards and
                                                read_list[-1]['date'] >
                                                period_finish)):
                                        break
                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        # No usable pairs: assume zero consumption on a
                        # placeholder TPR for the whole period.
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        # Days covered by in-year pairs count as 'normal'.
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                if pair_start > period_start:
                                    block_start = pair_start
                                else:
                                    block_start = period_start
                                if pair_finish < period_finish:
                                    block_finish = pair_finish
                                else:
                                    block_finish = period_finish
                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (
                                            block_finish - block_start
                                        ).total_seconds() +
                                        60 * 30) / (60 * 60 * 24)
                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] \
                            - HH
                    # stretch
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]
                    # squash
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    for pair in pairs:
                        pair_hhs = (
                            (
                                pair['finish-date'] - pair['start-date']
                            ).total_seconds() + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in pair['tprs'].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs
                    breakdown += 'pairs - \n' + str(pairs)
                elif meter_type in ('hh', 'amr'):
                    # Metered HH data: sum actual values, and 'fill' gaps in
                    # the period using the year's average half-hourly value.
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish).order_by(
                            HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish).order_by(
                            HhDatum.id))
                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (
                        period_finish + HH - period_start
                    ).total_seconds() / (60 * 30)
                    if year_len_kwhs > 0:
                        filled_kwh[meter_type] += year_sum_kwhs / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
                    # Only 'A' (actual) status data counts as normal days.
                    normal_days[meter_type] += sess.query(
                        func.count(HhDatum.value)).join(Channel). \
                        filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0] / 48
                elif meter_type == 'unmetered':
                    bills = sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.finish_date >= period_start,
                        Bill.start_date <= period_finish)
                    for bill in bills:
                        # NOTE(review): BUG — 'kwh' is never assigned in this
                        # branch, so this raises NameError (caught by the
                        # outer except and written as "Problem ...") or adds
                        # a stale value.  It almost certainly should be
                        # float(bill.kwh) — confirm and fix.
                        total_kwh[meter_type] += kwh
                    normal_days[meter_type] += (
                        (
                            period_finish - period_start).total_seconds() +
                        60 * 30) / (60 * 60 * 24)
            # for full year 183
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            # NOTE(review): if a supply yields no eras in the year,
            # total_max_normal_days is 0 and this divides by zero — verify
            # whether the supplies query guarantees at least one era.
            is_normal = total_normal_days / total_max_normal_days >= 183 / 365
            f.write(
                ','.join(
                    '"' + str(val) + '"' for val in [
                        supply.id, mpan_core, site.code, site.name,
                        hh_format(year_start), hh_format(year_finish),
                        breakdown] + [
                        normal_days[type] for type in meter_types] + [
                        max_normal_days[type] for type in meter_types] + [
                        total_normal_days, total_max_normal_days,
                        "Actual" if is_normal else "Estimated"] + [
                        total_kwh[type] for type in meter_types] + [
                        filled_kwh[type] for type in ('hh', 'amr')] + [
                        sum(total_kwh.values()) + sum(filled_kwh.values()),
                        '']) + '\n')
            # avoid a long running transaction
            sess.rollback()
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        # NOTE(review): if open() failed, f is None here and this raises.
        f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def run(self): while not self.stopped.isSet(): if self.lock.acquire(False): sess = None try: sess = Session() self.log("Starting to check bank holidays") contract = Contract.get_non_core_by_name( sess, 'bank_holidays') contract_props = contract.make_properties() if contract_props.get('enabled', False): url_str = contract_props['url'] self.log("Downloading from " + url_str + ".") res = requests.get(url_str) self.log( ' '.join( ( "Received", str(res.status_code), res.reason))) PREFIX = 'DTSTART;VALUE=DATE:' hols = collections.defaultdict(list) for line in res.text.splitlines(): if line.startswith(PREFIX): dt = utc_datetime_parse(line[-8:], "%Y%m%d") hols[dt.year].append(dt) for year in sorted(hols.keys()): year_start = utc_datetime(year, 1, 1) year_finish = year_start + \ relativedelta(years=1) - HH rs = sess.query(RateScript).filter( RateScript.contract == contract, RateScript.start_date == year_start).first() if rs is None: self.log( "Adding a new rate script starting at " + hh_format(year_start) + ".") latest_rs = sess.query(RateScript).filter( RateScript.contract == contract).\ order_by(RateScript.start_date.desc()). \ first() contract.update_rate_script( sess, latest_rs, latest_rs.start_date, year_finish, latest_rs.script) rs = contract.insert_rate_script( sess, year_start, '') script = { 'bank_holidays': [ v.strftime("%Y-%m-%d") for v in hols[year]]} self.log( "Updating rate script starting at " + hh_format(year_start) + ".") contract.update_rate_script( sess, rs, rs.start_date, rs.finish_date, json.dumps( script, indent=' ', sort_keys=True)) sess.commit() else: self.log( "The automatic importer is disabled. To " "enable it, edit the contract properties to " "set 'enabled' to True.") except: self.log("Outer problem " + traceback.format_exc()) sess.rollback() finally: if sess is not None: sess.close() self.lock.release() self.log("Finished checking bank holidays.") self.going.wait(24 * 60 * 60) self.going.clear()
def content(start_date, finish_date, imp_related, channel_type, is_zipped,
            supply_id, mpan_cores, user):
    """Export half-hourly data for supplies as a CSV, or a ZIP of per-supply
    CSVs, into the user's downloads area.

    One row per day per supply, with up to 48 half-hourly values; missing
    half hours are left as empty cells.  Optionally restricted to a single
    supply or a list of MPAN cores.

    Fixes over the previous version: the error path no longer assumes the
    output file was already opened (previously an early failure raised
    AttributeError on ``zf``/``tf`` or NameError on ``running_name`` in the
    ``finally`` block, masking the original traceback), and the text file is
    closed after writing the error so the message is flushed before the
    rename.
    """
    zf = sess = tf = None
    # Guard against failures occurring before the output file is named:
    # the finally block must not raise NameError and mask the real error.
    running_name = finished_name = None
    base_name = ["supplies_hh_data", finish_date.strftime('%Y%m%d%H%M')]
    cache = {}
    try:
        sess = Session()
        supplies = sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(Supply.id).distinct()
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            # Name the download after the supply's MPAN core, preferring
            # the import one.
            first_era = sess.query(Era).filter(
                Era.supply == supply,
                or_(Era.finish_date == null(),
                    Era.finish_date >= start_date),
                Era.start_date <= finish_date).order_by(
                Era.start_date).first()
            if first_era.imp_mpan_core is None:
                name_core = first_era.exp_mpan_core
            else:
                name_core = first_era.imp_mpan_core
            base_name.append("supply_" + name_core.replace(' ', '_'))
        if mpan_cores is not None:
            supplies = supplies.filter(
                or_(Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
            base_name.append('filter')
        outs = []
        titles = ','.join([
            'Import MPAN Core', 'Export MPAN Core', 'Import Related?',
            'Channel Type', 'Date'] + list(map(str, range(48))))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + ('.zip' if is_zipped else '.csv'), user)
        if is_zipped:
            zf = zipfile.ZipFile(running_name, "w", zipfile.ZIP_DEFLATED)
        else:
            tf = open(running_name, "w")
            outs.append(titles)
        for supply in supplies:
            # MPAN cores are taken from the era in force at finish_date.
            era = supply.find_era_at(sess, finish_date)
            if era is None:
                imp_mpan_core_str = exp_mpan_core_str = 'NA'
            else:
                if era.imp_mpan_core is None:
                    imp_mpan_core_str = "NA"
                else:
                    imp_mpan_core_str = era.imp_mpan_core
                if era.exp_mpan_core is None:
                    exp_mpan_core_str = "NA"
                else:
                    exp_mpan_core_str = era.exp_mpan_core
            imp_related_str = "TRUE" if imp_related else "FALSE"
            hh_data = iter(
                sess.query(HhDatum).join(Channel).join(Era).filter(
                    Era.supply == supply, HhDatum.start_date >= start_date,
                    HhDatum.start_date <= finish_date,
                    Channel.imp_related == imp_related,
                    Channel.channel_type == channel_type).order_by(
                    HhDatum.start_date))
            datum = next(hh_data, None)
            # Walk every half hour in the range, starting a new row at each
            # midnight; data rows are merged against the sorted datum stream.
            for current_date in hh_range(cache, start_date, finish_date):
                if current_date.hour == 0 and current_date.minute == 0:
                    outs.append(
                        "\n" + imp_mpan_core_str + "," + exp_mpan_core_str +
                        "," + imp_related_str + "," + channel_type + "," +
                        current_date.strftime('%Y-%m-%d'))
                outs.append(",")
                if datum is not None and datum.start_date == current_date:
                    outs.append(str(datum.value))
                    datum = next(hh_data, None)
            if is_zipped:
                fname = '_'.join(
                    (imp_mpan_core_str, exp_mpan_core_str,
                     str(supply.id) + '.csv'))
                zf.writestr(fname.encode('ascii'), titles + ''.join(outs))
            else:
                tf.write(''.join(outs))
            outs = []
            # Avoid long-running transaction
            sess.rollback()
        if is_zipped:
            zf.close()
        else:
            tf.close()
    except BaseException:
        msg = traceback.format_exc()
        if is_zipped and zf is not None:
            zf.writestr('error.txt', msg)
            zf.close()
        elif tf is not None:
            tf.write(msg)
            # Close so the error text is flushed before the rename below.
            tf.close()
        else:
            # Failed before the output file was opened: report to stderr
            # rather than raising a secondary error here.
            sys.stderr.write(msg + '\n')
    finally:
        if sess is not None:
            sess.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
def none_content(site_codes, typ, start_date, finish_date, user, file_name):
    """Export half-hourly site-level data of kind ``typ`` as a ZIP of
    per-site CSVs, one row per CT day with up to 50 half-hour columns.

    Fixes over the previous version: on failure the traceback is recorded in
    the zip with ``ZipFile.writestr`` — the old ``zf.write(msg)`` treated the
    traceback text as a filesystem path and raised instead of recording the
    error — and both the error write and the final rename are guarded so an
    early failure (before the zip was opened or named) no longer raises a
    secondary NameError/AttributeError.  The per-site zip entry timestamp is
    now ``%Y%m%d%H%M``, consistent with the other reports in this module
    (it was ``%Y%m%d%M%H``, with minutes and hours transposed).
    """
    sess = zf = None
    # Set before the try so the finally block can test them safely even if
    # make_names itself fails.
    running_name = finished_name = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            file_name, user)
        sites = (sess.query(Site).join(SiteEra).join(Era).filter(
            SiteEra.is_physical == true(),
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date,
        ))
        if site_codes is not None:
            sites = sites.filter(Site.code.in_(site_codes))
        zf = zipfile.ZipFile(running_name, "w")
        start_date_str = hh_format(start_date)
        finish_date_str = hh_format(finish_date)
        for site in sites:
            buf = StringIO()
            writer = csv.writer(buf, lineterminator="\n")
            writer.writerow([
                "Site Code", "Site Name", "Associated Site Codes",
                "Sources", "Generator Types", "From", "To", "Type", "Date",
            ] + list(map(str, range(1, 51))))
            associates = " ".join(
                s.code for s in site.find_linked_sites(
                    sess, start_date, finish_date))
            # Collect the distinct source codes and generator types of the
            # site's physical supplies over the period.
            source_codes = set()
            gen_types = set()
            for supply in (sess.query(Supply).join(Era).join(SiteEra).filter(
                    SiteEra.is_physical == true(), SiteEra.site == site,
                    Era.start_date <= finish_date,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= start_date),
            ).distinct().options(joinedload(Supply.source),
                                 joinedload(Supply.generator_type))):
                source_codes.add(supply.source.code)
                gen_type = supply.generator_type
                if gen_type is not None:
                    gen_types.add(gen_type.code)
            source_codes_str = ", ".join(sorted(source_codes))
            gen_types_str = ", ".join(sorted(gen_types))
            # One CSV row per CT day; a new row starts at each CT midnight.
            row = None
            for hh in site.hh_data(sess, start_date, finish_date):
                ct_start_date = to_ct(hh["start_date"])
                if ct_start_date.hour == 0 and ct_start_date.minute == 0:
                    if row is not None:
                        writer.writerow(row)
                    row = [
                        site.code, site.name, associates, source_codes_str,
                        gen_types_str, start_date_str, finish_date_str, typ,
                        ct_start_date.strftime("%Y-%m-%d"),
                    ]
                # NOTE(review): assumes hh_data starts at a CT midnight; a
                # first datum mid-day would find row still None — confirm.
                row.append(str(round(hh[typ], 2)))
            if row is not None:
                writer.writerow(row)
            zf.writestr(
                f"{site.code}_{finish_date.strftime('%Y%m%d%H%M')}.csv",
                buf.getvalue(),
            )
            # Avoid long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # Record the traceback as a zip member.  ZipFile.write() interprets
        # its argument as a path on disk, so writestr must be used here.
        if zf is not None:
            zf.writestr("error.txt", msg)
    finally:
        if sess is not None:
            sess.close()
        if zf is not None:
            zf.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
def content(
    start_date_ct, finish_date_ct, imp_related, channel_type, is_zipped,
    supply_id, mpan_cores, user,
):
    """Export half-hourly data for supplies as a CSV, or a ZIP of per-supply
    CSVs, with one row per CT day and up to 50 half-hour columns (missing
    half hours are empty cells).  Optionally restricted to a single supply
    or a list of MPAN cores.

    Fixes over the previous version: the error path no longer assumes the
    output file was already opened (previously an early failure raised
    AttributeError on ``zf``/``tf`` or NameError on ``running_name`` in the
    ``finally`` block, masking the original traceback), and the text file is
    closed after writing the error so the message is flushed before the
    rename.
    """
    start_date, finish_date = to_utc(start_date_ct), to_utc(finish_date_ct)
    zf = sess = tf = None
    # Guard against failures occurring before the output file is named:
    # the finally block must not raise NameError and mask the real error.
    running_name = finished_name = None
    base_name = ["supplies_hh_data", finish_date_ct.strftime("%Y%m%d%H%M")]
    cache = {}
    try:
        sess = Session()
        supplies = (sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date,
        ).order_by(Supply.id).distinct())
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            # Name the download after the supply's MPAN core, preferring
            # the import one.
            first_era = (sess.query(Era).filter(
                Era.supply == supply,
                or_(Era.finish_date == null(),
                    Era.finish_date >= start_date),
                Era.start_date <= finish_date,
            ).order_by(Era.start_date).first())
            if first_era.imp_mpan_core is None:
                name_core = first_era.exp_mpan_core
            else:
                name_core = first_era.imp_mpan_core
            base_name.append("supply_" + name_core.replace(" ", "_"))
        if mpan_cores is not None:
            supplies = supplies.filter(
                or_(Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
            base_name.append("filter")
        cf = StringIO()
        writer = csv.writer(cf, lineterminator="\n")
        titles = [
            "Import MPAN Core",
            "Export MPAN Core",
            "Import Related?",
            "Channel Type",
            "HH Start Clock-Time",
        ] + list(range(1, 51))
        writer.writerow(titles)
        titles_csv = cf.getvalue()
        cf.close()
        running_name, finished_name = chellow.dloads.make_names(
            "_".join(base_name) + (".zip" if is_zipped else ".csv"), user)
        if is_zipped:
            zf = zipfile.ZipFile(running_name, "w", zipfile.ZIP_DEFLATED)
        else:
            tf = open(running_name, mode="w", newline="")
            tf.write(titles_csv)
        for supply in supplies:
            cf = StringIO()
            writer = csv.writer(cf, lineterminator="\n")
            # MPAN cores are taken from the era in force at finish_date.
            era = supply.find_era_at(sess, finish_date)
            if era is None:
                imp_mpan_core_str = exp_mpan_core_str = "NA"
            else:
                if era.imp_mpan_core is None:
                    imp_mpan_core_str = "NA"
                else:
                    imp_mpan_core_str = era.imp_mpan_core
                if era.exp_mpan_core is None:
                    exp_mpan_core_str = "NA"
                else:
                    exp_mpan_core_str = era.exp_mpan_core
            imp_related_str = "TRUE" if imp_related else "FALSE"
            hh_data = iter(
                sess.query(HhDatum).join(Channel).join(Era).filter(
                    Era.supply == supply,
                    HhDatum.start_date >= start_date,
                    HhDatum.start_date <= finish_date,
                    Channel.imp_related == imp_related,
                    Channel.channel_type == channel_type,
                ).order_by(HhDatum.start_date))
            datum = next(hh_data, None)
            # Walk every half hour, starting a new row at each CT midnight;
            # data cells are merged against the sorted datum stream.
            row = []
            for current_date in hh_range(cache, start_date, finish_date):
                dt_ct = to_ct(current_date)
                if dt_ct.hour == 0 and dt_ct.minute == 0:
                    if len(row) > 0:
                        writer.writerow(row)
                    row = [
                        imp_mpan_core_str,
                        exp_mpan_core_str,
                        imp_related_str,
                        channel_type,
                        dt_ct.strftime("%Y-%m-%d"),
                    ]
                if datum is not None and datum.start_date == current_date:
                    row.append(datum.value)
                    datum = next(hh_data, None)
                else:
                    row.append(None)
            if len(row) > 0:
                writer.writerow(row)
            if is_zipped:
                fname = "_".join(
                    (imp_mpan_core_str, exp_mpan_core_str,
                     str(supply.id) + ".csv"))
                zf.writestr(fname.encode("ascii"), titles_csv + cf.getvalue())
            else:
                tf.write(cf.getvalue())
            cf.close()
            # Avoid long-running transaction
            sess.rollback()
        if is_zipped:
            zf.close()
        else:
            tf.close()
    except BaseException:
        msg = traceback.format_exc()
        if is_zipped and zf is not None:
            zf.writestr("error.txt", msg)
            zf.close()
        elif tf is not None:
            tf.write(msg)
            # Close so the error text is flushed before the rename below.
            tf.close()
        else:
            # Failed before the output file was opened: report to stderr
            # rather than raising a secondary error here.
            sys.stderr.write(msg + "\n")
    finally:
        if sess is not None:
            sess.close()
        if running_name is not None:
            os.rename(running_name, finished_name)
def run(self):
    """Background thread: import RCRC rates from the Elexon Portal.

    Loops until ``self.stopped`` is set.  Each pass (at most every 30
    minutes, or when ``self.going`` is set) looks at the month after the
    latest 'rcrc' rate script; once that month has fully elapsed it
    downloads the RCRC CSV and, if the whole month's data is present,
    appends a new rate script for it.
    """
    while not self.stopped.isSet():
        # Non-blocking acquire: skip this pass if another run is active.
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check RCRCs.")
                contract = Contract.get_non_core_by_name(sess, 'rcrc')
                latest_rs = sess.query(RateScript).filter(
                    RateScript.contract_id == contract.id).order_by(
                    RateScript.start_date.desc()).first()
                latest_rs_id = latest_rs.id
                latest_rs_start = latest_rs.start_date
                # The month to fetch is the one after the latest script.
                month_start = latest_rs_start + relativedelta(months=1)
                month_finish = month_start + relativedelta(months=1) - HH
                now = Datetime.now(pytz.utc)
                # Only try once the month has completely elapsed.
                if now > month_finish:
                    self.log(
                        "Checking to see if data is available from " +
                        str(month_start) + " to " + str(month_finish) +
                        " on Elexon Portal.")
                    config = Contract.get_non_core_by_name(
                        sess, 'configuration')
                    props = config.make_properties()
                    scripting_key = props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                    if scripting_key is None:
                        raise BadRequest(
                            "The property " +
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                            " cannot be found in the configuration "
                            "properties.")
                    contract_props = contract.make_properties()
                    url_str = ''.join(
                        (
                            contract_props['url'],
                            'file/download/RCRC_FILE?key=',
                            scripting_key))
                    r = requests.get(url_str)
                    parser = csv.reader(
                        (l.decode() for l in r.iter_lines()),
                        delimiter=',', quotechar='"')
                    piterator = iter(parser)
                    # Skip the two header rows.
                    values = next(piterator)
                    values = next(piterator)
                    month_rcrcs = {}
                    for values in piterator:
                        # Column 0 is the settlement date, column 2 the
                        # settlement period (half-hour number), column 3
                        # the RCRC value.
                        hh_date = Datetime.strptime(
                            values[0], "%d/%m/%Y").replace(tzinfo=pytz.utc)
                        hh_date += relativedelta(minutes=30*int(values[2]))
                        if month_start <= hh_date <= month_finish:
                            month_rcrcs[key_format(hh_date)] = values[3]
                    # Complete only if the month's final half hour is there.
                    if key_format(month_finish) in month_rcrcs:
                        self.log("The whole month's data is there.")
                        # Build the rate script as Python source.
                        script = "def rates():\n return {\n" + \
                            ',\n'.join(
                                "'" + k + "': " + month_rcrcs[k]
                                for k in sorted(month_rcrcs.keys())) + "}"
                        set_read_write(sess)
                        # Re-fetch now the session is read-write.
                        contract = Contract.get_non_core_by_name(
                            sess, 'rcrc')
                        rs = RateScript.get_by_id(sess, latest_rs_id)
                        # Truncate the latest script at the month end, then
                        # append the new month's script.
                        contract.update_rate_script(
                            sess, rs, rs.start_date, month_finish,
                            rs.script)
                        contract.insert_rate_script(
                            sess, month_start, script)
                        sess.commit()
                        self.log("Added new rate script.")
                    else:
                        msg = "There isn't a whole month there yet."
                        if len(month_rcrcs) > 0:
                            msg += " The last date is " + \
                                sorted(month_rcrcs.keys())[-1]
                        self.log(msg)
            # NOTE(review): bare except; and if Session() raised, sess is
            # None and sess.rollback() raises AttributeError here.
            except:
                self.log("Outer problem " + traceback.format_exc())
                sess.rollback()
            finally:
                self.lock.release()
                self.log("Finished checking RCRC rates.")
                if sess is not None:
                    sess.close()
        # Sleep up to 30 minutes, or until woken via self.going.
        self.going.wait(30 * 60)
        self.going.clear()
def run(self): sess = None try: self._log("Starting to parse the file with '" + self.parser_name + "'.") sess = Session() g_batch = GBatch.get_by_id(sess, self.g_batch_id) raw_bills = self.parser.make_raw_bills() self._log("Successfully parsed the file, and now I'm starting to " "insert the raw bills.") for self.bill_num, raw_bill in enumerate(raw_bills): try: bill_type = BillType.get_by_code( sess, raw_bill['bill_type_code']) g_supply = GSupply.get_by_mprn(sess, raw_bill['mprn']) g_bill = g_batch.insert_g_bill( sess, g_supply, bill_type, raw_bill['reference'], raw_bill['account'], raw_bill['issue_date'], raw_bill['start_date'], raw_bill['finish_date'], raw_bill['kwh'], raw_bill['net_gbp'], raw_bill['vat_gbp'], raw_bill['gross_gbp'], raw_bill['raw_lines'], raw_bill['breakdown']) sess.flush() for raw_read in raw_bill['reads']: prev_type = GReadType.get_by_code( sess, raw_read['prev_type_code']) pres_type = GReadType.get_by_code( sess, raw_read['pres_type_code']) g_unit = GUnit.get_by_code(sess, raw_read['unit']) g_read = g_bill.insert_g_read( sess, raw_read['msn'], g_unit, raw_read['correction_factor'], raw_read['calorific_value'], raw_read['prev_value'], raw_read['prev_date'], prev_type, raw_read['pres_value'], raw_read['pres_date'], pres_type) sess.expunge(g_read) self.successful_bills.append(raw_bill) sess.expunge(g_bill) except BadRequest as e: sess.rollback() raw_bill['error'] = e.description self.failed_bills.append(raw_bill) if len(self.failed_bills) == 0: sess.commit() self._log( "All the bills have been successfully loaded and attached " "to the batch.") else: sess.rollback() self._log("The import has finished, but " + str(len(self.failed_bills)) + " bills failed to load, " + "and so the whole import has been rolled back.") except BadRequest as e: self._log(e.description) except BaseException: self._log("I've encountered a problem: " + traceback.format_exc()) finally: if sess is not None: sess.rollback() sess.close()
def content(contract_id, end_year, end_month, months, user):
    """Write a CSV of DC (data-collector) virtual bills for a contract.

    For every era attached to the DC contract that overlaps the requested
    month range, the contract's ``virtual_bill`` function is run over the
    era's chunk of the range and one CSV row is written.  The file is
    produced via ``chellow.dloads``: written under a 'running' name and
    renamed to its finished name when complete.
    """
    caches = {}
    sess = f = writer = supply_source = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)
        month_list = list(
            c_months_u(
                finish_year=end_year, finish_month=end_month,
                months=months))
        # First month's start to last month's finish.
        start_date, finish_date = month_list[0][0], month_list[-1][-1]
        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            "dc_virtual_bills.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        bill_titles = chellow.computer.contract_func(
            caches, contract, "virtual_bill_titles")()
        header_titles = [
            "Import MPAN Core",
            "Export MPAN Core",
            "Start Date",
            "Finish Date",
        ]
        vb_func = chellow.computer.contract_func(
            caches, contract, "virtual_bill")
        writer.writerow(header_titles + bill_titles)
        for era in (sess.query(Era).distinct().filter(
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract,
                ).options(joinedload(Era.channels)).order_by(
                Era.supply_id)):
            imp_mpan_core = era.imp_mpan_core
            # An era with no import MPAN core is treated as export-only.
            if imp_mpan_core is None:
                imp_mpan_core_str = ""
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core
            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = \
                "" if exp_mpan_core is None else exp_mpan_core
            # Clip the era to the requested range.
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            vals = [
                imp_mpan_core_str,
                exp_mpan_core_str,
                hh_format(chunk_start),
                hh_format(chunk_finish),
            ]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era,
                is_import, caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            # Known titles first, in order; any remaining keys are
            # appended as name/value pairs.
            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))
            writer.writerow(vals)
            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += (
                "with supply " + supply_source.mpan_core +
                " starting at " + hh_format(supply_source.start_date) +
                " ")
        msg += str(e)
        # writer is None if the failure happened before the output file
        # was opened; in that case there's nowhere to report to.
        if writer is not None:
            writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc()
        if f is not None:
            f.write(msg)
    finally:
        # Only close/rename if the file was actually opened — an early
        # exception would otherwise raise again here and mask the error.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
def content(year, month, months, supply_id, user):
    """Write a CSV of register reads that fall within a month range.

    A read is included when either its previous or its present read date
    lies inside the range.  Rows are grouped by supply, then by bill.
    If ``supply_id`` is given, only that supply is reported.
    """
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "register_reads.csv", user)
        f = open(running_name, mode="w", newline="")
        w = csv.writer(f, lineterminator="\n")
        titles = (
            "Duration Start",
            "Duration Finish",
            "Supply Id",
            "Import MPAN Core",
            "Export MPAN Core",
            "Batch Reference",
            "Bill Id",
            "Bill Reference",
            "Bill Issue Date",
            "Bill Type",
            "Register Read Id",
            "TPR",
            "Coefficient",
            "Previous Read Date",
            "Previous Read Value",
            "Previous Read Type",
            "Present Read Date",
            "Present Read Value",
            "Present Read Type",
        )
        w.writerow(titles)
        month_pairs = list(
            c_months_u(finish_year=year, finish_month=month, months=months))
        # First month's start to last month's finish.
        start_date, finish_date = month_pairs[0][0], month_pairs[-1][-1]
        # Supplies with at least one qualifying read in the range.
        supplies = (sess.query(Supply).join(Bill).join(RegisterRead).filter(
            or_(
                and_(
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date,
                ),
                and_(
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date,
                ),
            )).order_by(Bill.supply_id).distinct())
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Bill.supply == supply)
        for supply in supplies:
            # Reuses the parameter name; the filter above has already been
            # applied so this is harmless.
            supply_id = supply.id
            for bill, batch, bill_type in (sess.query(
                    Bill, Batch, BillType).join(Batch).join(BillType).join(
                    RegisterRead).filter(
                    Bill.supply == supply,
                    or_(
                        and_(
                            RegisterRead.present_date >= start_date,
                            RegisterRead.present_date <= finish_date,
                        ),
                        and_(
                            RegisterRead.previous_date >= start_date,
                            RegisterRead.previous_date <= finish_date,
                        ),
                    ),
                    )):
                era = supply.find_era_at(sess, bill.start_date)
                if era is None:
                    # The bill starts outside any era: fall back to the
                    # nearest era (first or last) for the MPAN cores.
                    eras = (sess.query(Era).filter(
                        Era.supply == supply).order_by(
                        Era.start_date).all())
                    if bill.start_date < eras[0].start_date:
                        era = eras[0]
                    else:
                        era = eras[-1]
                for read in (sess.query(RegisterRead).filter(
                        RegisterRead.bill == bill,
                        or_(
                            and_(
                                RegisterRead.present_date >= start_date,
                                RegisterRead.present_date <= finish_date,
                            ),
                            and_(
                                RegisterRead.previous_date >= start_date,
                                RegisterRead.previous_date <= finish_date,
                            ),
                        ),
                        ).options(
                        joinedload(RegisterRead.tpr),
                        joinedload(RegisterRead.previous_type),
                        joinedload(RegisterRead.present_type),
                        )):
                    vals = [
                        start_date,
                        finish_date,
                        supply_id,
                        era.imp_mpan_core,
                        era.exp_mpan_core,
                        batch.reference,
                        bill.id,
                        bill.reference,
                        bill.issue_date,
                        bill_type.code,
                        read.id,
                        # A read with no TPR is reported as "md" —
                        # presumably maximum demand; TODO confirm.
                        "md" if read.tpr is None else read.tpr.code,
                        read.coefficient,
                        read.previous_date,
                        read.previous_value,
                        read.previous_type.code,
                        read.present_date,
                        read.present_value,
                        read.present_type.code,
                    ]
                    w.writerow(csv_make_val(v) for v in vals)
            # Avoid a long-running transaction
            sess.rollback()
    except BadRequest as e:
        w.writerow([e.description])
    except BaseException:
        msg = traceback.format_exc()
        f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user):
    """Write an ODS scenario report with site- and supply-level sheets.

    Walks month by month over the scenario duration; for each site it
    categorises the metering, applies any 'used'/'generated' kW changes
    from the scenario properties, runs the supplier/DC/MOP virtual-bill
    functions and writes one supply-level row per calculation plus one
    site-level summary row per site per month.  If ``scenario_props`` is
    None it is loaded from the supplier contract ``scenario_id``.
    """
    now = Datetime.now(pytz.utc)
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs
    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(
                sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)
        # Build 'future rate' functions for each market-role-Z contract
        # mentioned in the scenario properties.
        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue
            try:
                rate_start = props['start_date']
            except KeyError:
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")
            if rate_start is not None:
                rate_start = rate_start.replace(tzinfo=pytz.utc)
            lib = importlib.import_module('chellow.' + contract.name)
            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}
        start_date = scenario_props['scenario_start']
        if start_date is None:
            # Default: the start of the current month.
            start_date = Datetime(
                now.year, now.month, 1, tzinfo=pytz.utc)
        else:
            start_date = start_date.replace(tzinfo=pytz.utc)
        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)
        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None
        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = kwh_start.replace(tzinfo=pytz.utc)
        # Sites with at least one era overlapping the scenario window.
        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            Era.start_date <= finish_date,
            or_(
                Era.finish_date == null(),
                Era.finish_date >= start_date)).distinct().order_by(
            Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.filter(Era.supply == supply)
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        f = odswriter.writer(rf, '1.1')
        group_tab = f.new_sheet("Site Level")
        sup_tab = f.new_sheet("Supply Level")
        # Parse the optional 'kw_changes' CSV from the scenario props:
        # SITE_CODE, USED / GENERATED, DATE, MULTIPLIER per row.
        changes = defaultdict(list, {})
        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''
        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) +
                    " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = Datetime.strptime(
                date_str.strip(), "%Y-%m-%d").replace(tzinfo=pytz.utc)
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})
        sup_header_titles = [
            'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source',
            'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh',
            'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh',
            'used-3rd-party-kwh', 'import-net-gbp', 'export-net-gbp',
            'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp',
            'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp',
            'used-3rd-party-gbp', 'billed-import-net-kwh',
            'billed-import-net-gbp']
        # Collect virtual-bill column titles per contract type across all
        # relevant contracts, preserving first-seen order.
        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr) \
                .join(Era.supply).join(Source).filter(
                    Era.start_date <= start_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles', None)
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)
        sup_tab.writerow(
            sup_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] +
            ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        group_tab.writerow(site_header_titles + summary_titles)
        sites = sites.all()
        month_start = start_date
        # Main month loop.
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_associates = set()
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                # First pass (non-physical groups): work out the site's
                # metering category and the sources/generator types present.
                for group in site.groups(
                        sess, month_start, month_finish, False):
                    site_associates.update(
                        set(
                            s.code for s in group.sites
                            if s.code != site.code))
                    for cand_supply in group.supplies:
                        site_sources.add(cand_supply.source.code)
                        if cand_supply.generator_type is not None:
                            site_gen_types.add(
                                cand_supply.generator_type.code)
                        for cand_era in sess.query(Era).filter(
                                Era.supply == cand_supply,
                                Era.start_date <= group.finish_date,
                                or_(
                                    Era.finish_date == null(),
                                    Era.finish_date >= group.start_date)). \
                                options(
                                    joinedload(Era.channels),
                                    joinedload(Era.pc),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):
                            # Category precedence: hh > amr > nhh >
                            # unmetered; never downgrade.
                            if site_category != 'hh':
                                if cand_era.pc.code == '00':
                                    site_category = 'hh'
                                elif site_category != 'amr':
                                    if len(cand_era.channels) > 0:
                                        site_category = 'amr'
                                    elif site_category != 'nhh':
                                        if cand_era.mtc.meter_type.code \
                                                not in ['UM', 'PH']:
                                            site_category = 'nhh'
                                        else:
                                            site_category = 'unmetered'
                # Second pass (physical groups): the actual calculations.
                for group in site.groups(
                        sess, month_start, month_finish, True):
                    calcs = []
                    deltas = defaultdict(int)
                    group_associates = set(
                        s.code for s in group.sites if s.code != site.code)
                    for supply in group.supplies:
                        if supply_id is not None and \
                                supply.id != supply_id:
                            continue
                        for era in sess.query(Era).join(Supply) \
                                .join(Source).filter(
                                    Era.supply == supply,
                                    Era.start_date <= group.finish_date,
                                    or_(
                                        Era.finish_date == null(),
                                        Era.finish_date >=
                                        group.start_date)) \
                                .options(
                                    joinedload(Era.ssc),
                                    joinedload(Era.hhdc_contract),
                                    joinedload(Era.mop_contract),
                                    joinedload(Era.imp_supplier_contract),
                                    joinedload(Era.exp_supplier_contract),
                                    joinedload(Era.channels),
                                    joinedload(Era.imp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.exp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.cop),
                                    joinedload(Era.supply).joinedload(
                                        Supply.dno_contract),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):
                            # Clip the era to the group window.
                            if era.start_date > group.start_date:
                                ss_start = era.start_date
                            else:
                                ss_start = group.start_date
                            if hh_before(
                                    era.finish_date, group.finish_date):
                                ss_finish = era.finish_date
                            else:
                                ss_finish = group.finish_date
                            if era.imp_mpan_core is None:
                                imp_ss = None
                            else:
                                imp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start,
                                    era, True, None, report_context)
                            if era.exp_mpan_core is None:
                                exp_ss = None
                                measurement_type = imp_ss.measurement_type
                            else:
                                exp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start,
                                    era, False, None, report_context)
                                measurement_type = exp_ss.measurement_type
                            order = meter_order[measurement_type]
                            calcs.append(
                                (
                                    order, era.imp_mpan_core,
                                    era.exp_mpan_core, imp_ss, exp_ss))
                            # Channel-less (estimated) import data is
                            # accumulated for the site-level adjustment.
                            if imp_ss is not None and \
                                    len(era.channels) == 0:
                                for hh in imp_ss.hh_data:
                                    deltas[hh['start-date']] += \
                                        hh['msp-kwh']
                    imp_net_delts = defaultdict(int)
                    exp_net_delts = defaultdict(int)
                    imp_gen_delts = defaultdict(int)
                    displaced_era = chellow.computer.displaced_era(
                        sess, group, group.start_date, group.finish_date)
                    site_ds = chellow.computer.SiteSource(
                        sess, site, group.start_date, group.finish_date,
                        kwh_start, None, report_context, displaced_era)
                    # Fold the channel-less import estimates into the
                    # site-level data.  NOTE(review): deltas is a
                    # defaultdict, so this KeyError never fires — confirm
                    # intent.
                    for hh in site_ds.hh_data:
                        try:
                            delta = deltas[hh['start-date']]
                            hh['import-net-kwh'] += delta
                            hh['used-kwh'] += delta
                        except KeyError:
                            pass
                    # Apply the scenario's kW changes to each half hour.
                    for hh in site_ds.hh_data:
                        for change in site_changes:
                            if change['type'] == 'used' and \
                                    change['date'] <= hh['start-date']:
                                used = change['multiplier'] * \
                                    hh['used-kwh']
                                exp_net = max(
                                    0, hh['import-gen-kwh'] -
                                    hh['export-gen-kwh'] - used)
                                exp_net_delt = \
                                    exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += \
                                    exp_net_delt
                                displaced = hh['import-gen-kwh'] - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = used - displaced
                                imp_delt = \
                                    imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += \
                                    imp_delt
                                hh['import-net-kwh'] = imp_net
                                hh['used-kwh'] = used
                                hh['export-net-kwh'] = exp_net
                                hh['msp-kwh'] = displaced
                            elif change['type'] == 'generated' and \
                                    change['date'] <= hh['start-date']:
                                imp_gen = change['multiplier'] * \
                                    hh['import-gen-kwh']
                                imp_gen_delt = \
                                    imp_gen - hh['import-gen-kwh']
                                exp_net = max(
                                    0, imp_gen - hh['export-gen-kwh'] -
                                    hh['used-kwh'])
                                exp_net_delt = \
                                    exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += \
                                    exp_net_delt
                                displaced = imp_gen - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = hh['used-kwh'] - displaced
                                imp_net_delt = \
                                    imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += \
                                    imp_net_delt
                                imp_gen_delts[hh['start-date']] += \
                                    imp_gen_delt
                                hh['import-net-kwh'] = imp_net
                                hh['export-net-kwh'] = exp_net
                                hh['import-gen-kwh'] = imp_gen
                                hh['msp-kwh'] = displaced
                    # Displaced (on-site generation) virtual bill row.
                    if displaced_era is not None and supply_id is None:
                        month_data = {}
                        for sname in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'msp', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for xname in ('kwh', 'gbp'):
                                month_data[sname + '-' + xname] = 0
                        month_data['used-kwh'] = \
                            month_data['displaced-kwh'] = \
                            sum(hh['msp-kwh'] for hh in site_ds.hh_data)
                        disp_supplier_contract = \
                            displaced_era.imp_supplier_contract
                        disp_vb_function = chellow.computer.contract_func(
                            report_context, disp_supplier_contract,
                            'displaced_virtual_bill', None)
                        if disp_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                disp_supplier_contract.name +
                                " doesn't have the "
                                "displaced_virtual_bill() "
                                "function.")
                        disp_vb_function(site_ds)
                        disp_supplier_bill = site_ds.supplier_bill
                        try:
                            gbp = disp_supplier_bill['net-gbp']
                        except KeyError:
                            disp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                site_ds.mpan_core + \
                                ' the virtual bill ' + \
                                str(disp_supplier_bill) + \
                                ' from the contract ' + \
                                disp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'
                        month_data['used-gbp'] = \
                            month_data['displaced-gbp'] = \
                            site_ds.supplier_bill['net-gbp']
                        out = [
                            None, None,
                            displaced_era.make_meter_category(),
                            'displaced', None, None, None, None,
                            site.code, site.name,
                            ','.join(sorted(list(group_associates))),
                            month_finish] + \
                            [month_data[t] for t in summary_titles]
                        sup_tab.writerow(out)
                        for k, v in month_data.items():
                            site_month_data[k] += v
                    # One supply-level row per calculation, in meter order.
                    for i, (
                            order, imp_mpan_core, exp_mpan_core, imp_ss,
                            exp_ss) in enumerate(sorted(calcs, key=str)):
                        if imp_ss is None:
                            era = exp_ss.era
                        else:
                            era = imp_ss.era
                        supply = era.supply
                        source = supply.source
                        source_code = source.code
                        site_sources.add(source_code)
                        month_data = {}
                        for name in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'displaced', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for sname in ('kwh', 'gbp'):
                                month_data[name + '-' + sname] = 0
                        # Pick the delta bucket this supply absorbs.
                        if source_code == 'net':
                            delts = imp_net_delts
                        elif source_code == 'gen':
                            delts = imp_gen_delts
                        else:
                            delts = []
                        if len(delts) > 0 and imp_ss is not None:
                            # Spread the accumulated deltas over the
                            # supply's half hours.  NOTE(review): in the
                            # diff < 0 branch the subtraction of
                            # hh['msp-kwh'] happens after it has been
                            # zeroed — confirm intent.
                            for hh in imp_ss.hh_data:
                                diff = hh['msp-kwh'] + \
                                    delts[hh['start-date']]
                                if diff < 0:
                                    hh['msp-kwh'] = 0
                                    hh['msp-kw'] = 0
                                    delts[hh['start-date']] -= \
                                        hh['msp-kwh']
                                else:
                                    hh['msp-kwh'] += \
                                        delts[hh['start-date']]
                                    hh['msp-kw'] += hh['msp-kwh'] / 2
                                    del delts[hh['start-date']]
                            left_kwh = sum(delts.values())
                            if left_kwh > 0:
                                # Any leftover goes on the first half
                                # hour.
                                first_hh = imp_ss.hh_data[0]
                                first_hh['msp-kwh'] += left_kwh
                                first_hh['msp-kw'] += left_kwh / 2
                        imp_supplier_contract = \
                            era.imp_supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = contract_func(
                                report_context, imp_supplier_contract,
                                'virtual_bill', None)
                            if import_vb_function is None:
                                raise BadRequest(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)
                            imp_supplier_bill = imp_ss.supplier_bill
                            try:
                                gbp = imp_supplier_bill['net-gbp']
                            except KeyError:
                                imp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] -= gbp
                            kwh = sum(
                                hh['msp-kwh'] for hh in imp_ss.hh_data)
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] -= kwh
                            elif source_code in ('gen', 'gen-net'):
                                month_data['import-gen-kwh'] += kwh
                        exp_supplier_contract = \
                            era.exp_supplier_contract
                        if exp_supplier_contract is None:
                            # No export supplier: take the export kWh
                            # straight from the export ACTIVE channel.
                            kwh = sess.query(
                                func.coalesce(
                                    func.sum(
                                        cast(HhDatum.value, Float)),
                                    0)). \
                                join(Channel).filter(
                                    Channel.era == era,
                                    Channel.channel_type == 'ACTIVE',
                                    Channel.imp_related ==
                                    false()).scalar()
                            if source_code == 'gen':
                                month_data['export-net-kwh'] += kwh
                        else:
                            export_vb_function = contract_func(
                                report_context, exp_supplier_contract,
                                'virtual_bill', None)
                            export_vb_function(exp_ss)
                            exp_supplier_bill = exp_ss.supplier_bill
                            try:
                                gbp = exp_supplier_bill['net-gbp']
                            except KeyError:
                                # NOTE(review): this message names the
                                # import supply/bill/contract rather than
                                # the export ones — looks like a
                                # copy/paste slip; confirm.
                                exp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            kwh = sum(
                                hh['msp-kwh'] for hh in exp_ss.hh_data)
                            if source_code in ('net', 'gen-net'):
                                month_data['export-net-kwh'] += kwh
                                month_data['export-net-gbp'] += gbp
                            elif source_code in \
                                    ('3rd-party', '3rd-party-reverse'):
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-kwh'] -= kwh
                                month_data['used-gbp'] -= gbp
                            elif source_code == 'gen':
                                month_data['export-gen-kwh'] += kwh
                        # DC and MOP virtual bills run on whichever
                        # supply source exists.
                        sss = exp_ss if imp_ss is None else imp_ss
                        dc_contract = era.hhdc_contract
                        sss.contract_func(
                            dc_contract, 'virtual_bill')(sss)
                        dc_bill = sss.dc_bill
                        gbp = dc_bill['net-gbp']
                        mop_contract = era.mop_contract
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                        mop_bill = sss.mop_bill
                        gbp += mop_bill['net-gbp']
                        if source_code in \
                                ('3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp
                        else:
                            month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp
                        if source_code in ('gen', 'gen-net'):
                            generator_type = supply.generator_type.code
                            site_gen_types.add(generator_type)
                        else:
                            generator_type = None
                        sup_category = era.make_meter_category()
                        if CATEGORY_ORDER[site_category] < \
                                CATEGORY_ORDER[sup_category]:
                            site_category = sup_category
                        # Pro-rate actual bills that overlap the window.
                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = (
                                bill_finish -
                                bill_start).total_seconds() + \
                                (30 * 60)
                            overlap_duration = (
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)
                                ).total_seconds() + (30 * 60)
                            overlap_proportion = \
                                float(overlap_duration) / bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)
                        out = [
                            era.imp_mpan_core, era.exp_mpan_core,
                            sup_category, source_code, generator_type,
                            supply.name, era.msn, era.pc.code,
                            site.code, site.name,
                            ','.join(sorted(list(site_associates))),
                            month_finish] + [
                            month_data[t] for t in summary_titles] + \
                            [None] + [
                            (mop_bill[t] if t in mop_bill else None)
                            for t in title_dict['mop']] + [None] + \
                            [(dc_bill[t] if t in dc_bill else None)
                             for t in title_dict['dc']]
                        if imp_supplier_contract is None:
                            out += [None] * \
                                (len(title_dict['imp-supplier']) + 1)
                        else:
                            out += [None] + [
                                (
                                    imp_supplier_bill[t]
                                    if t in imp_supplier_bill
                                    else None)
                                for t in title_dict['imp-supplier']]
                        if exp_supplier_contract is not None:
                            out += [None] + [
                                (
                                    exp_supplier_bill[t]
                                    if t in exp_supplier_bill
                                    else None)
                                for t in title_dict['exp-supplier']]
                        for k, v in month_data.items():
                            site_month_data[k] += v
                        sup_tab.writerow(out)
                # Site-level summary row for this month.
                group_tab.writerow(
                    [
                        site.code, site.name,
                        ''.join(sorted(list(site_associates))),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
                # Release the transaction between sites.
                sess.rollback()
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    finally:
        if sess is not None:
            sess.close()
        try:
            f.close()
            rf.close()
            os.rename(running_name, finished_name)
        except:
            # If finishing the spreadsheet fails, write the traceback to
            # an error.txt download instead.
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
def content(start_date, finish_date, supply_id, mpan_cores, is_zipped, user):
    """Write half-hourly channel data rows as CSV (or a zip of CSVs).

    For each supply whose eras overlap the range, one row per half hour
    is emitted with value/status/last-modified for each of the six
    channel combinations (import/export x ACTIVE/REACTIVE_IMP/
    REACTIVE_EXP).  When ``is_zipped`` each supply gets its own CSV file
    inside a zip; otherwise everything goes into a single CSV.

    Bug fixed here: the SQL previously selected
    ``max(imp_reactive_imp.status)`` in the position unpacked as
    ``exp_reactive_imp_status``, so the export REACTIVE_IMP status column
    reported the import channel's status.
    """
    if is_zipped:
        file_extension = ".zip"
    else:
        file_extension = ".csv"
    base_name = (
        "hh_data_row_" + to_ct(start_date).strftime("%Y%m%d%H%M")
        + file_extension
    )
    tls = [
        "Site Code", "Imp MPAN Core", "Exp Mpan Core",
        "HH Start Clock-Time"]
    for polarity in ("Import", "Export"):
        for suffix in (
            "ACTIVE kWh",
            "ACTIVE Status",
            "ACTIVE Modified",
            "REACTIVE_IMP kVArh",
            "REACTIVE_IMP Status",
            "REACTIVE_IMP Modified",
            "REACTIVE_EXP kVArh",
            "REACTIVE_EXP Status",
            "REACTIVE_EXP Modified",
        ):
            tls.append(polarity + " " + suffix)
    titles = csv_str(tls)
    running_name, finished_name = chellow.dloads.make_names(base_name, user)
    if is_zipped:
        zf = zipfile.ZipFile(running_name, "w")
    else:
        tmp_file = open(running_name, "w")
    sess = None
    try:
        sess = Session()
        caches = {}
        supplies = (
            sess.query(Supply)
            .join(Era)
            .filter(
                Era.start_date <= finish_date,
                or_(Era.finish_date == null(),
                    Era.finish_date >= start_date),
            )
            .order_by(Era.supply_id, Era.start_date)
            .distinct()
        )
        if supply_id is not None:
            sup = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Era.supply == sup)
        if mpan_cores is not None:
            supplies = supplies.filter(
                or_(
                    Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)
                )
            )
        if not is_zipped:
            tmp_file.write(titles)
        for supply in supplies:
            # One representative physical site/era pair for the header
            # columns of every row of this supply.
            site, era = (
                sess.query(Site, Era)
                .join(Era.site_eras)
                .filter(
                    Era.supply == supply,
                    Era.start_date <= finish_date,
                    SiteEra.site_id == Site.id,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    SiteEra.is_physical == true(),
                )
                .order_by(Era.id)
                .first()
            )
            outs = []
            # Pivot the supply's channel data into one row per half hour
            # in a single SQL pass.
            data = iter(
                sess.execute(
                    """
select hh_base.start_date,
    max(imp_active.value), max(imp_active.status),
    max(imp_active.last_modified),
    max(imp_reactive_imp.value), max(imp_reactive_imp.status),
    max(imp_reactive_imp.last_modified),
    max(imp_reactive_exp.value), max(imp_reactive_exp.status),
    max(imp_reactive_exp.last_modified),
    max(exp_active.value), max(exp_active.status),
    max(exp_active.last_modified),
    max(exp_reactive_imp.value), max(exp_reactive_imp.status),
    max(exp_reactive_imp.last_modified),
    max(exp_reactive_exp.value), max(exp_reactive_exp.status),
    max(exp_reactive_exp.last_modified)
from hh_datum hh_base
    join channel on hh_base.channel_id = channel.id
    join era on channel.era_id = era.id
    left join hh_datum imp_active
        on (imp_active.id = hh_base.id
            and channel.imp_related is true
            and channel.channel_type = 'ACTIVE')
    left join hh_datum imp_reactive_imp
        on (imp_reactive_imp.id = hh_base.id
            and channel.imp_related is true
            and channel.channel_type = 'REACTIVE_IMP')
    left join hh_datum imp_reactive_exp
        on (imp_reactive_exp.id = hh_base.id
            and channel.imp_related is true
            and channel.channel_type = 'REACTIVE_EXP')
    left join hh_datum exp_active
        on (exp_active.id = hh_base.id
            and channel.imp_related is false
            and channel.channel_type = 'ACTIVE')
    left join hh_datum exp_reactive_imp
        on (exp_reactive_imp.id = hh_base.id
            and channel.imp_related is false
            and channel.channel_type = 'REACTIVE_IMP')
    left join hh_datum exp_reactive_exp
        on (exp_reactive_exp.id = hh_base.id
            and channel.imp_related is false
            and channel.channel_type = 'REACTIVE_EXP')
where supply_id = :supply_id
    and hh_base.start_date between :start_date and :finish_date
group by hh_base.start_date
order by hh_base.start_date
                    """,
                    params={
                        "supply_id": supply.id,
                        "start_date": start_date,
                        "finish_date": finish_date,
                    },
                )
            )
            datum = next(data, None)
            # Walk every half hour in the range; rows without data keep
            # just the header columns.
            for dt in hh_range(caches, start_date, finish_date):
                row = [site.code, era.imp_mpan_core, era.exp_mpan_core, dt]
                if datum is not None:
                    (
                        hh_start_date,
                        imp_active,
                        imp_active_status,
                        imp_active_modified,
                        imp_reactive_imp,
                        imp_reactive_imp_status,
                        imp_reactive_imp_modified,
                        imp_reactive_exp,
                        imp_reactive_exp_status,
                        imp_reactive_exp_modified,
                        exp_active,
                        exp_active_status,
                        exp_active_modified,
                        exp_reactive_imp,
                        exp_reactive_imp_status,
                        exp_reactive_imp_modified,
                        exp_reactive_exp,
                        exp_reactive_exp_status,
                        exp_reactive_exp_modified,
                    ) = datum
                    if hh_start_date == dt:
                        datum = next(data, None)
                        row += [
                            imp_active,
                            imp_active_status,
                            imp_active_modified,
                            imp_reactive_imp,
                            imp_reactive_imp_status,
                            imp_reactive_imp_modified,
                            imp_reactive_exp,
                            imp_reactive_exp_status,
                            imp_reactive_exp_modified,
                            exp_active,
                            exp_active_status,
                            exp_active_modified,
                            exp_reactive_imp,
                            exp_reactive_imp_status,
                            exp_reactive_imp_modified,
                            exp_reactive_exp,
                            exp_reactive_exp_status,
                            exp_reactive_exp_modified,
                        ]
                outs.append(csv_str(row))
            if is_zipped:
                zf.writestr(
                    (
                        "hh_data_row_" + str(era.id) + "_"
                        + str(era.imp_mpan_core) + "_"
                        + str(era.exp_mpan_core)
                    ).replace(" ", "")
                    + ".csv",
                    titles + "".join(outs),
                )
            else:
                tmp_file.write("".join(outs))
            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = "Problem " + traceback.format_exc()
        if is_zipped:
            zf.writestr("error.txt", msg)
        else:
            tmp_file.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if is_zipped:
            zf.close()
        else:
            tmp_file.close()
        os.rename(running_name, finished_name)
def run(self): sess = None try: sess = Session() self._log( "Starting to parse the file with '" + self.parser_name + "'.") set_read_write(sess) batch = Batch.get_by_id(sess, self.batch_id) raw_bills = self.parser.make_raw_bills() self._log( "Successfully parsed the file, and now I'm starting to " "insert the raw bills.") for self.bill_num, raw_bill in enumerate(raw_bills): try: with sess.begin_nested(): sess.execute( "set transaction isolation level serializable " "read write") bill_type = BillType.get_by_code( sess, raw_bill['bill_type_code']) bill = batch.insert_bill( sess, raw_bill['account'], raw_bill['reference'], raw_bill['issue_date'], raw_bill['start_date'], raw_bill['finish_date'], raw_bill['kwh'], raw_bill['net'], raw_bill['vat'], raw_bill['gross'], bill_type, raw_bill['breakdown']) sess.flush() for raw_read in raw_bill['reads']: tpr_code = raw_read['tpr_code'] if tpr_code is None: tpr = None else: tpr = Tpr.get_by_code(sess, tpr_code) prev_type = ReadType.get_by_code( sess, raw_read['prev_type_code']) pres_type = ReadType.get_by_code( sess, raw_read['pres_type_code']) bill.insert_read( sess, tpr, raw_read['coefficient'], raw_read['units'], raw_read['msn'], raw_read['mpan'], raw_read['prev_date'], raw_read['prev_value'], prev_type, raw_read['pres_date'], raw_read['pres_value'], pres_type) self.successful_bills.append(raw_bill) except BadRequest as e: raw_bill['error'] = str(e.description) self.failed_bills.append(raw_bill) if len(self.failed_bills) == 0: sess.commit() self._log( "All the bills have been successfully loaded and attached " "to the batch.") else: sess.rollback() self._log( "The import has finished, but there were " + str(len(self.failed_bills)) + " failures, and so the " "whole import has been rolled back.") except: sess.rollback() self._log("I've encountered a problem: " + traceback.format_exc()) finally: if sess is not None: sess.close()
def run(self):
    """Importer thread: poll the Elexon Portal for next month's TLM
    data and, once a whole month is available, record it on the 'tlms'
    contract as a new rate script.

    Loops until ``self.stopped`` is set, waking every 30 minutes (or
    when ``self.going`` is set).  ``self.lock`` prevents concurrent
    runs.
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check TLMs.")
                contract = Contract.get_non_core_by_name(sess, 'tlms')
                # The latest rate script tells us which month we have
                # data up to.
                latest_rs = sess.query(RateScript).filter(
                    RateScript.contract_id == contract.id).order_by(
                    RateScript.start_date.desc()).first()
                latest_rs_id = latest_rs.id
                next_month_start = latest_rs.start_date + \
                    relativedelta(months=1)
                next_month_finish = latest_rs.start_date + \
                    relativedelta(months=2) - HH
                now = utc_datetime_now()
                if now > next_month_start:
                    self.log(
                        "Checking to see if data is available from " +
                        str(next_month_start) + " to " +
                        str(next_month_finish) + " on Elexon Portal.")
                    config = Contract.get_non_core_by_name(
                        sess, 'configuration')
                    props = config.make_properties()
                    scripting_key = props.get(
                        ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                    if scripting_key is None:
                        raise BadRequest(
                            "The property " +
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                            " cannot be found in the configuration " +
                            "properties.")
                    contract_props = contract.make_properties()
                    url_str = ''.join(
                        (
                            contract_props['url'],
                            'file/download/TLM_FILE?key=',
                            scripting_key))
                    r = requests.get(url_str)
                    parser = csv.reader(
                        (line.decode() for line in r.iter_lines()),
                        delimiter=',', quotechar='"')
                    self.log("Opened " + url_str + ".")
                    next(parser, None)  # skip the title row
                    month_tlms = {}
                    for values in parser:
                        # Dates are civil (Europe/London) with a
                        # half-hour period number in column 2.
                        hh_date_ct = to_ct(
                            Datetime.strptime(values[0], "%d/%m/%Y"))
                        hh_date = to_utc(hh_date_ct)
                        hh_date += relativedelta(
                            minutes=30*int(values[2]))
                        if next_month_start <= hh_date <= \
                                next_month_finish:
                            month_tlms[key_format(hh_date)] = {
                                'off-taking': values[3],
                                'delivering': values[4]}
                    # The month is complete when its final half-hour is
                    # present.
                    if key_format(next_month_finish) in month_tlms:
                        self.log("The whole month's data is there.")
                        script = "def tlms():\n return {\n" + \
                            ',\n'.join(
                                "'" + k + "': " +
                                month_tlms[k]['off-taking']
                                for k in sorted(
                                    month_tlms.keys())) + "}"
                        contract = Contract.get_non_core_by_name(
                            sess, 'tlms')
                        rs = RateScript.get_by_id(sess, latest_rs_id)
                        contract.update_rate_script(
                            sess, rs, rs.start_date,
                            rs.start_date +
                            relativedelta(months=2) - HH,
                            rs.script)
                        sess.flush()
                        contract.insert_rate_script(
                            sess,
                            rs.start_date + relativedelta(months=1),
                            script)
                        sess.commit()
                        self.log("Added new rate script.")
                    else:
                        msg = "There isn't a whole month there yet."
                        if len(month_tlms) > 0:
                            # Fix: a separating space was missing
                            # ("yet.The last date").
                            msg += " The last date is " + \
                                sorted(month_tlms.keys())[-1]
                        self.log(msg)
            except BaseException:
                # Fix: was a bare 'except:'; also guard against sess
                # still being None if Session() itself failed.
                self.log("Outer problem " + traceback.format_exc())
                if sess is not None:
                    sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking TLM rates.")
        self.going.wait(30 * 60)
        self.going.clear()
def content(year, supply_id, user):
    """Write a 'supplies_triad' CSV covering the triad month (March)
    ending the financial year `year`.

    Includes every era of source 'net'/'gen-net' on profile class 00
    overlapping March, or just the supply given by `supply_id` when it
    is not None.  The file is written under a 'running' name and
    renamed to its finished name when done.
    """
    caches = {}
    sess = f = writer = None
    # Initialised so the finally clause can tell whether make_names
    # succeeded before attempting the rename.
    running_name = finished_name = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_triad.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        march_start = Datetime(year, 3, 1, tzinfo=pytz.utc)
        march_finish = Datetime(year, 4, 1, tzinfo=pytz.utc) - HH

        def triad_csv(supply_source):
            # 19 blank cells for a missing supply source or a
            # '99'-prefixed (3rd-party) MPAN core.
            if supply_source is None or \
                    supply_source.mpan_core.startswith('99'):
                return [''] * 19
            chellow.duos.duos_vb(supply_source)
            chellow.triad.hh(supply_source)
            chellow.triad.bill(supply_source)
            bill = supply_source.supplier_bill
            # Collapse rate sets with a single value into the bill.
            for rname, rset in supply_source.supplier_rate_sets.items():
                if len(rset) == 1:
                    bill[rname] = rset.pop()
            values = [supply_source.mpan_core]
            for i in range(1, 4):
                triad_prefix = 'triad-actual-' + str(i)
                for suffix in [
                        '-date', '-msp-kw', '-status', '-laf',
                        '-gsp-kw']:
                    values.append(bill[triad_prefix + suffix])
            suffixes = ['gsp-kw', 'rate', 'gbp']
            values += [bill['triad-actual-' + suf] for suf in suffixes]
            return values

        writer.writerow(
            ("Site Code", "Site Name", "Supply Name", "Source",
                "Generator Type", "Import MPAN Core", "Import T1 Date",
                "Import T1 MSP kW", "Import T1 Status", "Import T1 LAF",
                "Import T1 GSP kW", "Import T2 Date", "Import T2 MSP kW",
                "Import T2 Status", "Import T2 LAF", "Import T2 GSP kW",
                "Import T3 Date", "Import T3 MSP kW", "Import T3 Status",
                "Import T3 LAF", "Import T3 GSP kW", "Import GSP kW",
                "Import Rate GBP / kW", "Import GBP", "Export MPAN Core",
                "Export T1 Date", "Export T1 MSP kW", "Export T1 Status",
                "Export T1 LAF", "Export T1 GSP kW", "Export T2 Date",
                "Export T2 MSP kW", "Export T2 Status", "Export T2 LAF",
                "Export T2 GSP kW", "Export T3 Date", "Export T3 MSP kW",
                "Export T3 Status", "Export T3 LAF", "Export T3 GSP kW",
                "Export GSP kW", "Export Rate GBP / kW", "Export GBP"))
        forecast_date = chellow.computer.forecast_date()
        eras = sess.query(Era).join(Supply).join(Source).join(Pc).filter(
            Era.start_date <= march_finish,
            or_(Era.finish_date == null(),
                Era.finish_date >= march_start),
            Source.code.in_(('net', 'gen-net')),
            Pc.code == '00').order_by(Supply.id)
        if supply_id is not None:
            eras = eras.filter(Supply.id == supply_id)
        for era in eras:
            site = sess.query(Site).join(SiteEra).filter(
                SiteEra.is_physical == true(),
                SiteEra.era == era).one()
            supply = era.supply
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_supply_source = None
            else:
                imp_supply_source = chellow.computer.SupplySource(
                    sess, march_start, march_finish, forecast_date,
                    era, True, caches)
            exp_mpan_core = era.exp_mpan_core
            if exp_mpan_core is None:
                exp_supply_source = None
            else:
                exp_supply_source = chellow.computer.SupplySource(
                    sess, march_start, march_finish, forecast_date,
                    era, False, caches)
            gen_type = supply.generator_type
            gen_type = '' if gen_type is None else gen_type.code
            vals = []
            for value in [
                    site.code, site.name, supply.name,
                    supply.source.code, gen_type] + \
                    triad_csv(imp_supply_source) + \
                    triad_csv(exp_supply_source):
                if isinstance(value, Datetime):
                    vals.append(hh_format(value))
                else:
                    vals.append(str(value))
            writer.writerow(vals)

            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # Fix: writer may still be None if make_names()/open() failed;
        # writing unconditionally would raise AttributeError here.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
        # Fix: only rename when make_names() succeeded, otherwise
        # running_name/finished_name are undefined (NameError in the
        # original).
        if running_name is not None:
            os.rename(running_name, finished_name)
def run(self):
    """Importer thread: check whether a whole new month of BSUoS rates
    is available from the spreadsheet at the contract's configured URL
    and, if so, add it to the 'bsuos' contract as a new rate script.

    Loops until ``self.stopped`` is set, waking every 30 minutes (or
    when ``self.going`` is set).  ``self.lock`` prevents concurrent
    runs.
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check BSUoS rates.")
                contract = Contract.get_non_core_by_name(sess, 'bsuos')
                # The latest rate script tells us which month we have
                # data up to.
                latest_rs = sess.query(RateScript).filter(
                    RateScript.contract == contract).order_by(
                    RateScript.start_date.desc()).first()
                latest_rs_id = latest_rs.id
                this_month_start = latest_rs.start_date + \
                    relativedelta(months=1)
                next_month_start = this_month_start + \
                    relativedelta(months=1)
                now = Datetime.now(pytz.utc)
                props = contract.make_properties()
                if props.get('enabled', False):
                    if now > next_month_start:
                        url = props['url']
                        self.log(
                            "Checking to see if data is available "
                            "from " + str(this_month_start) + " to " +
                            str(next_month_start - HH) + " at " + url)
                        res = requests.get(url)
                        self.log(
                            "Received " + str(res.status_code) + " " +
                            res.reason)
                        book = xlrd.open_workbook(
                            file_contents=res.content)
                        sheet = book.sheet_by_index(0)
                        ct_tz = pytz.timezone('Europe/London')
                        month_bsuos = {}
                        for row_index in range(1, sheet.nrows):
                            row = sheet.row(row_index)
                            # Column 0 is an Excel date; column 1 is
                            # the half-hour period number.
                            raw_date = Datetime(
                                *xlrd.xldate_as_tuple(
                                    row[0].value, book.datemode))
                            hh_date_ct = ct_tz.localize(raw_date)
                            hh_date = pytz.utc.normalize(
                                hh_date_ct.astimezone(pytz.utc))
                            hh_date += relativedelta(
                                minutes=30*int(row[1].value))
                            if not hh_date < this_month_start and \
                                    hh_date < next_month_start:
                                month_bsuos[key_format(hh_date)] = \
                                    row[2].value
                        # The month is complete when its final
                        # half-hour is present.
                        if key_format(next_month_start - HH) in \
                                month_bsuos:
                            self.log(
                                "The whole month's data is there.")
                            script = "def rates_gbp_per_mwh():\n " \
                                "return {\n" + ',\n'.join(
                                    "'" + k + "': " +
                                    str(month_bsuos[k])
                                    for k in sorted(
                                        month_bsuos.keys())) + "}"
                            set_read_write(sess)
                            contract = Contract.get_non_core_by_name(
                                sess, 'bsuos')
                            rs = RateScript.get_by_id(
                                sess, latest_rs_id)
                            contract.update_rate_script(
                                sess, rs, rs.start_date,
                                rs.start_date +
                                relativedelta(months=2) - HH,
                                rs.script)
                            sess.flush()
                            contract.insert_rate_script(
                                sess,
                                rs.start_date +
                                relativedelta(months=1),
                                script)
                            sess.commit()
                            self.log("Added new rate script.")
                        else:
                            # Fix: the original indexed
                            # sorted(...)[-1] unconditionally, raising
                            # IndexError when no rows fell in the
                            # window; guard as the TLM importer does.
                            msg = "There isn't a whole month there yet."
                            if len(month_bsuos) > 0:
                                msg += " The last date is " + \
                                    sorted(month_bsuos.keys())[-1]
                            self.log(msg)
                else:
                    self.log(
                        "The automatic importer is disabled. To "
                        "enable it, edit the contract properties to "
                        "set 'enabled' to True.")
            except BaseException:
                # Fix: was a bare 'except:'; also guard against sess
                # still being None if Session() itself failed.
                self.log("Outer problem " + traceback.format_exc())
                if sess is not None:
                    sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking BSUoS rates.")
        self.going.wait(30 * 60)
        self.going.clear()
def run(self):
    """Importer thread: download the bank-holidays iCalendar feed and
    store each year's holidays as a rate script on the
    'bank_holidays' contract.

    Loops until ``self.stopped`` is set, waking daily (or when
    ``self.going`` is set).  ``self.lock`` prevents concurrent runs.
    """
    while not self.stopped.isSet():
        if self.lock.acquire(False):
            sess = None
            try:
                sess = Session()
                self.log("Starting to check bank holidays")
                contract = Contract.get_non_core_by_name(
                    sess, "bank_holidays")
                contract_props = contract.make_properties()
                if contract_props.get("enabled", False):
                    url_str = contract_props["url"]
                    self.log("Downloading from " + url_str + ".")
                    res = requests.get(url_str)
                    self.log(
                        " ".join(
                            ("Received", str(res.status_code),
                                res.reason)))
                    # Pull the DTSTART dates out of the iCalendar feed
                    # and group them by year.
                    PREFIX = "DTSTART;VALUE=DATE:"
                    hols = collections.defaultdict(list)
                    for line in res.text.splitlines():
                        if line.startswith(PREFIX):
                            dt = utc_datetime_parse(line[-8:], "%Y%m%d")
                            hols[dt.year].append(dt)
                    for year in sorted(hols.keys()):
                        year_start = utc_datetime(year, 1, 1)
                        year_finish = year_start + \
                            relativedelta(years=1) - HH
                        rs = sess.query(RateScript).filter(
                            RateScript.contract == contract,
                            RateScript.start_date == year_start
                        ).first()
                        if rs is None:
                            # No script for this year yet: shorten the
                            # latest one and insert a fresh script.
                            self.log(
                                "Adding a new rate script starting "
                                "at " + hh_format(year_start) + ".")
                            latest_rs = sess.query(RateScript).filter(
                                RateScript.contract == contract
                            ).order_by(
                                RateScript.start_date.desc()).first()
                            contract.update_rate_script(
                                sess, latest_rs, latest_rs.start_date,
                                year_finish, loads(latest_rs.script))
                            rs = contract.insert_rate_script(
                                sess, year_start, {})
                        script = {
                            "bank_holidays": [
                                v.strftime("%Y-%m-%d")
                                for v in hols[year]]}
                        contract.update_rate_script(
                            sess, rs, rs.start_date, rs.finish_date,
                            script)
                        sess.commit()
                        self.log(
                            "Updated rate script starting at " +
                            hh_format(year_start) + ".")
                else:
                    self.log(
                        "The automatic importer is disabled. To "
                        "enable it, edit the contract properties to "
                        "set 'enabled' to True.")
            except BaseException:
                self.log("Outer problem " + traceback.format_exc())
                # Fix: guard against sess still being None if
                # Session() itself failed; the original called
                # rollback() unconditionally.
                if sess is not None:
                    sess.rollback()
            finally:
                if sess is not None:
                    sess.close()
                self.lock.release()
                self.log("Finished checking bank holidays.")
        self.going.wait(24 * 60 * 60)
        self.going.clear()