def _find_hhs(
        sess, caches, hist_g_era, pairs, chunk_start, chunk_finish, g_cv_id,
        g_ldz_code):
    """Return a dict mapping each half-hour start date in the chunk to its
    consumption details (units, correction factor and calorific values).

    `pairs` is a list of {'start-date', 'units'} dicts, mutated in place:
    finish dates are filled in, then the list is trimmed to fit the
    [chunk_start, chunk_finish] window.
    """
    result = {}
    if not pairs:
        pairs.append({"start-date": chunk_start, "units": 0})

    # Each pair runs up to the half-hour before the next pair begins;
    # the final pair is left open-ended.
    for earlier, later in zip(pairs, pairs[1:]):
        earlier["finish-date"] = later["start-date"] - HH
    pairs[-1]["finish-date"] = None

    # Stretch: pull the first pair back so the chunk start is covered.
    if hh_after(pairs[0]["start-date"], chunk_start):
        pairs[0]["start-date"] = chunk_start

    # Chop: discard boundary pairs that lie wholly outside the chunk.
    if hh_before(pairs[0]["finish-date"], chunk_start):
        del pairs[0]
    if hh_after(pairs[-1]["start-date"], chunk_finish):
        del pairs[-1]

    # Squash: clamp the remaining boundary pairs to the chunk window.
    if hh_before(pairs[0]["start-date"], chunk_start):
        pairs[0]["start-date"] = chunk_start
    if hh_after(pairs[-1]["finish-date"], chunk_finish):
        pairs[-1]["finish-date"] = chunk_finish

    correction = float(hist_g_era.correction_factor)
    era_unit = hist_g_era.g_unit
    era_unit_code = era_unit.code
    era_unit_factor = float(era_unit.factor)

    for pair in pairs:
        consumed = pair["units"]
        for hh_start in hh_range(
                caches, pair["start-date"], pair["finish-date"]):
            cv, avg_cv = find_cv(sess, caches, g_cv_id, hh_start, g_ldz_code)
            result[hh_start] = {
                "unit_code": era_unit_code,
                "unit_factor": era_unit_factor,
                "units_consumed": consumed,
                "correction_factor": correction,
                "calorific_value": cv,
                "avg_cv": avg_cv,
            }
    return result
def _find_hhs(
        sess, caches, hist_g_era, pairs, chunk_start, chunk_finish, g_cv_id,
        g_ldz_code):
    """Map each half-hour start date in [chunk_start, chunk_finish] to a dict
    of consumption details (units, unit code/factor, correction factor and
    calorific values) derived from the `pairs` list.

    `pairs` is a list of {'start-date': ..., 'units': ...} dicts and is
    mutated in place: finish dates are assigned, then the list is stretched,
    chopped and squashed to fit the chunk window.
    """
    hhs = {}
    if len(pairs) == 0:
        # No data: synthesize a single zero-consumption pair for the chunk.
        pairs.append({'start-date': chunk_start, 'units': 0})

    # set finish dates: each pair ends the half-hour before the next starts;
    # the last pair is open-ended (None).
    for i in range(1, len(pairs)):
        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] - HH
    pairs[-1]['finish-date'] = None

    # stretch: pull the first pair back to cover the chunk start
    if hh_after(pairs[0]['start-date'], chunk_start):
        pairs[0]['start-date'] = chunk_start

    # chop: drop boundary pairs lying wholly outside the chunk
    if hh_before(pairs[0]['finish-date'], chunk_start):
        del pairs[0]
    if hh_after(pairs[-1]['start-date'], chunk_finish):
        del pairs[-1]

    # squash: clamp the remaining boundary pairs to the chunk window
    if hh_before(pairs[0]['start-date'], chunk_start):
        pairs[0]['start-date'] = chunk_start
    if hh_after(pairs[-1]['finish-date'], chunk_finish):
        pairs[-1]['finish-date'] = chunk_finish

    cf = float(hist_g_era.correction_factor)
    g_unit = hist_g_era.g_unit
    unit_code, unit_factor = g_unit.code, float(g_unit.factor)
    for pair in pairs:
        units = pair['units']
        for hh_date in hh_range(
                caches, pair['start-date'], pair['finish-date']):
            cv, avg_cv = find_cv(sess, caches, g_cv_id, hh_date, g_ldz_code)
            hhs[hh_date] = {
                'unit_code': unit_code,
                'unit_factor': unit_factor,
                'units_consumed': units,
                'correction_factor': cf,
                'calorific_value': cv,
                'avg_cv': avg_cv}
    return hhs
def mpan_bit(
        sess, supply, is_import, num_hh, eras, chunk_start, chunk_finish,
        forecast_date, caches):
    """Return one direction's worth of CSV columns for a supply: LLFC, MPAN
    core, supply capacity, supplier, kWh totals, GSP kWh, maximum demand
    figures and bad half-hour count, joined with commas.

    `is_import` selects the import or export side of each era; `num_hh` is
    the number of half-hours in [chunk_start, chunk_finish].
    """
    mpan_core_str = ''
    llfc_code = ''
    sc_str = ''
    supplier_contract_name = ''
    gsp_kwh = ''
    for era in eras:
        mpan_core = era.imp_mpan_core if is_import else era.exp_mpan_core
        if mpan_core is None:
            continue
        mpan_core_str = mpan_core
        if is_import:
            supplier_contract_name = era.imp_supplier_contract.name
            llfc = era.imp_llfc
            sc = era.imp_sc
        else:
            supplier_contract_name = era.exp_supplier_contract.name
            llfc = era.exp_llfc
            sc = era.exp_sc
        llfc_code = llfc.code
        sc_str = str(sc)
        # BUG FIX: the original read `not in ('gen')`, which is a substring
        # test against the string 'gen' (missing tuple comma). Use a
        # one-element tuple so only the exact source code 'gen' is excluded.
        if llfc.is_import and era.pc.code == '00' and \
                supply.source.code not in ('gen',) and \
                supply.dno_contract.name != '99':
            if gsp_kwh == '':
                # Switch from the 'no data' marker to a numeric accumulator.
                gsp_kwh = 0
            if chunk_start > era.start_date:
                block_start = chunk_start
            else:
                block_start = era.start_date
            if hh_before(chunk_finish, era.finish_date):
                block_finish = chunk_finish
            else:
                block_finish = era.finish_date
            supply_source = chellow.computer.SupplySource(
                sess, block_start, block_finish, forecast_date, era,
                is_import, None, caches)
            chellow.duos.duos_vb(supply_source)
            gsp_kwh += sum(
                datum['gsp-kwh'] for datum in supply_source.hh_data)
    md = 0
    sum_kwh = 0
    non_actual = 0
    date_at_md = None
    kvarh_at_md = None
    num_na = 0
    for datum in sess.query(HhDatum).join(Channel).join(Era).filter(
            Era.supply_id == supply.id, Channel.imp_related == is_import,
            Channel.channel_type == 'ACTIVE',
            HhDatum.start_date >= chunk_start,
            HhDatum.start_date <= chunk_finish).order_by(HhDatum.id):
        hh_value = float(datum.value)
        hh_status = datum.status
        if hh_value > md:
            md = hh_value
            date_at_md = datum.start_date
            # Largest reactive reading coinciding with the new maximum demand.
            kvarh_at_md = sess.query(
                cast(func.max(HhDatum.value), Float)).join(Channel).join(
                Era).filter(
                Era.supply == supply, Channel.imp_related == is_import,
                Channel.channel_type != 'ACTIVE',
                HhDatum.start_date == date_at_md).one()[0]
        sum_kwh += hh_value
        if hh_status != 'A':
            non_actual += hh_value
            num_na += 1
    # Half-hourly kWh * 2 gives kW.
    kw_at_md = md * 2
    if kvarh_at_md is None:
        kva_at_md = 'None'
    else:
        # kvarh * 2 converts the half-hour reactive reading to kVAr, then
        # combine with kW vectorially to get kVA.
        kva_at_md = (kw_at_md ** 2 + (kvarh_at_md * 2) ** 2) ** 0.5
    # Missing half-hours plus non-actual ones count as 'bad'.
    num_bad = num_hh - sess.query(HhDatum).join(Channel).join(Era).filter(
        Era.supply == supply, Channel.imp_related == is_import,
        Channel.channel_type == 'ACTIVE',
        HhDatum.start_date >= chunk_start,
        HhDatum.start_date <= chunk_finish).count() + num_na
    date_at_md_str = '' if date_at_md is None else hh_format(date_at_md)
    return ','.join(str(val) for val in [
        llfc_code, mpan_core_str, sc_str, supplier_contract_name, sum_kwh,
        non_actual, gsp_kwh, kw_at_md, date_at_md_str, kva_at_md, num_bad])
def content(supply_id, start_date, finish_date, user):
    """Write the 'supplies duration' CSV report: one row per supply active
    in [start_date, finish_date], with import and export columns produced by
    mpan_bit(). The file is written under a running name and renamed when
    complete.
    """
    forecast_date = datetime.datetime.max.replace(tzinfo=pytz.utc)
    caches = {}
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, "w")
        f.write(
            ','.join(
                (
                    "Supply Id", "Supply Name", "Source", "Generator Type",
                    "Site Ids", "Site Names", "From", "To", "PC", "MTC",
                    "CoP", "SSC", "Normal Reads", "Type", "Import LLFC",
                    "Import MPAN Core", "Import Supply Capacity",
                    "Import Supplier", "Import Total MSP kWh",
                    "Import Non-actual MSP kWh", "Import Total GSP kWh",
                    "Import MD / kW", "Import MD Date", "Import MD / kVA",
                    "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                    "Export Supply Capacity", "Export Supplier",
                    "Export Total MSP kWh", "Export Non-actual MSP kWh",
                    "Export GSP kWh", "Export MD / kW", "Export MD Date",
                    "Export MD / kVA", "Export Bad HHs")))
        supplies = sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(Supply.id).distinct()
        if supply_id is not None:
            supplies = supplies.filter(
                Supply.id == Supply.get_by_id(sess, supply_id).id)
        for supply in supplies:
            eras = supply.find_eras(sess, start_date, finish_date)
            # Sites are taken from the last era in the range.
            era = eras[-1]
            site_codes = ', '.join(
                site_era.site.code for site_era in era.site_eras)
            site_names = ', '.join(
                site_era.site.name for site_era in era.site_eras)
            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code
            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code
            # A 'prime' read is one belonging to the latest-issued bill
            # covering its period; count distinct (date, meter) prime reads.
            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.previous_date >= start_date,
                        RegisterRead.previous_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES)),
                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.present_date >= start_date,
                        RegisterRead.present_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(str(rdate) + "_" + read.msn)
            supply_type = era.make_meter_category()
            if eras[0].start_date > start_date:
                chunk_start = eras[0].start_date
            else:
                chunk_start = start_date
            if hh_before(finish_date, era.finish_date):
                chunk_finish = finish_date
            else:
                chunk_finish = era.finish_date
            num_hh = int(
                (chunk_finish - (chunk_start - HH)).total_seconds() /
                (30 * 60))
            f.write(
                '\n' + ','.join(
                    ('"' + str(value) + '"') for value in [
                        supply.id, supply.name, supply.source.code,
                        generator_type, site_codes, site_names,
                        hh_format(start_date), hh_format(finish_date),
                        era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                        len(prime_reads), supply_type]) + ',')
            f.write(
                mpan_bit(
                    sess, supply, True, num_hh, eras, chunk_start,
                    chunk_finish, forecast_date, caches) + "," + mpan_bit(
                    sess, supply, False, num_hh, eras, chunk_start,
                    chunk_finish, forecast_date, caches))
    except BaseException:
        # Record the traceback in the output file if it's open; always echo
        # to stderr so setup failures aren't silently lost.
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if f is not None:
            f.write(msg)
    finally:
        # BUG FIX: guard against sess/f being None (e.g. Session() or
        # make_names() failing) — the unguarded close/rename masked the
        # original exception with AttributeError/NameError.
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, supply_id, user):
    """Write the CRC special-events CSV report: for each supply active in
    the CRC year starting 1 April, emit rows for new supplies,
    disconnections and changes of meter, profile class, MOP, DC and
    supplier.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'crc_special_events.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(("MPAN Core", "Site Id", "Site Name", "Date", "Event"))
        # CRC year runs 1 April to the half-hour before the next 1 April.
        year_start = datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc))

        def add_event(events, date, code, era=None, mpan_core=None):
            # Attach the event to all of the era's MPAN cores, or to the
            # explicit mpan_core when no era is given.
            if era is None:
                mpan_cores = [mpan_core]
            else:
                mpan_cores = []
                if era.imp_mpan_core is not None:
                    mpan_cores.append(era.imp_mpan_core)
                if era.exp_mpan_core is not None:
                    mpan_cores.append(era.exp_mpan_core)
            for mpan_core in mpan_cores:
                events.append(
                    {'date': date, 'code': code, 'mpan-core': mpan_core})

        if supply_id is None:
            supplies = sess.query(Supply).join(Source).join(Era).filter(
                Source.code.in_(('net', 'gen-net', 'gen')),
                Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).distinct()
        else:
            # BUG FIX: get_by_id requires the session as its first argument,
            # as at every other call site.
            supply = Supply.get_by_id(sess, supply_id)
            supplies = sess.query(Supply).filter(Supply.id == supply.id)
        for supply in supplies:
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(Era.finish_date == null(), Era.finish_date >= year_start)
                ).order_by(Era.start_date).all()
            events = []
            first_era = eras[0]
            first_era_start = first_era.start_date
            if hh_after(first_era_start, year_start):
                add_event(events, first_era_start, "New Supply", first_era)
            last_era = eras[-1]
            last_era_finish = last_era.finish_date
            if hh_before(last_era_finish, year_finish):
                add_event(events, last_era_finish, "Disconnection", last_era)
            prev_era = first_era
            for era in eras[1:]:
                if era.msn != prev_era.msn:
                    add_event(events, era.start_date, "Meter Change", era)
                if era.pc.code != prev_era.pc.code:
                    add_event(
                        events, era.start_date,
                        "Change Of Profile Class", era)
                if era.mop_contract_id != prev_era.mop_contract_id:
                    add_event(events, era.start_date, "Change Of MOP", era)
                if era.hhdc_contract_id != prev_era.hhdc_contract_id:
                    add_event(events, era.start_date, "Change Of DC", era)
                for is_import in [True, False]:
                    # NOTE(review): the import core is preferred regardless
                    # of is_import — confirm whether the export pass should
                    # use exp_mpan_core when both cores exist.
                    if era.imp_mpan_core is None:
                        mpan_core = era.exp_mpan_core
                    else:
                        mpan_core = era.imp_mpan_core
                    if is_import:
                        cur_sup = era.imp_supplier_contract
                        prev_sup = prev_era.imp_supplier_contract
                    else:
                        cur_sup = era.exp_supplier_contract
                        prev_sup = prev_era.exp_supplier_contract
                    if cur_sup is None and prev_sup is not None:
                        # BUG FIX: mpan_core was passed as the `era`
                        # positional, which would raise AttributeError on a
                        # str; pass era=None like the sibling branches.
                        add_event(
                            events, era.start_date, "End of supply",
                            None, mpan_core)
                    elif cur_sup is not None and prev_sup is None:
                        add_event(
                            events, era.start_date, "Start of supply",
                            None, mpan_core)
                    elif cur_sup is not None and prev_sup is not None \
                            and cur_sup != prev_sup:
                        add_event(
                            events, era.start_date, "Change Of Supplier",
                            None, mpan_core)
                prev_era = era
            if len(events) > 0:
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era == last_era).one()
                for event in events:
                    vals = [
                        event['mpan-core'], site.code, site.name,
                        event['date'].strftime("%Y-%m-%d %H:%M"),
                        event['code']]
                    writer.writerow(vals)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        # Guard: the writer may not exist if setup failed.
        if writer is not None:
            writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            # Only rename once the file was successfully created.
            os.rename(running_name, finished_name)
def content(g_batch_id, g_bill_id, user):
    """Write the gas bill-check CSV: for each bill in a batch (or a single
    bill), compare the 'covered' amounts taken from actual bills against a
    'virtual' bill computed by the contract's virtual_bill function, and
    emit difference columns for each *_gbp title.
    """
    forecast_date = to_utc(Datetime.max)
    report_context = {}
    sess = tmp_file = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'g_bill_check.csv', user)
        tmp_file = open(running_name, "w")
        csv_writer = csv.writer(tmp_file)
        # Either a whole batch or a single bill may be requested.
        if g_batch_id is not None:
            g_batch = GBatch.get_by_id(sess, g_batch_id)
            g_bills = sess.query(GBill).filter(
                GBill.g_batch == g_batch).order_by(GBill.reference)
        elif g_bill_id is not None:
            g_bill = GBill.get_by_id(sess, g_bill_id)
            g_bills = sess.query(GBill).filter(GBill.id == g_bill.id)
            g_batch = g_bill.g_batch
        g_contract = g_batch.g_contract
        vbf = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + g_contract.name +
                " doesn't have a function virtual_bill.")
        header_titles = [
            'batch', 'bill_reference', 'bill_type', 'bill_start_date',
            'bill_finish_date', 'mprn', 'supply_name', 'site_code',
            'site_name', 'covered_start', 'covered_finish',
            'covered_bill_ids']
        bill_titles = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill_titles')()
        # Build covered_/virtual_ column pairs, plus a difference column
        # after each *_gbp pair (the difference loop below relies on this
        # covered, virtual, difference ordering).
        titles = header_titles[:]
        for title in bill_titles:
            for prefix in ('covered_', 'virtual_'):
                titles.append(prefix + title)
            if title.endswith('_gbp'):
                titles.append('difference_' + title)
        csv_writer.writerow(titles)
        for g_bill in g_bills:
            # NOTE(review): `problem` is accumulated below but never written
            # to the output — confirm whether it should appear in a column.
            problem = ''
            g_supply = g_bill.g_supply
            read_dict = collections.defaultdict(set)
            for g_read in g_bill.g_reads:
                if not all(
                        g_read.msn == era.msn
                        for era in g_supply.find_g_eras(
                            sess, g_read.prev_date, g_read.pres_date)):
                    problem += "The MSN " + g_read.msn + \
                        " of the register read " + str(g_read.id) + \
                        " doesn't match the MSN of all the relevant eras."
                # Reads taken at the same moment on the same meter should
                # all have the same read type.
                for dt, typ in [
                        (g_read.pres_date, g_read.pres_type),
                        (g_read.prev_date, g_read.prev_type)]:
                    typ_set = read_dict[str(dt) + "-" + g_read.msn]
                    typ_set.add(typ)
                    if len(typ_set) > 1:
                        problem += " Reads taken on " + str(dt) + \
                            " have differing read types."
            g_era = g_supply.find_g_era_at(sess, g_bill.finish_date)
            if g_era is None:
                csv_writer.writerow(
                    [
                        "Extraordinary! There isn't a era for this bill!"])
                continue
            vals = {
                'batch': g_batch.reference,
                'bill_reference': g_bill.reference,
                'bill_type': g_bill.bill_type.code,
                'bill_start_date': g_bill.start_date,
                'bill_finish_date': g_bill.finish_date,
                'mprn': g_supply.mprn,
                'covered_vat_gbp': Decimal('0.00'),
                'covered_net_gbp': Decimal('0.00'),
                'covered_gross_gbp': Decimal('0.00'),
                'covered_kwh': Decimal(0),
                'covered_start': g_bill.start_date,
                'covered_finish': g_bill.finish_date,
                'covered_bill_ids': []}
            covered_primary_bill = None
            # Grow the covered window until it stabilises, so every bill
            # overlapping the window is wholly contained in it.
            enlarged = True
            while enlarged:
                enlarged = False
                for covered_bill in sess.query(GBill).filter(
                        GBill.g_supply_id == g_supply.id,
                        GBill.start_date <= vals['covered_finish'],
                        GBill.finish_date >= vals['covered_start']).order_by(
                        GBill.issue_date.desc(), GBill.start_date):
                    if covered_primary_bill is None and \
                            len(covered_bill.g_reads) > 0:
                        covered_primary_bill = covered_bill
                    if covered_bill.start_date < vals['covered_start']:
                        vals['covered_start'] = covered_bill.start_date
                        enlarged = True
                        break
                    if covered_bill.finish_date > vals['covered_finish']:
                        vals['covered_finish'] = covered_bill.finish_date
                        enlarged = True
                        break
            # Accumulate the covered totals over every bill in the window.
            for covered_bill in sess.query(GBill).filter(
                    GBill.g_supply == g_supply,
                    GBill.start_date <= vals['covered_finish'],
                    GBill.finish_date >= vals['covered_start']).order_by(
                    GBill.issue_date.desc(), GBill.start_date):
                vals['covered_bill_ids'].append(covered_bill.id)
                bdown = covered_bill.make_breakdown()
                vals['covered_kwh'] += covered_bill.kwh
                vals['covered_net_gbp'] += covered_bill.net
                vals['covered_vat_gbp'] += covered_bill.vat
                vals['covered_gross_gbp'] += covered_bill.gross
                for title in bill_titles:
                    k = 'covered_' + title
                    v = bdown.get(title)
                    if v is not None:
                        # Rates and factors are collected as sets of distinct
                        # values; everything else is summed.
                        if title.endswith('_rate') or title in (
                                'correction_factor', 'calorific_value',
                                'units'):
                            if k not in vals:
                                vals[k] = set()
                            vals[k].add(v)
                        else:
                            try:
                                vals[k] += v
                            except KeyError:
                                vals[k] = v
                            except TypeError:
                                raise BadRequest(
                                    "Problem with key " + str(k) +
                                    " and value " + str(v) +
                                    " for existing " + str(vals[k]))
                    # These titles are additionally gathered from the bill's
                    # register reads.
                    if title in (
                            'correction_factor', 'calorific_value',
                            'units_code', 'units_factor'):
                        if k not in vals:
                            vals[k] = set()
                        for g_read in covered_bill.g_reads:
                            if title in ('units_code', 'units_factor'):
                                g_units = g_read.g_units
                                if title == 'units_code':
                                    v = g_units.code
                                else:
                                    v = g_units.factor
                            else:
                                v = getattr(g_read, title)
                            vals[k].add(v)
            # Compute the virtual bill over every era in the covered window.
            for g_era in sess.query(GEra).filter(
                    GEra.g_supply == g_supply,
                    GEra.start_date <= vals['covered_finish'],
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >=
                        vals['covered_start'])).distinct():
                site = sess.query(Site).join(SiteGEra).filter(
                    SiteGEra.is_physical == true(),
                    SiteGEra.g_era == g_era).one()
                if vals['covered_start'] > g_era.start_date:
                    chunk_start = vals['covered_start']
                else:
                    chunk_start = g_era.start_date
                if hh_before(vals['covered_finish'], g_era.finish_date):
                    chunk_finish = vals['covered_finish']
                else:
                    chunk_finish = g_era.finish_date
                data_source = chellow.g_engine.GDataSource(
                    sess, chunk_start, chunk_finish, forecast_date, g_era,
                    report_context, covered_primary_bill)
                vbf(data_source)
                for k, v in data_source.bill.items():
                    vk = 'virtual_' + k
                    try:
                        if isinstance(v, set):
                            vals[vk].update(v)
                        else:
                            vals[vk] += v
                    except KeyError:
                        vals[vk] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(vk) + " and value " + str(v) +
                            ". " + str(detail))
            vals['supply_name'] = g_supply.name
            vals['site_code'] = site.code
            vals['site_name'] = site.name
            for k, v in vals.items():
                vals[k] = csv_make_val(v)
            # Each difference column follows its covered (i-2) and virtual
            # (i-1) counterparts in `titles`.
            for i, title in enumerate(titles):
                if title.startswith('difference_'):
                    try:
                        covered_val = float(vals[titles[i - 2]])
                        virtual_val = float(vals[titles[i - 1]])
                        vals[title] = covered_val - virtual_val
                    except KeyError:
                        vals[title] = None
            csv_writer.writerow(
                [
                    (vals.get(k) if vals.get(k) is not None else '')
                    for k in titles])
    except BadRequest as e:
        tmp_file.write("Problem: " + e.description)
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        try:
            if sess is not None:
                sess.close()
        except:
            tmp_file.write("\nProblem closing session.")
        finally:
            tmp_file.close()
            os.rename(running_name, finished_name)
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user):
    """Write the scenario report as an ODS workbook with 'Site Level' and
    'Supply Level' sheets: for each month of the scenario and each site,
    compute virtual bills per supply (optionally applying 'kw_changes'
    used/generated multipliers) and roll the figures up per site.
    """
    now = Datetime.now(pytz.utc)
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs
    sess = None
    try:
        sess = Session()
        # The scenario may arrive as a properties dict or as the id of a
        # supplier contract whose properties define the scenario.
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)
        # Register future rate-forecasting functions for 'Z' market-role
        # contracts named in the scenario.
        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue
            try:
                rate_start = props['start_date']
            except KeyError:
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")
            if rate_start is not None:
                rate_start = rate_start.replace(tzinfo=pytz.utc)
            lib = importlib.import_module('chellow.' + contract.name)
            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}
        start_date = scenario_props['scenario_start']
        if start_date is None:
            # Default to the start of the current month.
            start_date = Datetime(
                now.year, now.month, 1, tzinfo=pytz.utc)
        else:
            start_date = start_date.replace(tzinfo=pytz.utc)
        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)
        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None
        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = kwh_start.replace(tzinfo=pytz.utc)
        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            Era.start_date <= finish_date, or_(
                Era.finish_date == null(),
                Era.finish_date >= start_date)).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.filter(Era.supply == supply)
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        f = odswriter.writer(rf, '1.1')
        group_tab = f.new_sheet("Site Level")
        sup_tab = f.new_sheet("Supply Level")
        # Parse the kw_changes CSV: per-site used/generated multipliers
        # applied from a given date onwards.
        changes = defaultdict(list, {})
        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''
        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) + " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = Datetime.strptime(date_str.strip(), "%Y-%m-%d").replace(
                tzinfo=pytz.utc)
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})
        sup_header_titles = [
            'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source',
            'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp']
        # Collect the union of virtual-bill titles for each contract type.
        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr) \
                .join(Era.supply).join(Source).filter(
                    Era.start_date <= start_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles', None)
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)
        sup_tab.writerow(
            sup_header_titles + summary_titles +
            [None] + ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] +
            [None] + [
                'imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] + [
                'exp-supplier-' + t for t in title_dict['exp-supplier']])
        group_tab.writerow(site_header_titles + summary_titles)
        sites = sites.all()
        month_start = start_date
        # Iterate month by month, then site by site.
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_associates = set()
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                # First pass (groups without physical restriction):
                # classify the site's metering category and collect
                # associates, sources and generator types.
                for group in site.groups(
                        sess, month_start, month_finish, False):
                    site_associates.update(
                        set(
                            s.code for s in group.sites
                            if s.code != site.code))
                    for cand_supply in group.supplies:
                        site_sources.add(cand_supply.source.code)
                        if cand_supply.generator_type is not None:
                            site_gen_types.add(cand_supply.generator_type.code)
                        for cand_era in sess.query(Era).filter(
                                Era.supply == cand_supply,
                                Era.start_date <= group.finish_date, or_(
                                    Era.finish_date == null(),
                                    Era.finish_date >= group.start_date)). \
                                options(
                                    joinedload(Era.channels),
                                    joinedload(Era.pc),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):
                            # Category precedence: hh > amr > nhh > unmetered.
                            if site_category != 'hh':
                                if cand_era.pc.code == '00':
                                    site_category = 'hh'
                                elif site_category != 'amr':
                                    if len(cand_era.channels) > 0:
                                        site_category = 'amr'
                                    elif site_category != 'nhh':
                                        if cand_era.mtc.meter_type.code \
                                                not in ['UM', 'PH']:
                                            site_category = 'nhh'
                                        else:
                                            site_category = 'unmetered'
                # Second pass: compute virtual bills per supply.
                for group in site.groups(
                        sess, month_start, month_finish, True):
                    calcs = []
                    deltas = defaultdict(int)
                    group_associates = set(
                        s.code for s in group.sites if s.code != site.code)
                    for supply in group.supplies:
                        if supply_id is not None and supply.id != supply_id:
                            continue
                        for era in sess.query(Era).join(Supply) \
                                .join(Source).filter(
                                    Era.supply == supply,
                                    Era.start_date <= group.finish_date,
                                    or_(
                                        Era.finish_date == null(),
                                        Era.finish_date >=
                                        group.start_date)) \
                                .options(
                                    joinedload(Era.ssc),
                                    joinedload(Era.hhdc_contract),
                                    joinedload(Era.mop_contract),
                                    joinedload(Era.imp_supplier_contract),
                                    joinedload(Era.exp_supplier_contract),
                                    joinedload(Era.channels),
                                    joinedload(Era.imp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.exp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.cop),
                                    joinedload(Era.supply).joinedload(
                                        Supply.dno_contract),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):
                            if era.start_date > group.start_date:
                                ss_start = era.start_date
                            else:
                                ss_start = group.start_date
                            if hh_before(era.finish_date, group.finish_date):
                                ss_finish = era.finish_date
                            else:
                                ss_finish = group.finish_date
                            if era.imp_mpan_core is None:
                                imp_ss = None
                            else:
                                imp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start, era,
                                    True, None, report_context)
                            if era.exp_mpan_core is None:
                                exp_ss = None
                                measurement_type = imp_ss.measurement_type
                            else:
                                exp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start, era,
                                    False, None, report_context)
                                measurement_type = exp_ss.measurement_type
                            order = meter_order[measurement_type]
                            calcs.append(
                                (
                                    order, era.imp_mpan_core,
                                    era.exp_mpan_core, imp_ss, exp_ss))
                            # Track non-hh import consumption so it can be
                            # folded into the site-level figures.
                            if imp_ss is not None and len(era.channels) == 0:
                                for hh in imp_ss.hh_data:
                                    deltas[hh['start-date']] += hh['msp-kwh']
                    imp_net_delts = defaultdict(int)
                    exp_net_delts = defaultdict(int)
                    imp_gen_delts = defaultdict(int)
                    displaced_era = chellow.computer.displaced_era(
                        sess, group, group.start_date, group.finish_date)
                    site_ds = chellow.computer.SiteSource(
                        sess, site, group.start_date, group.finish_date,
                        kwh_start, None, report_context, displaced_era)
                    for hh in site_ds.hh_data:
                        try:
                            delta = deltas[hh['start-date']]
                            hh['import-net-kwh'] += delta
                            hh['used-kwh'] += delta
                        except KeyError:
                            pass
                    # Apply kw_changes to each half-hour on or after the
                    # change date, recording the induced deltas so supply
                    # figures can be adjusted to match.
                    for hh in site_ds.hh_data:
                        for change in site_changes:
                            if change['type'] == 'used' and \
                                    change['date'] <= hh['start-date']:
                                used = change['multiplier'] * hh['used-kwh']
                                exp_net = max(
                                    0, hh['import-gen-kwh'] -
                                    hh['export-gen-kwh'] - used)
                                exp_net_delt = exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += exp_net_delt
                                displaced = hh['import-gen-kwh'] - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = used - displaced
                                imp_delt = imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += imp_delt
                                hh['import-net-kwh'] = imp_net
                                hh['used-kwh'] = used
                                hh['export-net-kwh'] = exp_net
                                hh['msp-kwh'] = displaced
                            elif change['type'] == 'generated' and \
                                    change['date'] <= hh['start-date']:
                                imp_gen = change['multiplier'] * \
                                    hh['import-gen-kwh']
                                imp_gen_delt = imp_gen - hh['import-gen-kwh']
                                exp_net = max(
                                    0, imp_gen - hh['export-gen-kwh'] -
                                    hh['used-kwh'])
                                exp_net_delt = exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += exp_net_delt
                                displaced = imp_gen - hh['export-gen-kwh'] - \
                                    exp_net
                                imp_net = hh['used-kwh'] - displaced
                                imp_net_delt = imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += imp_net_delt
                                imp_gen_delts[hh['start-date']] += imp_gen_delt
                                hh['import-net-kwh'] = imp_net
                                hh['export-net-kwh'] = exp_net
                                hh['import-gen-kwh'] = imp_gen
                                hh['msp-kwh'] = displaced
                    # Displaced row for the whole site (skipped when the
                    # report is restricted to one supply).
                    if displaced_era is not None and supply_id is None:
                        month_data = {}
                        for sname in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'msp', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for xname in ('kwh', 'gbp'):
                                month_data[sname + '-' + xname] = 0
                        month_data['used-kwh'] = \
                            month_data['displaced-kwh'] = \
                            sum(hh['msp-kwh'] for hh in site_ds.hh_data)
                        disp_supplier_contract = \
                            displaced_era.imp_supplier_contract
                        disp_vb_function = chellow.computer.contract_func(
                            report_context, disp_supplier_contract,
                            'displaced_virtual_bill', None)
                        if disp_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                disp_supplier_contract.name +
                                " doesn't have the displaced_virtual_bill() "
                                "function.")
                        disp_vb_function(site_ds)
                        disp_supplier_bill = site_ds.supplier_bill
                        try:
                            gbp = disp_supplier_bill['net-gbp']
                        except KeyError:
                            disp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                site_ds.mpan_core + \
                                ' the virtual bill ' + \
                                str(disp_supplier_bill) + \
                                ' from the contract ' + \
                                disp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'
                        month_data['used-gbp'] = \
                            month_data['displaced-gbp'] = \
                            site_ds.supplier_bill['net-gbp']
                        out = [
                            None, None, displaced_era.make_meter_category(),
                            'displaced', None, None, None, None, site.code,
                            site.name,
                            ','.join(sorted(list(group_associates))),
                            month_finish] + \
                            [month_data[t] for t in summary_titles]
                        sup_tab.writerow(out)
                        for k, v in month_data.items():
                            site_month_data[k] += v
                    # One supply-level row per era, ordered by meter type.
                    for i, (
                            order, imp_mpan_core, exp_mpan_core, imp_ss,
                            exp_ss) in enumerate(sorted(calcs, key=str)):
                        if imp_ss is None:
                            era = exp_ss.era
                        else:
                            era = imp_ss.era
                        supply = era.supply
                        source = supply.source
                        source_code = source.code
                        site_sources.add(source_code)
                        month_data = {}
                        for name in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'displaced', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for sname in ('kwh', 'gbp'):
                                month_data[name + '-' + sname] = 0
                        if source_code == 'net':
                            delts = imp_net_delts
                        elif source_code == 'gen':
                            delts = imp_gen_delts
                        else:
                            delts = []
                        # Fold the kw_changes deltas back into this supply's
                        # half-hour data; leftovers go on the first HH.
                        if len(delts) > 0 and imp_ss is not None:
                            for hh in imp_ss.hh_data:
                                diff = hh['msp-kwh'] + delts[hh['start-date']]
                                if diff < 0:
                                    hh['msp-kwh'] = 0
                                    hh['msp-kw'] = 0
                                    # NOTE(review): hh['msp-kwh'] is 0 here,
                                    # so this subtraction is a no-op —
                                    # confirm the intended adjustment.
                                    delts[hh['start-date']] -= hh['msp-kwh']
                                else:
                                    hh['msp-kwh'] += delts[hh['start-date']]
                                    hh['msp-kw'] += hh['msp-kwh'] / 2
                                    del delts[hh['start-date']]
                            left_kwh = sum(delts.values())
                            if left_kwh > 0:
                                first_hh = imp_ss.hh_data[0]
                                first_hh['msp-kwh'] += left_kwh
                                first_hh['msp-kw'] += left_kwh / 2
                        imp_supplier_contract = era.imp_supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = contract_func(
                                report_context, imp_supplier_contract,
                                'virtual_bill', None)
                            if import_vb_function is None:
                                raise BadRequest(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)
                            imp_supplier_bill = imp_ss.supplier_bill
                            try:
                                gbp = imp_supplier_bill['net-gbp']
                            except KeyError:
                                imp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] -= gbp
                            kwh = sum(
                                hh['msp-kwh'] for hh in imp_ss.hh_data)
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] -= kwh
                            elif source_code in ('gen', 'gen-net'):
                                month_data['import-gen-kwh'] += kwh
                        exp_supplier_contract = era.exp_supplier_contract
                        if exp_supplier_contract is None:
                            # No export supplier: take export kWh straight
                            # from the export-related active channels.
                            kwh = sess.query(
                                func.coalesce(
                                    func.sum(
                                        cast(HhDatum.value, Float)), 0)). \
                                join(Channel).filter(
                                    Channel.era == era,
                                    Channel.channel_type == 'ACTIVE',
                                    Channel.imp_related == false()).scalar()
                            if source_code == 'gen':
                                month_data['export-net-kwh'] += kwh
                        else:
                            export_vb_function = contract_func(
                                report_context, exp_supplier_contract,
                                'virtual_bill', None)
                            export_vb_function(exp_ss)
                            exp_supplier_bill = exp_ss.supplier_bill
                            try:
                                gbp = exp_supplier_bill['net-gbp']
                            except KeyError:
                                # NOTE(review): this message references the
                                # import-side objects (imp_ss,
                                # imp_supplier_bill, imp_supplier_contract) —
                                # looks like a copy-paste from the import
                                # branch; confirm.
                                exp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)
                            if source_code in ('net', 'gen-net'):
                                month_data['export-net-kwh'] += kwh
                                month_data['export-net-gbp'] += gbp
                            elif source_code in \
                                    ('3rd-party', '3rd-party-reverse'):
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-kwh'] -= kwh
                                month_data['used-gbp'] -= gbp
                            elif source_code == 'gen':
                                month_data['export-gen-kwh'] += kwh
                        # DC and MOP virtual bills are computed on whichever
                        # supply source exists.
                        sss = exp_ss if imp_ss is None else imp_ss
                        dc_contract = era.hhdc_contract
                        sss.contract_func(
                            dc_contract, 'virtual_bill')(sss)
                        dc_bill = sss.dc_bill
                        gbp = dc_bill['net-gbp']
                        mop_contract = era.mop_contract
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                        mop_bill = sss.mop_bill
                        gbp += mop_bill['net-gbp']
                        if source_code in ('3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp
                        else:
                            month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp
                        if source_code in ('gen', 'gen-net'):
                            generator_type = supply.generator_type.code
                            site_gen_types.add(generator_type)
                        else:
                            generator_type = None
                        sup_category = era.make_meter_category()
                        if CATEGORY_ORDER[site_category] < \
                                CATEGORY_ORDER[sup_category]:
                            site_category = sup_category
                        # Apportion actual bills over the overlap with this
                        # supply-source period, pro rata by duration.
                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = (
                                bill_finish - bill_start).total_seconds() + \
                                (30 * 60)
                            overlap_duration = (
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)
                                ).total_seconds() + (30 * 60)
                            overlap_proportion = \
                                float(overlap_duration) / bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)
                        out = [
                            era.imp_mpan_core, era.exp_mpan_core,
                            sup_category, source_code, generator_type,
                            supply.name, era.msn, era.pc.code, site.code,
                            site.name,
                            ','.join(sorted(list(site_associates))),
                            month_finish] + [
                                month_data[t] for t in summary_titles] + \
                            [None] + [
                                (mop_bill[t] if t in mop_bill else None)
                                for t in title_dict['mop']] + [None] + \
                            [(dc_bill[t] if t in dc_bill else None)
                                for t in title_dict['dc']]
                        if imp_supplier_contract is None:
                            out += [None] * \
                                (len(title_dict['imp-supplier']) + 1)
                        else:
                            out += [None] + [
                                (
                                    imp_supplier_bill[t]
                                    if t in imp_supplier_bill else None)
                                for t in title_dict['imp-supplier']]
                        if exp_supplier_contract is not None:
                            out += [None] + [
                                (
                                    exp_supplier_bill[t]
                                    if t in exp_supplier_bill else None)
                                for t in title_dict['exp-supplier']]
                        for k, v in month_data.items():
                            site_month_data[k] += v
                        sup_tab.writerow(out)
                group_tab.writerow(
                    [
                        site.code, site.name,
                        ''.join(sorted(list(site_associates))),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
                # Discard ORM state accumulated for this site to bound
                # memory use.
                sess.rollback()
            month_start += relativedelta(months=1)
    except BadRequest as e:
        # NOTE(review): group_tab is undefined if the failure happened
        # before the sheets were created — this handler would then raise
        # NameError; confirm.
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    finally:
        if sess is not None:
            sess.close()
        try:
            f.close()
            rf.close()
            os.rename(running_name, finished_name)
        except:
            # If the workbook can't be finalized, record the error in a
            # separate download file.
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
def _process_hh(ds, rate_period, est_kw, hh): month_start, month_finish = next( c_months_u(start_year=hh["ct-year"], start_month=hh["ct-month"])) month_start_ct = to_ct(month_start) if month_start_ct.month > 3: year = month_start_ct.year else: year = month_start_ct.year - 1 financial_year_start = to_utc(ct_datetime(year, 4, 1)) last_financial_year_start = to_utc(ct_datetime(year - 1, 4, 1)) financial_year_finish = to_utc(ct_datetime(year + 1, 3, 31, 23, 30)) est_triad_kws = [] earliest_triad = None for dt in get_file_rates(ds.caches, "triad_dates", last_financial_year_start)["triad_dates"]: triad_hh = None earliest_triad = hh_min(earliest_triad, dt) try: d = next(ds.get_data_sources(dt, dt, financial_year_start)) chellow.duos.duos_vb(d) triad_hh = d.hh_data[0] while dt < financial_year_start: dt += relativedelta(years=1) for d in ds.get_data_sources(dt, dt, financial_year_start): chellow.duos.duos_vb(d) datum = d.hh_data[0] triad_hh["laf"] = datum["laf"] triad_hh["gsp-kw"] = datum["laf"] * triad_hh["msp-kw"] except StopIteration: triad_hh = { "hist-start": dt, "msp-kw": 0, "start-date": dt, "status": "before start of MPAN", "laf": 1, "gsp-kw": 0, } est_triad_kws.append(triad_hh) if ds.site is None: era = ds.supply.find_era_at(ds.sess, earliest_triad) if (era is None or era.get_channel(ds.sess, ds.is_import, "ACTIVE") is None and est_kw is None): est_kw = 0.85 * max(datum["msp-kwh"] for datum in ds.hh_data) * 2 if est_kw is not None: for est_datum in est_triad_kws: est_datum["msp-kw"] = est_kw est_datum["gsp-kw"] = est_datum["msp-kw"] * est_datum["laf"] gsp_kw = 0 for i, triad_hh in enumerate(est_triad_kws): triad_prefix = "triad-estimate-" + str(i + 1) hh[triad_prefix + "-date"] = triad_hh["hist-start"] hh[triad_prefix + "-msp-kw"] = triad_hh["msp-kw"] hh[triad_prefix + "-status"] = triad_hh["status"] hh[triad_prefix + "-laf"] = triad_hh["laf"] hh[triad_prefix + "-gsp-kw"] = triad_hh["gsp-kw"] gsp_kw += triad_hh["gsp-kw"] hh["triad-estimate-gsp-kw"] = gsp_kw / 3 
polarity = "import" if ds.llfc.is_import else "export" gsp_group_code = ds.gsp_group_code rate = float( get_file_rates( ds.caches, "triad_rates", month_start)["triad_gbp_per_gsp_kw"][polarity][gsp_group_code]) hh["triad-estimate-rate"] = rate est_triad_gbp = hh["triad-estimate-rate"] * hh["triad-estimate-gsp-kw"] if rate_period == "monthly": total_intervals = 12 est_intervals = 1 hh["triad-estimate-months"] = est_intervals else: dt = financial_year_start total_intervals = 0 while dt <= financial_year_finish: total_intervals += 1 dt += relativedelta(days=1) est_intervals = 0 for d in ds.get_data_sources(month_start, month_finish): for h in d.hh_data: if h["ct-decimal-hour"] == 0: est_intervals += 1 hh["triad-estimate-days"] = est_intervals hh["triad-estimate-gbp"] = est_triad_gbp / total_intervals * est_intervals if hh["ct-month"] == 3: triad_kws = [] for t_date in get_file_rates(ds.caches, "triad_dates", month_start)["triad_dates"]: try: d = next(ds.get_data_sources(t_date, t_date)) if (ds.supplier_contract is None or d.supplier_contract == ds.supplier_contract): chellow.duos.duos_vb(d) thh = d.hh_data[0] else: thh = { "hist-start": t_date, "msp-kw": 0, "start-date": t_date, "status": "before contract", "laf": "before contract", "gsp-kw": 0, } except StopIteration: thh = { "hist-start": t_date, "msp-kw": 0, "start-date": t_date, "status": "before start of supply", "laf": "before start of supply", "gsp-kw": 0, } while t_date < financial_year_start: t_date += relativedelta(years=1) try: d = next(ds.get_data_sources(t_date, t_date)) if (ds.supplier_contract is None or d.supplier_contract == ds.supplier_contract): chellow.duos.duos_vb(d) thh["laf"] = d.hh_data[0]["laf"] thh["gsp-kw"] = thh["laf"] * thh["msp-kw"] except StopIteration: pass triad_kws.append(thh) gsp_kw = 0 for i, triad_hh in enumerate(triad_kws): pref = "triad-actual-" + str(i + 1) hh[pref + "-date"] = triad_hh["start-date"] hh[pref + "-msp-kw"] = triad_hh["msp-kw"] hh[pref + "-status"] = 
triad_hh["status"] hh[pref + "-laf"] = triad_hh["laf"] hh[pref + "-gsp-kw"] = triad_hh["gsp-kw"] gsp_kw += triad_hh["gsp-kw"] hh["triad-actual-gsp-kw"] = gsp_kw / 3 polarity = "import" if ds.llfc.is_import else "export" gsp_group_code = ds.gsp_group_code tot_rate = 0 for start_date, finish_date, script in get_file_scripts("triad_rates"): if start_date <= financial_year_finish and not hh_before( finish_date, financial_year_start): start_month = to_ct(start_date).month if start_month < 4: start_month += 12 if finish_date is None: finish_month = 3 else: finish_month = to_ct(finish_date).month if finish_month < 4: finish_month += 12 rt = get_file_rates( ds.caches, "triad_rates", start_date )["triad_gbp_per_gsp_kw"][polarity][gsp_group_code] tot_rate += (finish_month - start_month + 1) * float(rt) rate = tot_rate / 12 hh["triad-actual-rate"] = rate hh["triad-actual-gbp"] = hh["triad-actual-rate"] * hh[ "triad-actual-gsp-kw"] era = ds.supply.find_era_at(ds.sess, month_finish) est_intervals = 0 interval = (relativedelta( months=1) if rate_period == "monthly" else relativedelta(days=1)) dt = month_finish while era is not None and dt > financial_year_start: est_intervals += 1 dt -= interval if hh_after(dt, era.finish_date): era = ds.supply.find_era_at(ds.sess, dt) if rate_period == "monthly": hh["triad-all-estimates-months"] = est_intervals else: hh["triad-all-estimates-days"] = est_intervals hh["triad-all-estimates-gbp"] = (est_triad_gbp / total_intervals * est_intervals * -1)
def content(batch_id, bill_id, user):
    """Write the 'bill check' CSV report comparing actual bills against
    virtual (computed) bills.

    Exactly one of ``batch_id`` / ``bill_id`` selects the bills to check.
    ``user`` is used to name the download file.  Output is written to a
    'running' file which is renamed to its finished name on completion.
    """
    caches = {}
    tmp_file = sess = None
    # No forecast limit: use the latest representable UTC datetime.
    forecast_date = Datetime.max.replace(tzinfo=pytz.utc)
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')

        # Select the bills and the batch: either a whole batch, or the
        # single bill's own batch.
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = sess.query(Bill).filter(
                Bill.batch_id == batch.id).order_by(Bill.reference)
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = sess.query(Bill).filter(Bill.id == bill.id)
            batch = bill.batch

        contract = batch.contract
        market_role_code = contract.market_role.code

        # The contract must supply both virtual_bill and virtual_bill_titles.
        vbf = chellow.computer.contract_func(
            caches, contract, 'virtual_bill', None)
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")
        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles', None)
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()

        # Fixed columns, then covered/virtual (and difference for -gbp)
        # columns per virtual bill title.
        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh',
            'bill-net-gbp', 'bill-vat-gbp', 'bill-start-date',
            'bill-finish-date', 'bill-mpan-core', 'site-code', 'site-name',
            'covered-from', 'covered-to', 'covered-bills', 'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)
        writer.writerow(titles)

        for bill in bills:
            problem = ''
            supply = bill.supply
            read_dict = {}

            # Sanity-check each register read against the eras of the day
            # it was taken on.
            for read in bill.reads:
                gen_start = read.present_date.replace(hour=0).replace(minute=0)
                gen_finish = gen_start + relativedelta(days=1) - HH
                msn_match = False
                read_msn = read.msn
                for read_era in supply.find_eras(sess, gen_start, gen_finish):
                    if read_msn == read_era.msn:
                        msn_match = True
                        break
                if not msn_match:
                    problem += "The MSN " + read_msn + \
                        " of the register read " + str(read.id) + \
                        " doesn't match the MSN of the era."

                # Reads on the same date/meter should share a read type.
                # NOTE(review): 'type' shadows the builtin of that name.
                for dt, type in [
                        (read.present_date, read.present_type),
                        (read.previous_date, read.previous_type)]:
                    key = str(dt) + "-" + read.msn
                    try:
                        if type != read_dict[key]:
                            problem += " Reads taken on " + str(dt) + \
                                " have differing read types."
                    except KeyError:
                        read_dict[key] = type

            bill_start = bill.start_date
            bill_finish = bill.finish_date
            era = supply.find_era_at(sess, bill.finish_date)
            if era is None:
                raise BadRequest(
                    "Extraordinary! There isn't an era for the bill " +
                    str(bill.id) + ".")
            values = [
                batch.reference, bill.reference, bill.bill_type.code,
                bill.kwh, bill.net, bill.vat, hh_format(bill_start),
                hh_format(bill_finish), era.imp_mpan_core]

            covered_start = bill_start
            covered_finish = bill_finish
            covered_bill_ids = []
            covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}
            covered_primary_bill = None

            # Grow the covered period until no overlapping bill extends it.
            enlarged = True
            while enlarged:
                enlarged = False
                covered_bills = []
                cand_bills = dict(
                    (b.id, b) for b in sess.query(Bill).join(Batch).
                    join(Contract).join(MarketRole).filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start,
                        MarketRole.code == market_role_code).order_by(
                        Bill.issue_date.desc(), Bill.start_date))

                # Repeatedly drop pairs of bills that exactly cancel each
                # other out (a bill and its negation).
                while True:
                    to_del = None
                    for a, b in combinations(cand_bills.values(), 2):
                        if all(
                                (
                                    a.start_date == b.start_date,
                                    a.finish_date == b.finish_date,
                                    a.kwh == -1 * b.kwh,
                                    a.net == -1 * b.net,
                                    a.vat == -1 * b.vat,
                                    a.gross == -1 * b.gross)):
                            to_del = (a.id, b.id)
                            break
                    if to_del is None:
                        break
                    else:
                        for k in to_del:
                            del cand_bills[k]

                for cand_bill_id in sorted(cand_bills.keys()):
                    cand_bill = cand_bills[cand_bill_id]
                    # First bill with reads becomes the 'primary' bill used
                    # to seed the supply source.
                    if covered_primary_bill is None and \
                            len(cand_bill.reads) > 0:
                        covered_primary_bill = cand_bill
                    # Widen the covered period and restart if necessary.
                    if cand_bill.start_date < covered_start:
                        covered_start = cand_bill.start_date
                        enlarged = True
                        break
                    if cand_bill.finish_date > covered_finish:
                        covered_finish = cand_bill.finish_date
                        enlarged = True
                        break
                    covered_bills.append(cand_bill)

            # Accumulate totals and breakdown values over the covered bills.
            for covered_bill in covered_bills:
                covered_bill_ids.append(covered_bill.id)
                covered_bdown['net-gbp'] += float(covered_bill.net)
                covered_bdown['vat-gbp'] += float(covered_bill.vat)
                covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                if len(covered_bill.breakdown) > 0:
                    covered_rates = collections.defaultdict(set)
                    # NOTE(review): eval() on stored breakdown text --
                    # trusted data from the DB, but still eval; consider
                    # ast.literal_eval if the format allows.
                    for k, v in eval(covered_bill.breakdown, {}).items():
                        if k.endswith('rate'):
                            # Rates are collected as sets and collapsed
                            # later, not summed.
                            covered_rates[k].add(v)
                        elif k != 'raw-lines':
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " the value " +
                                    str(v) +
                                    " can't be added to the existing value " +
                                    str(covered_bdown[k]) + ". " +
                                    str(detail))
                    # A rate is only meaningful if it was the same across
                    # all covered bills; otherwise record None.
                    for k, v in covered_rates.items():
                        covered_bdown[k] = v.pop() if len(v) == 1 else None

            # Compute the virtual bill over every era overlapping the
            # covered period.
            virtual_bill = {}
            metered_kwh = 0
            for era in sess.query(Era).filter(
                    Era.supply_id == supply.id, Era.imp_mpan_core != null(),
                    Era.start_date <= covered_finish, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= covered_start)).distinct():
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()

                # Clamp the chunk to the era's own span.
                if covered_start > era.start_date:
                    chunk_start = covered_start
                else:
                    chunk_start = era.start_date
                if hh_before(covered_finish, era.finish_date):
                    chunk_finish = covered_finish
                else:
                    chunk_finish = era.finish_date

                data_source = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era, True,
                    None, caches, covered_primary_bill)
                if data_source.measurement_type == 'hh':
                    metered_kwh += sum(
                        h['msp-kwh'] for h in data_source.hh_data)
                else:
                    # Non-hh: measure kWh from a source without the
                    # primary bill attached.
                    ds = chellow.computer.SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date, era,
                        True, None, caches)
                    metered_kwh += sum(h['msp-kwh'] for h in ds.hh_data)

                vbf(data_source)

                # Pick the bill dict matching the contract's market role.
                if market_role_code == 'X':
                    vb = data_source.supplier_bill
                elif market_role_code == 'C':
                    vb = data_source.dc_bill
                elif market_role_code == 'M':
                    vb = data_source.mop_bill
                else:
                    raise BadRequest("Odd market role.")

                # Merge into the running virtual bill: rates as sets,
                # everything else summed.
                for k, v in vb.items():
                    if k.endswith('-rate'):
                        if k not in virtual_bill:
                            virtual_bill[k] = set()
                        virtual_bill[k].add(v)
                    else:
                        try:
                            virtual_bill[k] += v
                        except KeyError:
                            virtual_bill[k] = v
                        except TypeError as detail:
                            raise BadRequest(
                                "For key " + str(k) + " and value " +
                                str(v) + ". " + str(detail))

            # NOTE(review): 'site' here is whatever the last era-loop
            # iteration set; if the era query matched nothing this raises
            # NameError (or reuses the previous bill's site) -- confirm.
            values += [
                site.code, site.name, hh_format(covered_start),
                hh_format(covered_finish),
                ','.join(
                    str(id).replace(',', '') for id in covered_bill_ids),
                metered_kwh]

            # Covered / virtual / difference columns, consuming the dicts
            # as we go so leftovers can be appended afterwards.
            for title in virtual_bill_titles:
                try:
                    cov_val = covered_bdown[title]
                    values.append(cov_val)
                    del covered_bdown[title]
                except KeyError:
                    cov_val = None
                    values.append('')
                try:
                    virt_val = virtual_bill[title]
                    if isinstance(virt_val, set):
                        virt_val = ', '.join(str(v) for v in virt_val)
                    elif isinstance(virt_val, Datetime):
                        virt_val = hh_format(virt_val)
                    values.append(virt_val)
                    del virtual_bill[title]
                except KeyError:
                    virt_val = None
                    values.append('')
                if title.endswith('-gbp'):
                    if all(
                            isinstance(val, (int, float)) for val in [
                                cov_val, virt_val]):
                        values.append(cov_val - virt_val)
                    else:
                        values.append('')

            # Any virtual-bill keys not in the declared titles.
            for title in sorted(virtual_bill.keys()):
                virt_val = virtual_bill[title]
                if isinstance(virt_val, set):
                    virt_val = ', '.join(str(v) for v in virt_val)
                elif isinstance(virt_val, Datetime):
                    virt_val = hh_format(virt_val)
                values += ['virtual-' + title, virt_val]
                if title in covered_bdown:
                    values += ['covered-' + title, covered_bdown[title]]
                else:
                    values += ['', '']

            writer.writerow(values)
    except BadRequest as e:
        tmp_file.write("Problem: " + e.description)
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        # Always release the session and publish the (possibly partial)
        # report file.
        if sess is not None:
            sess.close()
        tmp_file.close()
        os.rename(running_name, finished_name)
def hh(data_source, rate_period='monthly', est_kw=None):
    """Add triad estimate (and, in March, triad actual) keys to each
    month-end half-hour of ``data_source.hh_data``.

    Older sibling of _process_hh: same overall shape, but note it tests
    'utc-decimal-hour' (not 'ct-decimal-hour') and uses UTC months when
    weighting rate scripts.

    rate_period -- 'monthly' spreads the annual estimate over 12 months;
        anything else spreads it over the days of the financial year
    est_kw -- optional estimated kW to use for all three triad half-hours
    """
    # Only decorate half-hours flagged as the last of a CT month.
    for hh in (h for h in data_source.hh_data if h['ct-is-month-end']):
        hh_start = hh['start-date']
        month_start = utc_datetime(hh_start.year, hh_start.month)
        month_finish = month_start + relativedelta(months=1) - HH

        # Walk back to the April that starts the financial year.
        financial_year_start = month_start
        while financial_year_start.month != 4:
            financial_year_start -= relativedelta(months=1)
        last_financial_year_start = financial_year_start - relativedelta(
            years=1)
        financial_year_finish = financial_year_start + relativedelta(
            years=1) - HH

        # One candidate half-hour per triad date of the previous financial
        # year (this year's actual dates aren't known until after March).
        est_triad_kws = []
        earliest_triad = None
        for dt in get_file_rates(
                data_source.caches, 'triad_dates',
                last_financial_year_start)['triad_dates']:
            triad_hh = None
            earliest_triad = hh_min(earliest_triad, dt)
            try:
                # Historical consumption at last year's triad date...
                ds = next(
                    data_source.get_data_sources(
                        dt, dt, financial_year_start))
                chellow.duos.duos_vb(ds)
                triad_hh = ds.hh_data[0]

                # ...combined with this year's LAF at the equivalent date.
                while dt < financial_year_start:
                    dt += relativedelta(years=1)
                for ds in data_source.get_data_sources(
                        dt, dt, financial_year_start):
                    chellow.duos.duos_vb(ds)
                    datum = ds.hh_data[0]
                    triad_hh['laf'] = datum['laf']
                    triad_hh['gsp-kw'] = datum['laf'] * triad_hh['msp-kw']
            except StopIteration:
                # No data source covers the date: zero placeholder.
                triad_hh = {
                    'hist-start': dt, 'msp-kw': 0, 'start-date': dt,
                    'status': 'before start of MPAN', 'laf': 1, 'gsp-kw': 0}
            est_triad_kws.append(triad_hh)

        if data_source.site is None:
            era = data_source.supply.find_era_at(
                data_source.sess, earliest_triad)
            if era is None or era.get_channel(
                    data_source.sess, data_source.is_import,
                    'ACTIVE') is None:
                if est_kw is not None:
                    est_triad_kw = est_kw
                else:
                    # Estimate peak kW as 85% of the doubled maximum
                    # half-hourly kWh.
                    est_triad_kw = 0.85 * max(
                        datum['msp-kwh']
                        for datum in data_source.hh_data) * 2
                for est_datum in est_triad_kws:
                    est_datum['msp-kw'] = est_triad_kw
                    est_datum['gsp-kw'] = est_datum['msp-kw'] * \
                        est_datum['laf']

        # Per-triad estimate keys and the three-date average GSP kW.
        gsp_kw = 0
        for i, triad_hh in enumerate(est_triad_kws):
            triad_prefix = 'triad-estimate-' + str(i + 1)
            hh[triad_prefix + '-date'] = triad_hh['hist-start']
            hh[triad_prefix + '-msp-kw'] = triad_hh['msp-kw']
            hh[triad_prefix + '-status'] = triad_hh['status']
            hh[triad_prefix + '-laf'] = triad_hh['laf']
            hh[triad_prefix + '-gsp-kw'] = triad_hh['gsp-kw']
            gsp_kw += triad_hh['gsp-kw']
        hh['triad-estimate-gsp-kw'] = gsp_kw / 3

        # Rate for the month, keyed by polarity and GSP group.
        polarity = 'import' if data_source.llfc.is_import else 'export'
        gsp_group_code = data_source.gsp_group_code
        rate = float(
            get_file_rates(
                data_source.caches, 'triad_rates',
                month_start)['triad_gbp_per_gsp_kw'][polarity]
            [gsp_group_code])
        hh['triad-estimate-rate'] = rate
        est_triad_gbp = hh['triad-estimate-rate'] * \
            hh['triad-estimate-gsp-kw']

        if rate_period == 'monthly':
            # One twelfth of the annual estimate per month.
            total_intervals = 12
            est_intervals = 1
            hh['triad-estimate-months'] = est_intervals
        else:
            # Daily apportionment: count days in the financial year...
            dt = financial_year_start
            total_intervals = 0
            while dt <= financial_year_finish:
                total_intervals += 1
                dt += relativedelta(days=1)
            # ...and days with data in this month (one UTC-midnight per
            # day -- the newer _process_hh counts CT midnights instead).
            est_intervals = 0
            for ds in data_source.get_data_sources(
                    month_start, month_finish):
                for h in ds.hh_data:
                    if h['utc-decimal-hour'] == 0:
                        est_intervals += 1
            hh['triad-estimate-days'] = est_intervals
        hh['triad-estimate-gbp'] = est_triad_gbp / total_intervals * \
            est_intervals

        # March: compute actual triad charge and reverse the estimates.
        if month_start.month == 3:
            triad_kws = []
            for t_date in get_file_rates(
                    data_source.caches, 'triad_dates',
                    month_start)['triad_dates']:
                try:
                    ds = next(
                        data_source.get_data_sources(t_date, t_date))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh = ds.hh_data[0]
                    else:
                        # Data belongs to a different supplier contract.
                        thh = {
                            'hist-start': t_date, 'msp-kw': 0,
                            'start-date': t_date,
                            'status': 'before contract',
                            'laf': 'before contract', 'gsp-kw': 0}
                except StopIteration:
                    thh = {
                        'hist-start': t_date, 'msp-kw': 0,
                        'start-date': t_date,
                        'status': 'before start of supply',
                        'laf': 'before start of supply', 'gsp-kw': 0}

                # Re-fetch the LAF at the equivalent date in the current
                # financial year, if available.
                while t_date < financial_year_start:
                    t_date += relativedelta(years=1)
                try:
                    ds = next(
                        data_source.get_data_sources(t_date, t_date))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh['laf'] = ds.hh_data[0]['laf']
                        thh['gsp-kw'] = thh['laf'] * thh['msp-kw']
                except StopIteration:
                    pass
                triad_kws.append(thh)

            gsp_kw = 0
            for i, triad_hh in enumerate(triad_kws):
                pref = 'triad-actual-' + str(i + 1)
                hh[pref + '-date'] = triad_hh['start-date']
                hh[pref + '-msp-kw'] = triad_hh['msp-kw']
                hh[pref + '-status'] = triad_hh['status']
                hh[pref + '-laf'] = triad_hh['laf']
                hh[pref + '-gsp-kw'] = triad_hh['gsp-kw']
                gsp_kw += triad_hh['gsp-kw']
            hh['triad-actual-gsp-kw'] = gsp_kw / 3

            # Average the rate over rate scripts overlapping the financial
            # year, weighted by months covered (numbered 4..15 so April
            # sorts before March). Months here are UTC, unlike the CT
            # months used by the newer _process_hh.
            polarity = 'import' if data_source.llfc.is_import else 'export'
            gsp_group_code = data_source.gsp_group_code
            tot_rate = 0
            for start_date, finish_date, script in get_file_scripts(
                    'triad_rates'):
                if start_date <= financial_year_finish and not hh_before(
                        finish_date, financial_year_start):
                    start_month = start_date.month
                    if start_month < 4:
                        start_month += 12
                    if finish_date is None:
                        finish_month = financial_year_finish.month
                    else:
                        finish_month = finish_date.month
                    if finish_month < 4:
                        finish_month += 12
                    rt = get_file_rates(
                        data_source.caches, 'triad_rates', start_date
                    )['triad_gbp_per_gsp_kw'][polarity][gsp_group_code]
                    tot_rate += (finish_month - start_month + 1) * float(rt)
            rate = tot_rate / 12
            hh['triad-actual-rate'] = rate
            hh['triad-actual-gbp'] = hh['triad-actual-rate'] * \
                hh['triad-actual-gsp-kw']

            # Count estimate intervals charged over the year while an era
            # existed, then reverse them in one negative amount.
            era = data_source.supply.find_era_at(
                data_source.sess, month_finish)
            est_intervals = 0
            interval = relativedelta(months=1) if \
                rate_period == 'monthly' else relativedelta(days=1)
            dt = month_finish
            while era is not None and dt > financial_year_start:
                est_intervals += 1
                dt -= interval
                if hh_after(dt, era.finish_date):
                    era = data_source.supply.find_era_at(
                        data_source.sess, dt)
            if rate_period == 'monthly':
                hh['triad-all-estimates-months'] = est_intervals
            else:
                hh['triad-all-estimates-days'] = est_intervals
            hh['triad-all-estimates-gbp'] = est_triad_gbp / \
                total_intervals * est_intervals * -1
def content(contract_id, days_hidden, user):
    """Write the channel-snags CSV report for a DC contract.

    Lists every channel snag of the contract that started before the
    hidden-days cutoff, one row per snag, ordered by site, supply and
    channel.  The report is written to a 'running' file that is renamed
    to its finished name when done.
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "channel_snags.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")

        # Column headings; each row below is emitted in this exact order.
        titles = (
            "Hidden Days",
            "Chellow Id",
            "Imp MPAN Core",
            "Exp MPAN Core",
            "Site Code",
            "Site Name",
            "Snag Description",
            "Import Related?",
            "Channel Type",
            "Start Date",
            "Finish Date",
            "Is Ignored?",
            "Days Since Snag Finished",
            "Duration Of Snag (Days)",
        )
        writer.writerow(titles)

        contract = Contract.get_dc_by_id(sess, contract_id)
        now = utc_datetime_now()
        # Snags newer than this are hidden from the report.
        cutoff_date = now - relativedelta(days=days_hidden)

        snag_q = (
            sess.query(Snag, Channel, Era, Supply, SiteEra, Site)
            .join(Channel, Snag.channel_id == Channel.id)
            .join(Era, Channel.era_id == Era.id)
            .join(Supply, Era.supply_id == Supply.id)
            .join(SiteEra, Era.site_eras)
            .join(Site, SiteEra.site_id == Site.id)
            .filter(
                SiteEra.is_physical == true(),
                Era.dc_contract == contract,
                Snag.start_date < cutoff_date,
            )
            .order_by(
                Site.code,
                Supply.id,
                Channel.imp_related,
                Channel.channel_type,
                Snag.description,
                Snag.start_date,
                Snag.id,
            )
        )
        for snag, channel, era, supply, site_era, site in snag_q:
            started = snag.start_date
            finished = snag.finish_date

            # Open snags run up to 'now'; only snags that finished before
            # the cutoff get an age in days.
            if finished is None:
                length = now - started
                days_since_finished = None
            else:
                length = finished - started
                if hh_before(cutoff_date, finished):
                    days_since_finished = None
                else:
                    days_since_finished = (now - finished).days

            row = {
                "Hidden Days": days_hidden,
                "Chellow Id": snag.id,
                "Imp MPAN Core":
                    "" if era.imp_mpan_core is None else era.imp_mpan_core,
                "Exp MPAN Core":
                    "" if era.exp_mpan_core is None else era.exp_mpan_core,
                "Site Code": site.code,
                "Site Name": site.name,
                "Snag Description": snag.description,
                "Import Related?": channel.imp_related,
                "Channel Type": channel.channel_type,
                "Start Date": started,
                "Finish Date": finished,
                "Is Ignored?": snag.is_ignored,
                "Days Since Snag Finished": days_since_finished,
                "Duration Of Snag (Days)": length.days,
            }
            writer.writerow(csv_make_val(row[t]) for t in titles)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        # Release the session and publish the (possibly partial) report.
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, supply_id, sess):
    """Yield CSV text describing supply 'special events' (meter changes,
    supplier changes, connections and disconnections) in the financial
    year starting 1 April ``year``.

    supply_id -- restrict to one supply; None means all net/gen supplies
    sess -- SQLAlchemy session
    """
    yield "MPAN Core,Site Id,Site Name,Date,Event,"
    year_start = datetime(year, 4, 1, tzinfo=pytz.utc)
    year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc))

    def add_event(events, date, code, era=None, mpan_core=None):
        # Record an event against the era's MPAN cores, or against the
        # explicit mpan_core when no era is given.
        if era is None:
            mpan_cores = [mpan_core]
        else:
            mpan_cores = []
            if era.imp_mpan_core is not None:
                mpan_cores.append(era.imp_mpan_core)
            if era.exp_mpan_core is not None:
                mpan_cores.append(era.exp_mpan_core)
        for mpan_core in mpan_cores:
            events.append(
                {'date': date, 'code': code, 'mpan-core': mpan_core})

    try:
        if supply_id is None:
            supplies = sess.query(Supply).join(Source).join(Era).filter(
                Source.code.in_(('net', 'gen-net', 'gen')),
                Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).distinct()
        else:
            # Fix: get_by_id takes the session first, matching every other
            # get_by_id call in this module.
            supply = Supply.get_by_id(sess, supply_id)
            supplies = sess.query(Supply).filter(Supply.id == supply.id)

        for supply in supplies:
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(Era.finish_date == null(), Era.finish_date >= year_start)
            ).order_by(Era.start_date).all()
            events = []

            # Connection / disconnection within the year.
            first_era = eras[0]
            first_era_start = first_era.start_date
            if hh_after(first_era_start, year_start):
                add_event(events, first_era_start, "New Supply", first_era)
            last_era = eras[-1]
            last_era_finish = last_era.finish_date
            if hh_before(last_era_finish, year_finish):
                add_event(events, last_era_finish, "Disconnection", last_era)

            # Changes between consecutive eras.
            prev_era = first_era
            for era in eras[1:]:
                if era.msn != prev_era.msn:
                    add_event(events, era.start_date, "Meter Change", era)
                if era.pc.code != prev_era.pc.code:
                    add_event(
                        events, era.start_date,
                        "Change Of Profile Class", era)
                if era.mop_contract_id != prev_era.mop_contract_id:
                    add_event(events, era.start_date, "Change Of MOP", era)
                if era.hhdc_contract_id != prev_era.hhdc_contract_id:
                    add_event(events, era.start_date, "Change Of DC", era)

                for is_import in [True, False]:
                    if era.imp_mpan_core is None:
                        mpan_core = era.exp_mpan_core
                    else:
                        mpan_core = era.imp_mpan_core
                    if is_import:
                        cur_sup = era.imp_supplier_contract
                        prev_sup = prev_era.imp_supplier_contract
                    else:
                        cur_sup = era.exp_supplier_contract
                        prev_sup = prev_era.exp_supplier_contract
                    if cur_sup is None and prev_sup is not None:
                        # Fix: pass mpan_core by its own parameter; the
                        # original passed it positionally as 'era', which
                        # raised AttributeError on a plain string.
                        add_event(
                            events, era.start_date, "End of supply",
                            None, mpan_core)
                    elif cur_sup is not None and prev_sup is None:
                        add_event(
                            events, era.start_date, "Start of supply",
                            None, mpan_core)
                    elif cur_sup is not None and \
                            prev_sup is not None and cur_sup != prev_sup:
                        add_event(
                            events, era.start_date, "Change Of Supplier",
                            None, mpan_core)

                prev_era = era

            if len(events) > 0:
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era == last_era).one()
                for event in events:
                    vals = [
                        event['mpan-core'], site.code, site.name,
                        event['date'].strftime("%Y-%m-%d %H:%M"),
                        event['code']]
                    yield '\n' + ','.join(
                        '"' + str(val) + '"' for val in vals) + ','
            else:
                # Keep the stream alive even when nothing happened.
                yield ' '
    except BaseException:
        yield traceback.format_exc()
def content(year, supply_id, user):
    """Write the CRC special-events CSV report (meter changes, supplier
    changes, connections and disconnections) for the financial year
    starting 1 April ``year``.

    supply_id -- restrict to one supply; None means all net/gen supplies
    user -- used to name the download file
    """
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'crc_special_events.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(("MPAN Core", "Site Id", "Site Name", "Date", "Event"))
        year_start = datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc))

        def add_event(events, date, code, era=None, mpan_core=None):
            # Record an event against the era's MPAN cores, or against the
            # explicit mpan_core when no era is given.
            if era is None:
                mpan_cores = [mpan_core]
            else:
                mpan_cores = []
                if era.imp_mpan_core is not None:
                    mpan_cores.append(era.imp_mpan_core)
                if era.exp_mpan_core is not None:
                    mpan_cores.append(era.exp_mpan_core)
            for mpan_core in mpan_cores:
                events.append({
                    'date': date, 'code': code, 'mpan-core': mpan_core})

        if supply_id is None:
            supplies = sess.query(Supply).join(Source).join(Era).filter(
                Source.code.in_(('net', 'gen-net', 'gen')),
                Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).distinct()
        else:
            # Fix: get_by_id takes the session first, matching every other
            # get_by_id call in this module.
            supply = Supply.get_by_id(sess, supply_id)
            supplies = sess.query(Supply).filter(Supply.id == supply.id)

        for supply in supplies:
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).order_by(
                Era.start_date).all()
            events = []

            # Connection / disconnection within the year.
            first_era = eras[0]
            first_era_start = first_era.start_date
            if hh_after(first_era_start, year_start):
                add_event(events, first_era_start, "New Supply", first_era)
            last_era = eras[-1]
            last_era_finish = last_era.finish_date
            if hh_before(last_era_finish, year_finish):
                add_event(events, last_era_finish, "Disconnection", last_era)

            # Changes between consecutive eras.
            prev_era = first_era
            for era in eras[1:]:
                if era.msn != prev_era.msn:
                    add_event(events, era.start_date, "Meter Change", era)
                if era.pc.code != prev_era.pc.code:
                    add_event(
                        events, era.start_date,
                        "Change Of Profile Class", era)
                if era.mop_contract_id != prev_era.mop_contract_id:
                    add_event(events, era.start_date, "Change Of MOP", era)
                if era.dc_contract_id != prev_era.dc_contract_id:
                    add_event(events, era.start_date, "Change Of DC", era)

                for is_import in [True, False]:
                    if era.imp_mpan_core is None:
                        mpan_core = era.exp_mpan_core
                    else:
                        mpan_core = era.imp_mpan_core
                    if is_import:
                        cur_sup = era.imp_supplier_contract
                        prev_sup = prev_era.imp_supplier_contract
                    else:
                        cur_sup = era.exp_supplier_contract
                        prev_sup = prev_era.exp_supplier_contract
                    if cur_sup is None and prev_sup is not None:
                        # Fix: pass mpan_core by its own parameter; the
                        # original passed it positionally as 'era', which
                        # raised AttributeError on a plain string.
                        add_event(
                            events, era.start_date, "End of supply",
                            None, mpan_core)
                    elif cur_sup is not None and prev_sup is None:
                        add_event(
                            events, era.start_date, "Start of supply",
                            None, mpan_core)
                    elif cur_sup is not None and \
                            prev_sup is not None and cur_sup != prev_sup:
                        add_event(
                            events, era.start_date, "Change Of Supplier",
                            None, mpan_core)

                prev_era = era

            if len(events) > 0:
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era == last_era).one()
                for event in events:
                    vals = [
                        event['mpan-core'], site.code, site.name,
                        event['date'].strftime("%Y-%m-%d %H:%M"),
                        event['code']]
                    writer.writerow(vals)

            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        # Release the session and publish the (possibly partial) report.
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(supply_id, file_name, start_date, finish_date, user):
    """Write a CSV of monthly virtual bills (MOP, DC, import and export
    supplier) for one supply between `start_date` and `finish_date`.

    :param supply_id: id of the Supply to report on.
    :param file_name: unused here; preserved for interface compatibility.
    :param start_date: first half-hour of the report period (tz-aware).
    :param finish_date: last half-hour of the report period (tz-aware).
    :param user: used only to name the download files.

    The period is walked month by month; within each month every era of
    the supply that overlaps the month gets one output row. The title row
    is re-emitted only when the set of titles changes between rows.
    """
    caches = {}
    # Initialise everything referenced in `except`/`finally` so an early
    # failure (e.g. in make_names or open) can't raise NameError there.
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supply_virtual_bills_' + str(supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        prev_titles = None
        month_start = datetime.datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)
        while not month_start > finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            # Clamp the month to the requested report period.
            if month_start > start_date:
                period_start = month_start
            else:
                period_start = start_date
            if month_finish > finish_date:
                period_finish = finish_date
            else:
                period_finish = month_finish
            for era in sess.query(Era).filter(
                    Era.supply == supply, Era.start_date < period_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date > period_start)).order_by(
                    Era.start_date):
                # Intersect the era with the period.
                chunk_start = era.start_date \
                    if era.start_date > period_start else period_start
                chunk_finish = period_finish \
                    if hh_before(period_finish, era.finish_date) \
                    else era.finish_date
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era,
                    SiteEra.is_physical == true()).one()
                ds = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    True, None, caches)
                titles = [
                    'Imp MPAN Core', 'Exp MPAN Core', 'Site Code',
                    'Site Name', 'Account', 'From', 'To', '']
                output_line = [
                    era.imp_mpan_core, era.exp_mpan_core, site.code,
                    site.name, ds.supplier_account,
                    hh_format(ds.start_date), hh_format(ds.finish_date), '']

                # MOP virtual bill: known titles first, then any extra
                # keys (sorted) appended as name/value pairs.
                mop_titles = ds.contract_func(
                    era.mop_contract, 'virtual_bill_titles')()
                titles.extend(['mop-' + t for t in mop_titles])
                ds.contract_func(era.mop_contract, 'virtual_bill')(ds)
                bill = ds.mop_bill
                for title in mop_titles:
                    if title in bill:
                        output_line.append(bill[title])
                        del bill[title]
                    else:
                        output_line.append('')
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])

                # DC virtual bill.
                output_line.append('')
                dc_titles = ds.contract_func(
                    era.hhdc_contract, 'virtual_bill_titles')()
                titles.append('')
                titles.extend(['dc-' + t for t in dc_titles])
                ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds)
                bill = ds.dc_bill
                for title in dc_titles:
                    output_line.append(bill.get(title, ''))
                    if title in bill:
                        del bill[title]
                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])

                # Import supplier virtual bill, if there is one.
                if era.imp_supplier_contract is not None:
                    output_line.append('')
                    imp_supplier_titles = ds.contract_func(
                        era.imp_supplier_contract, 'virtual_bill_titles')()
                    titles.append('')
                    titles.extend(
                        ['imp-supplier-' + t for t in imp_supplier_titles])
                    ds.contract_func(
                        era.imp_supplier_contract, 'virtual_bill')(ds)
                    bill = ds.supplier_bill
                    for title in imp_supplier_titles:
                        if title in bill:
                            output_line.append(bill[title])
                            del bill[title]
                        else:
                            output_line.append('')
                    for k in sorted(bill.keys()):
                        output_line.extend([k, bill[k]])

                # Export supplier virtual bill needs an export-orientated
                # data source.
                if era.exp_supplier_contract is not None:
                    ds = chellow.computer.SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date,
                        era, False, None, caches)
                    output_line.append('')
                    exp_supplier_titles = ds.contract_func(
                        era.exp_supplier_contract, 'virtual_bill_titles')()
                    titles.append('')
                    titles.extend(
                        ['exp-supplier-' + t for t in exp_supplier_titles])
                    ds.contract_func(
                        era.exp_supplier_contract, 'virtual_bill')(ds)
                    bill = ds.supplier_bill
                    for title in exp_supplier_titles:
                        output_line.append(bill.get(title, ''))
                        if title in bill:
                            del bill[title]
                    for k in sorted(bill.keys()):
                        output_line.extend([k, bill[k]])

                if titles != prev_titles:
                    # BUG FIX: this was 'prev_titles != titles', a no-op
                    # comparison, so the header was written for every row.
                    prev_titles = titles
                    writer.writerow([str(v) for v in titles])
                for i, val in enumerate(output_line):
                    if isinstance(val, datetime.datetime):
                        output_line[i] = hh_format(val)
                    elif val is None:
                        output_line[i] = ''
                    else:
                        output_line[i] = str(val)
                writer.writerow(output_line)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        if writer is not None:
            writer.writerow(["Problem: " + e.description])
    except BaseException:
        # Was a bare 'except:'; BaseException is equivalent but explicit.
        msg = traceback.format_exc()
        if writer is not None:
            writer.writerow([msg])
        else:
            sys.stderr.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)