def get_data_sources(data_source, start_date, finish_date, forecast_date=None):
    if forecast_date is None:
        forecast_date = data_source.forecast_date

    if data_source.start_date == start_date and \
            data_source.finish_date == finish_date \
            and forecast_date == data_source.forecast_date:
        yield data_source
    else:
        for g_era in data_source.sess.query(GEra).filter(
                GEra.g_supply == data_source.g_supply,
                GEra.start_date <= finish_date,
                or_(
                    GEra.finish_date == null(),
                    GEra.finish_date >= start_date)):

            g_era_start = g_era.start_date
            if start_date < g_era_start:
                chunk_start = g_era_start
            else:
                chunk_start = start_date

            g_era_finish = g_era.finish_date
            chunk_finish = g_era_finish if \
                hh_after(finish_date, g_era_finish) else finish_date

            ds = GDataSource(
                data_source.sess, chunk_start, chunk_finish, forecast_date,
                g_era, data_source.caches, data_source.bill)
            yield ds
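# A minimal, standard-library-only sketch of the chunking rule used by
# get_data_sources above: the requested window is clamped to each era's own
# start and finish dates. The helper name clamp_to_era and its arguments are
# illustrative assumptions, not part of the Chellow API.
from datetime import datetime


def clamp_to_era(start, finish, era_start, era_finish):
    """Return the overlap of [start, finish] with [era_start, era_finish]."""
    chunk_start = era_start if start < era_start else start
    # An open-ended era is represented here by era_finish being None.
    chunk_finish = finish if era_finish is None or finish < era_finish \
        else era_finish
    return chunk_start, chunk_finish


if __name__ == "__main__":
    # The requested March-to-June window is clamped to an era starting in April.
    print(clamp_to_era(
        datetime(2020, 3, 1), datetime(2020, 6, 30),
        datetime(2020, 4, 1), None))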
def _find_hhs(
        sess, caches, hist_g_era, pairs, chunk_start, chunk_finish, g_cv_id,
        g_ldz_code):
    hhs = {}
    if len(pairs) == 0:
        pairs.append({'start-date': chunk_start, 'units': 0})

    # set finish dates
    for i in range(1, len(pairs)):
        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] - HH
    pairs[-1]['finish-date'] = None

    # stretch
    if hh_after(pairs[0]['start-date'], chunk_start):
        pairs[0]['start-date'] = chunk_start

    # chop
    if hh_before(pairs[0]['finish-date'], chunk_start):
        del pairs[0]
    if hh_after(pairs[-1]['start-date'], chunk_finish):
        del pairs[-1]

    # squash
    if hh_before(pairs[0]['start-date'], chunk_start):
        pairs[0]['start-date'] = chunk_start
    if hh_after(pairs[-1]['finish-date'], chunk_finish):
        pairs[-1]['finish-date'] = chunk_finish

    cf = float(hist_g_era.correction_factor)
    g_unit = hist_g_era.g_unit
    unit_code, unit_factor = g_unit.code, float(g_unit.factor)
    for pair in pairs:
        units = pair['units']
        for hh_date in hh_range(
                caches, pair['start-date'], pair['finish-date']):
            cv, avg_cv = find_cv(sess, caches, g_cv_id, hh_date, g_ldz_code)

            hhs[hh_date] = {
                'unit_code': unit_code,
                'unit_factor': unit_factor,
                'units_consumed': units,
                'correction_factor': cf,
                'calorific_value': cv,
                'avg_cv': avg_cv
            }
    return hhs
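# A simplified, self-contained sketch of the pair normalisation that
# _find_hhs performs: fill in each pair's finish date from the next pair's
# start, then stretch and squash the list so it exactly covers the chunk
# window. It uses plain datetimes and assumed names (normalise_pairs,
# HALF_HOUR); the chop step and Chellow's hh_* helpers are omitted.
from datetime import datetime, timedelta

HALF_HOUR = timedelta(minutes=30)


def normalise_pairs(pairs, chunk_start, chunk_finish):
    # set finish dates from the following pair's start date
    for i in range(1, len(pairs)):
        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] - HALF_HOUR
    pairs[-1]['finish-date'] = chunk_finish
    # stretch the first pair back to the start of the chunk
    if pairs[0]['start-date'] > chunk_start:
        pairs[0]['start-date'] = chunk_start
    # squash anything that spills outside the chunk
    if pairs[0]['start-date'] < chunk_start:
        pairs[0]['start-date'] = chunk_start
    if pairs[-1]['finish-date'] > chunk_finish:
        pairs[-1]['finish-date'] = chunk_finish
    return pairs


print(normalise_pairs(
    [{'start-date': datetime(2021, 1, 3), 'units': 10},
     {'start-date': datetime(2021, 1, 10), 'units': 12}],
    datetime(2021, 1, 1), datetime(2021, 1, 31, 23, 30)))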
def make_raw_bills(self):
    raw_bills = []
    for self.line_number, code in enumerate(self.parser):
        if code == "CLO":
            cloc = self.parser.elements[0]
            account = cloc[1]
        elif code == "BCD":
            ivdt = self.parser.elements[0]
            invn = self.parser.elements[2]
            btcd = self.parser.elements[5]
            reference = invn[0]
            bill_type_code = btcd[0]
            issue_date = self.parser.to_date(ivdt[0])
        elif code == "MHD":
            typ = self.parser.elements[1]
            message_type = typ[0]
            if message_type == "UTLBIL":
                issue_date = None
                start_date = None
                finish_date = None
                account = None
                reference = None
                net = Decimal(0.00)
                vat = Decimal(0.00)
                reads = []
                mpan_strings = []
        elif code == "CCD":
            ccde = self.parser.elements[1]
            consumption_charge_indicator = ccde[0]
            charge_type = ccde[2]
            if consumption_charge_indicator != "5" and \
                    charge_type in ["7", "8", "9"]:
                prev_read_date = self.parser.to_date(
                    self.parser.elements[7][0])
                if hh_after(start_date, prev_read_date):
                    start_date = prev_read_date

                register_finish_date = self.parser.to_date(
                    self.parser.elements[6][0])
                if finish_date is None or finish_date < register_finish_date:
                    finish_date = register_finish_date

                if charge_type == "7":
                    tmod = self.parser.elements[3]
                    mtnr = self.parser.elements[4]
                    mloc = self.parser.elements[5]
                    prrd = self.parser.elements[9]
                    adjf = self.parser.elements[12]
                    pres_read_type = read_type_map[prrd[1]]
                    prev_read_type = read_type_map[prrd[3]]
                    coefficient = Decimal(adjf[1]) / Decimal(100000)
                    pres_read_value = Decimal(prrd[0]) / Decimal(1000)
                    prev_read_value = Decimal(prrd[2]) / Decimal(1000)
                    msn = mtnr[0]
                    tpr_code = tmod[0].zfill(5)
                    reads.append(
                        {
                            'msn': msn, 'mpan': mloc[0],
                            'coefficient': coefficient, 'units': 'kWh',
                            'tpr_code': tpr_code,
                            'prev_date': prev_read_date,
                            'prev_value': prev_read_value,
                            'prev_type_code': prev_read_type,
                            'pres_date': register_finish_date,
                            'pres_value': pres_read_value,
                            'pres_type_code': pres_read_type})
        elif code == "MTR":
            if message_type == "UTLBIL":
                raw_bills.append(
                    {
                        'bill_type_code': bill_type_code, 'account': account,
                        'mpans': mpan_strings, 'reference': reference,
                        'issue_date': issue_date, 'start_date': start_date,
                        'finish_date': finish_date, 'kwh': Decimal(0),
                        'net': net, 'vat': vat, 'gross': Decimal('0.00'),
                        'breakdown': {}, 'reads': reads})
        elif code == "MAN":
            madn = self.parser.elements[2]
            pc_code = "0" + madn[3]
            mtc_code = madn[4]
            llfc_code = madn[5]
            mpan_strings.append(
                pc_code + " " + mtc_code + " " + llfc_code + " " +
                madn[0] + " " + madn[1] + madn[2])
        elif code == "VAT":
            uvla = self.parser.elements[5]
            net = Decimal('0.00') + self.parser.to_decimal(uvla)
            uvtt = self.parser.elements[6]
            vat = Decimal('0.00') + self.parser.to_decimal(uvtt)
    return raw_bills
def process_site(
        sess, site, month_start, month_finish, forecast_date, tmp_file,
        start_date, finish_date, caches):
    site_code = site.code
    associates = []
    sources = set()
    generator_types = set()
    metering_type = 'no-supply'
    problem = ''
    month_data = {}
    for stream_name in [
            'import-net', 'export-net', 'import-gen', 'export-gen',
            'import-3rd-party', 'export-3rd-party', 'msp', 'used',
            'used-3rd-party']:
        month_data[stream_name + '-kwh'] = 0
        month_data[stream_name + '-gbp'] = 0

    billed_gbp = 0
    billed_kwh = 0

    for group in site.groups(sess, month_start, month_finish, False):
        for cand_site in group.sites:
            cand_site_code = cand_site.code
            if cand_site_code != site_code and \
                    cand_site_code not in associates:
                associates.append(cand_site_code)
        for cand_supply in group.supplies:
            sources.add(cand_supply.source.code)
            if cand_supply.generator_type is not None:
                generator_types.add(cand_supply.generator_type.code)
            for cand_era in cand_supply.find_eras(
                    sess, group.start_date, group.finish_date):
                if metering_type != 'hh':
                    if cand_era.pc.code == '00':
                        metering_type = 'hh'
                    elif metering_type != 'amr':
                        if len(cand_era.channels) > 0:
                            metering_type = 'amr'
                        elif metering_type != 'nhh':
                            if cand_era.mtc.meter_type.code not in [
                                    'UM', 'PH']:
                                metering_type = 'nhh'
                            else:
                                metering_type = 'unmetered'

    for group in site.groups(sess, month_start, month_finish, True):
        if group.start_date > start_date:
            chunk_start = group.start_date
        else:
            chunk_start = start_date
        if group.finish_date > finish_date:
            chunk_finish = finish_date
        else:
            chunk_finish = group.finish_date

        for supply in group.supplies:
            source_code = supply.source.code
            for era in sess.query(Era).filter(
                    Era.supply == supply, Era.start_date <= chunk_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= chunk_start)).options(
                    joinedload(Era.mop_contract),
                    joinedload(Era.hhdc_contract),
                    joinedload(Era.imp_supplier_contract),
                    joinedload(Era.exp_supplier_contract)):
                tmp_file.write(' ')

                # GBP
                if era.start_date > chunk_start:
                    bill_start = era.start_date
                else:
                    bill_start = chunk_start
                if hh_after(era.finish_date, chunk_finish):
                    bill_finish = chunk_finish
                else:
                    bill_finish = era.finish_date

                supply_source = None
                supplier_contract = era.imp_supplier_contract
                if supplier_contract is not None:
                    supply_source = chellow.computer.SupplySource(
                        sess, bill_start, bill_finish, forecast_date, era,
                        True, tmp_file, caches)
                    if supply_source.measurement_type not in ['hh', 'amr']:
                        kwh = sum(
                            hh['msp-kwh'] for hh in supply_source.hh_data)
                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-kwh'] += kwh
                        elif source_code in ('3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-kwh'] += kwh

                    import_vb_function = chellow.computer.contract_func(
                        caches, supplier_contract, 'virtual_bill', tmp_file)
                    if import_vb_function is None:
                        problem += "Can't find the virtual_bill function in " \
                            "the supplier contract. "
                    else:
                        import_vb_function(supply_source)
                        v_bill = supply_source.supplier_bill

                        if 'problem' in v_bill and len(v_bill['problem']) > 0:
                            problem += 'Supplier Problem: ' + v_bill['problem']

                        try:
                            gbp = v_bill['net-gbp']
                        except KeyError:
                            problem += 'For the supply ' + \
                                supply_source.mpan_core + \
                                ' the virtual bill ' + str(v_bill) + \
                                ' from the contract ' + \
                                supplier_contract.getName() + \
                                ' does not contain the net-gbp key.'
                            # Fall back to zero so the totals below can still
                            # be accumulated after recording the problem.
                            gbp = 0

                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                        elif source_code in ('3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp

                if supply_source is None:
                    supply_source = chellow.computer.SupplySource(
                        sess, bill_start, bill_finish, forecast_date, era,
                        False, tmp_file, caches)

                dc_contract = era.hhdc_contract
                supply_source.contract_func(
                    dc_contract, 'virtual_bill')(supply_source)
                dc_bill = supply_source.dc_bill
                dc_gbp = dc_bill['net-gbp']
                if 'problem' in dc_bill and len(dc_bill['problem']) > 0:
                    problem += 'DC Problem: ' + dc_bill['problem']

                mop_contract = era.mop_contract
                mop_bill_function = supply_source.contract_func(
                    mop_contract, 'virtual_bill')
                mop_bill_function(supply_source)
                mop_bill = supply_source.mop_bill
                mop_gbp = mop_bill['net-gbp']
                if 'problem' in mop_bill and len(mop_bill['problem']) > 0:
                    problem += 'MOP Problem: ' + mop_bill['problem']

                if source_code in ('3rd-party', '3rd-party-reverse'):
                    month_data['import-3rd-party-gbp'] += dc_gbp + mop_gbp
                else:
                    month_data['import-net-gbp'] += dc_gbp + mop_gbp

            for bill in sess.query(Bill).filter(
                    Bill.supply == supply, Bill.start_date <= chunk_finish,
                    Bill.finish_date >= chunk_start):
                bill_start = bill.start_date
                bill_finish = bill.finish_date
                bill_duration = (bill_finish - bill_start).total_seconds() + \
                    (30 * 60)
                overlap_duration = (
                    min(bill_finish, chunk_finish) -
                    max(bill_start, chunk_start)).total_seconds() + (30 * 60)
                overlap_proportion = float(overlap_duration) / bill_duration
                billed_gbp += overlap_proportion * float(bill.net)
                billed_kwh += overlap_proportion * float(bill.kwh)

        displaced_era = chellow.computer.displaced_era(
            sess, group, chunk_start, chunk_finish)
        site_ds = chellow.computer.SiteSource(
            sess, site, chunk_start, chunk_finish, forecast_date, tmp_file,
            caches, displaced_era)
        if displaced_era is not None:
            chellow.computer.contract_func(
                caches, displaced_era.imp_supplier_contract,
                'displaced_virtual_bill', tmp_file)(site_ds)
            month_data['msp-gbp'] += site_ds.supplier_bill['net-gbp']

        for stream_name in (
                'import-3rd-party', 'export-3rd-party', 'import-net',
                'export-net', 'import-gen', 'export-gen', 'msp'):
            name = stream_name + '-kwh'
            month_data[name] += sum(hh[name] for hh in site_ds.hh_data)

    month_data['used-3rd-party-kwh'] = \
        month_data['import-3rd-party-kwh'] - \
        month_data['export-3rd-party-kwh']
    month_data['used-3rd-party-gbp'] = month_data['import-3rd-party-gbp']

    month_data['used-gbp'] += \
        month_data['import-net-gbp'] + month_data['msp-gbp'] + \
        month_data['used-3rd-party-gbp']

    month_data['used-kwh'] += month_data['msp-kwh'] + \
        month_data['used-3rd-party-kwh'] + month_data['import-net-kwh']

    result = [
        site.code, site.name, ','.join(associates),
        ','.join(sorted(list(sources))),
        '.'.join(sorted(list(generator_types))),
        hh_format(month_finish), month_data['import-net-kwh'],
        month_data['msp-kwh'], month_data['export-net-kwh'],
        month_data['used-kwh'], month_data['export-gen-kwh'],
        month_data['import-gen-kwh'], month_data['import-3rd-party-kwh'],
        month_data['export-3rd-party-kwh'], month_data['import-net-gbp'],
        month_data['msp-gbp'], 0, month_data['used-gbp'],
        month_data['used-3rd-party-gbp'], billed_kwh, billed_gbp,
        metering_type, problem]
    return result
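# A self-contained sketch (plain datetimes, no Chellow imports) of the
# apportionment used above when a bill only partly overlaps the reporting
# chunk: both durations are extended by the final half hour, and the bill's
# net GBP or kWh is scaled by the overlap proportion. The function name
# apportion is an assumption for illustration only.
from datetime import datetime

HALF_HOUR_SECONDS = 30 * 60


def apportion(bill_start, bill_finish, chunk_start, chunk_finish, amount):
    bill_duration = (
        bill_finish - bill_start).total_seconds() + HALF_HOUR_SECONDS
    overlap_duration = (
        min(bill_finish, chunk_finish) -
        max(bill_start, chunk_start)).total_seconds() + HALF_HOUR_SECONDS
    return overlap_duration / bill_duration * amount


# A 31-day January bill of 310.0 apportioned to the last 10 days of the month.
print(apportion(
    datetime(2021, 1, 1), datetime(2021, 1, 31, 23, 30),
    datetime(2021, 1, 22), datetime(2021, 1, 31, 23, 30), 310.0))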
def _process_hh(ds, rate_period, est_kw, hh):
    month_start, month_finish = next(
        c_months_u(start_year=hh["ct-year"], start_month=hh["ct-month"]))

    month_start_ct = to_ct(month_start)
    if month_start_ct.month > 3:
        year = month_start_ct.year
    else:
        year = month_start_ct.year - 1
    financial_year_start = to_utc(ct_datetime(year, 4, 1))
    last_financial_year_start = to_utc(ct_datetime(year - 1, 4, 1))
    financial_year_finish = to_utc(ct_datetime(year + 1, 3, 31, 23, 30))

    est_triad_kws = []
    earliest_triad = None
    for dt in get_file_rates(
        ds.caches, "triad_dates", last_financial_year_start
    )["triad_dates"]:
        triad_hh = None
        earliest_triad = hh_min(earliest_triad, dt)
        try:
            d = next(ds.get_data_sources(dt, dt, financial_year_start))
            chellow.duos.duos_vb(d)
            triad_hh = d.hh_data[0]

            while dt < financial_year_start:
                dt += relativedelta(years=1)

            for d in ds.get_data_sources(dt, dt, financial_year_start):
                chellow.duos.duos_vb(d)
                datum = d.hh_data[0]
                triad_hh["laf"] = datum["laf"]
                triad_hh["gsp-kw"] = datum["laf"] * triad_hh["msp-kw"]
        except StopIteration:
            triad_hh = {
                "hist-start": dt,
                "msp-kw": 0,
                "start-date": dt,
                "status": "before start of MPAN",
                "laf": 1,
                "gsp-kw": 0,
            }
        est_triad_kws.append(triad_hh)

    if ds.site is None:
        era = ds.supply.find_era_at(ds.sess, earliest_triad)
        if (
            era is None
            or era.get_channel(ds.sess, ds.is_import, "ACTIVE") is None
            and est_kw is None
        ):
            est_kw = 0.85 * max(datum["msp-kwh"] for datum in ds.hh_data) * 2
        if est_kw is not None:
            for est_datum in est_triad_kws:
                est_datum["msp-kw"] = est_kw
                est_datum["gsp-kw"] = est_datum["msp-kw"] * est_datum["laf"]

    gsp_kw = 0
    for i, triad_hh in enumerate(est_triad_kws):
        triad_prefix = "triad-estimate-" + str(i + 1)
        hh[triad_prefix + "-date"] = triad_hh["hist-start"]
        hh[triad_prefix + "-msp-kw"] = triad_hh["msp-kw"]
        hh[triad_prefix + "-status"] = triad_hh["status"]
        hh[triad_prefix + "-laf"] = triad_hh["laf"]
        hh[triad_prefix + "-gsp-kw"] = triad_hh["gsp-kw"]
        gsp_kw += triad_hh["gsp-kw"]

    hh["triad-estimate-gsp-kw"] = gsp_kw / 3
    polarity = "import" if ds.llfc.is_import else "export"
    gsp_group_code = ds.gsp_group_code
    rate = float(
        get_file_rates(ds.caches, "triad_rates", month_start)[
            "triad_gbp_per_gsp_kw"
        ][polarity][gsp_group_code]
    )

    hh["triad-estimate-rate"] = rate

    est_triad_gbp = hh["triad-estimate-rate"] * hh["triad-estimate-gsp-kw"]

    if rate_period == "monthly":
        total_intervals = 12

        est_intervals = 1
        hh["triad-estimate-months"] = est_intervals
    else:
        dt = financial_year_start
        total_intervals = 0
        while dt <= financial_year_finish:
            total_intervals += 1
            dt += relativedelta(days=1)

        est_intervals = 0
        for d in ds.get_data_sources(month_start, month_finish):
            for h in d.hh_data:
                if h["ct-decimal-hour"] == 0:
                    est_intervals += 1

        hh["triad-estimate-days"] = est_intervals

    hh["triad-estimate-gbp"] = est_triad_gbp / total_intervals * est_intervals

    if hh["ct-month"] == 3:
        triad_kws = []
        for t_date in get_file_rates(ds.caches, "triad_dates", month_start)[
            "triad_dates"
        ]:
            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (
                    ds.supplier_contract is None
                    or d.supplier_contract == ds.supplier_contract
                ):
                    chellow.duos.duos_vb(d)
                    thh = d.hh_data[0]
                else:
                    thh = {
                        "hist-start": t_date,
                        "msp-kw": 0,
                        "start-date": t_date,
                        "status": "before contract",
                        "laf": "before contract",
                        "gsp-kw": 0,
                    }
            except StopIteration:
                thh = {
                    "hist-start": t_date,
                    "msp-kw": 0,
                    "start-date": t_date,
                    "status": "before start of supply",
                    "laf": "before start of supply",
                    "gsp-kw": 0,
                }

            while t_date < financial_year_start:
                t_date += relativedelta(years=1)

            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (
                    ds.supplier_contract is None
                    or d.supplier_contract == ds.supplier_contract
                ):
                    chellow.duos.duos_vb(d)
                    thh["laf"] = d.hh_data[0]["laf"]
                    thh["gsp-kw"] = thh["laf"] * thh["msp-kw"]
            except StopIteration:
                pass

            triad_kws.append(thh)

        gsp_kw = 0
        for i, triad_hh in enumerate(triad_kws):
            pref = "triad-actual-" + str(i + 1)
            hh[pref + "-date"] = triad_hh["start-date"]
            hh[pref + "-msp-kw"] = triad_hh["msp-kw"]
            hh[pref + "-status"] = triad_hh["status"]
            hh[pref + "-laf"] = triad_hh["laf"]
            hh[pref + "-gsp-kw"] = triad_hh["gsp-kw"]
            gsp_kw += triad_hh["gsp-kw"]

        hh["triad-actual-gsp-kw"] = gsp_kw / 3
        polarity = "import" if ds.llfc.is_import else "export"
        gsp_group_code = ds.gsp_group_code
        tot_rate = 0
        for start_date, finish_date, script in get_file_scripts("triad_rates"):
            if start_date <= financial_year_finish and not hh_before(
                finish_date, financial_year_start
            ):
                start_month = to_ct(start_date).month
                if start_month < 4:
                    start_month += 12

                if finish_date is None:
                    finish_month = 3
                else:
                    finish_month = to_ct(finish_date).month

                if finish_month < 4:
                    finish_month += 12

                rt = get_file_rates(ds.caches, "triad_rates", start_date)[
                    "triad_gbp_per_gsp_kw"
                ][polarity][gsp_group_code]
                tot_rate += (finish_month - start_month + 1) * float(rt)

        rate = tot_rate / 12
        hh["triad-actual-rate"] = rate

        hh["triad-actual-gbp"] = hh["triad-actual-rate"] * hh[
            "triad-actual-gsp-kw"]

        era = ds.supply.find_era_at(ds.sess, month_finish)
        est_intervals = 0

        interval = (
            relativedelta(months=1)
            if rate_period == "monthly"
            else relativedelta(days=1)
        )

        dt = month_finish
        while era is not None and dt > financial_year_start:
            est_intervals += 1
            dt -= interval
            if hh_after(dt, era.finish_date):
                era = ds.supply.find_era_at(ds.sess, dt)

        if rate_period == "monthly":
            hh["triad-all-estimates-months"] = est_intervals
        else:
            hh["triad-all-estimates-days"] = est_intervals

        hh["triad-all-estimates-gbp"] = (
            est_triad_gbp / total_intervals * est_intervals * -1
        )
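# A tiny, stand-alone illustration (all figures are assumed, not published
# rates) of the triad charge arithmetic used above: each triad half hour's
# metered kW is scaled to GSP level by its line-loss factor, the three
# GSP kW values are averaged, and the average is multiplied by the
# GBP-per-GSP-kW rate.
triads = [
    {"msp-kw": 120.0, "laf": 1.08},
    {"msp-kw": 131.5, "laf": 1.07},
    {"msp-kw": 118.2, "laf": 1.09},
]
rate_gbp_per_gsp_kw = 48.50  # illustrative rate only

gsp_kws = [t["msp-kw"] * t["laf"] for t in triads]
avg_gsp_kw = sum(gsp_kws) / 3
print(round(avg_gsp_kw * rate_gbp_per_gsp_kw, 2))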
def content(start_date, finish_date, g_contract_id, user):
    report_context = {}
    sess = f = None  # initialise f so the finally block can test it safely
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'gas_virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        g_contract = GContract.get_by_id(sess, g_contract_id)
        forecast_date = chellow.computer.forecast_date()

        month_start = Datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)
        month_finish = month_start + relativedelta(months=1) - HH

        bill_titles = chellow.computer.contract_func(
            report_context, g_contract, 'virtual_bill_titles', None)()
        writer.writerow(
            [
                'MPRN', 'Site Code', 'Site Name', 'Account', 'From', 'To'] +
            bill_titles)

        while not month_start > finish_date:
            period_start = start_date \
                if month_start < start_date else month_start

            if month_finish > finish_date:
                period_finish = finish_date
            else:
                period_finish = month_finish

            for g_era in sess.query(GEra).distinct().filter(
                    or_(
                        GEra.imp_g_contract == g_contract,
                        GEra.exp_g_contract == g_contract),
                    GEra.start_date <= period_finish,
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >= period_start)):

                g_era_start = g_era.start_date
                if period_start < g_era_start:
                    chunk_start = g_era_start
                else:
                    chunk_start = period_start

                g_era_finish = g_era.finish_date
                if hh_after(period_finish, g_era_finish):
                    chunk_finish = g_era_finish
                else:
                    chunk_finish = period_finish

                polarities = []
                if g_era.imp_g_contract == g_contract:
                    polarities.append(True)
                if g_era.exp_g_contract == g_contract:
                    polarities.append(False)

                for polarity in polarities:
                    data_source = chellow.g_engine.DataSource(
                        sess, chunk_start, chunk_finish, forecast_date, g_era,
                        polarity, None, report_context)

                    site = sess.query(Site).join(SiteGEra).filter(
                        SiteGEra.g_era == g_era,
                        SiteGEra.is_physical == true()).one()

                    vals = [
                        data_source.mprn, site.code, site.name,
                        data_source.supplier_account,
                        hh_format(data_source.start_date),
                        hh_format(data_source.finish_date)]

                    chellow.computer.contract_func(
                        report_context, g_contract, 'virtual_bill',
                        None)(data_source)
                    bill = data_source.bill
                    for title in bill_titles:
                        if title in bill:
                            val = str(bill[title])
                            del bill[title]
                        else:
                            val = ''
                        vals.append(val)

                    for k in sorted(bill.keys()):
                        vals.append(k)
                        vals.append(str(bill[k]))
                    writer.writerow(vals)

            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def content(year, month, months, supply_id, user):
    tmp_file = sess = None
    try:
        sess = Session()
        supplies = sess.query(Supply).join(Era).distinct()
        if supply_id is None:
            base_name = "supplies_monthly_duration_for_all_supplies_for_" + \
                str(months) + "_to_" + str(year) + "_" + str(month) + ".csv"
        else:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            base_name = "supplies_monthly_duration_for_" + str(supply.id) + \
                "_" + str(months) + "_to_" + str(year) + "_" + str(month) + \
                ".csv"
        running_name, finished_name = chellow.dloads.make_names(
            base_name, user)
        tmp_file = open(running_name, "w")
        caches = {}

        start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)

        field_names = (
            'supply-name', 'source-code', 'generator-type', 'month',
            'pc-code', 'msn', 'site-code', 'site-name', 'metering-type',
            'import-mpan-core', 'metered-import-kwh',
            'metered-import-net-gbp', 'metered-import-estimated-kwh',
            'billed-import-kwh', 'billed-import-net-gbp', 'export-mpan-core',
            'metered-export-kwh', 'metered-export-estimated-kwh',
            'billed-export-kwh', 'billed-export-net-gbp', 'problem',
            'timestamp')

        tmp_file.write('supply-id,' + ','.join(field_names) + '\n')

        forecast_date = chellow.computer.forecast_date()

        for i in range(months):
            month_start = start_date + relativedelta(months=i)
            month_finish = month_start + relativedelta(months=1) - HH

            for supply in supplies.filter(
                    Era.start_date <= month_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= month_start)):
                generator_type = supply.generator_type
                if generator_type is None:
                    generator_type = ''
                else:
                    generator_type = generator_type.code

                source_code = supply.source.code
                eras = supply.find_eras(sess, month_start, month_finish)
                era = eras[-1]
                metering_type = era.make_meter_category()

                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()

                values = {
                    'supply-name': supply.name,
                    'source-code': source_code,
                    'generator-type': generator_type,
                    'month': hh_format(month_finish),
                    'pc-code': era.pc.code,
                    'msn': era.msn,
                    'site-code': site.code,
                    'site-name': site.name,
                    'metering-type': metering_type,
                    'problem': ''}

                tmp_file.write(str(supply.id) + ',')

                for is_import, pol_name in [
                        (True, 'import'), (False, 'export')]:
                    if is_import:
                        mpan_core = era.imp_mpan_core
                    else:
                        mpan_core = era.exp_mpan_core

                    values[pol_name + '-mpan-core'] = mpan_core

                    kwh = 0
                    est_kwh = 0

                    if metering_type in ['hh', 'amr']:
                        est_kwh = sess.query(HhDatum.value).join(Channel) \
                            .join(Era).filter(
                                HhDatum.status == 'E',
                                Era.supply_id == supply.id,
                                Channel.channel_type == 'ACTIVE',
                                Channel.imp_related == is_import,
                                HhDatum.start_date >= month_start,
                                HhDatum.start_date <= month_finish).first()
                        if est_kwh is None:
                            est_kwh = 0
                        else:
                            est_kwh = est_kwh[0]

                    if not (is_import and source_code in ('net', 'gen-net')):
                        kwh_sum = sess.query(
                            cast(func.sum(HhDatum.value), Float)
                        ).join(Channel).join(Era).filter(
                            Era.supply_id == supply.id,
                            Channel.channel_type == 'ACTIVE',
                            Channel.imp_related == is_import,
                            HhDatum.start_date >= month_start,
                            HhDatum.start_date <= month_finish).one()[0]
                        if kwh_sum is not None:
                            kwh += kwh_sum

                    values['metered-' + pol_name + '-estimated-kwh'] = est_kwh
                    values['metered-' + pol_name + '-kwh'] = kwh
                    values['metered-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-kwh'] = 0
                    values['billed-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-apportioned-kwh'] = 0
                    values['billed-' + pol_name + '-apportioned-net-gbp'] = 0
                    values['billed-' + pol_name + '-raw-kwh'] = 0
                    values['billed-' + pol_name + '-raw-net-gbp'] = 0

                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= month_finish,
                        Bill.finish_date >= month_start):
                    bill_start = bill.start_date
                    bill_finish = bill.finish_date
                    bill_duration = (
                        bill_finish - bill_start).total_seconds() + 30 * 60
                    overlap_duration = (
                        min(bill_finish, month_finish) -
                        max(bill_start, month_start)).total_seconds() + \
                        30 * 60
                    overlap_proportion = float(
                        overlap_duration) / float(bill_duration)
                    values['billed-import-net-gbp'] += \
                        overlap_proportion * float(bill.net)
                    values['billed-import-kwh'] += \
                        overlap_proportion * float(bill.kwh)

                for era in eras:
                    if era.start_date > month_start:
                        chunk_start = era.start_date
                    else:
                        chunk_start = month_start
                    if hh_after(era.finish_date, month_finish):
                        chunk_finish = month_finish
                    else:
                        chunk_finish = era.finish_date

                    import_mpan_core = era.imp_mpan_core
                    if import_mpan_core is None:
                        continue

                    supplier_contract = era.imp_supplier_contract

                    if source_code in ['net', 'gen-net', '3rd-party']:
                        supply_source = chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, True, None, caches)

                        values['metered-import-kwh'] += sum(
                            datum['msp-kwh']
                            for datum in supply_source.hh_data)

                        import_vb_function = supply_source.contract_func(
                            supplier_contract, 'virtual_bill')
                        if import_vb_function is None:
                            values['problem'] += "Can't find the " \
                                "virtual_bill function in the supplier " \
                                "contract. "
                        else:
                            import_vb_function(supply_source)
                            values['metered-import-net-gbp'] += \
                                supply_source.supplier_bill['net-gbp']

                        supply_source.contract_func(
                            era.hhdc_contract, 'virtual_bill')(supply_source)
                        values['metered-import-net-gbp'] += \
                            supply_source.dc_bill['net-gbp']

                        mop_func = supply_source.contract_func(
                            era.mop_contract, 'virtual_bill')
                        if mop_func is None:
                            values['problem'] += " MOP virtual_bill " \
                                "function can't be found."
                        else:
                            mop_func(supply_source)
                            mop_bill = supply_source.mop_bill
                            values['metered-import-net-gbp'] += \
                                mop_bill['net-gbp']
                            if len(mop_bill['problem']) > 0:
                                values['problem'] += \
                                    " MOP virtual bill problem: " + \
                                    mop_bill['problem']

                values['timestamp'] = int(time.time() * 1000)

                tmp_file.write(
                    ','.join(
                        '"' + str(values[name]) + '"'
                        for name in field_names) + '\n')
    except:
        tmp_file.write(traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
        tmp_file.close()
        os.rename(running_name, finished_name)
def make_raw_bills(self):
    raw_bills = []
    for self.line_number, code in enumerate(self.parser):
        if code == "CLO":
            cloc = self.parser.elements[0]
            account = cloc[1]
        elif code == "BCD":
            ivdt = self.parser.elements[0]
            invn = self.parser.elements[2]
            btcd = self.parser.elements[5]
            reference = invn[0]
            bill_type_code = btcd[0]
            issue_date = to_date(ivdt[0])
        elif code == "MHD":
            typ = self.parser.elements[1]
            message_type = typ[0]
            if message_type == "UTLBIL":
                issue_date = None
                start_date = None
                finish_date = None
                account = None
                reference = None
                net = Decimal(0.00)
                vat = Decimal(0.00)
                reads = []
                mpan_core = None
        elif code == "CCD":
            ccde = self.parser.elements[1]
            consumption_charge_indicator = ccde[0]
            charge_type = ccde[2]
            if consumption_charge_indicator != "5" and charge_type in [
                "7",
                "8",
                "9",
            ]:
                prev_read_date = to_date(self.parser.elements[7][0])
                if hh_after(start_date, prev_read_date):
                    start_date = prev_read_date

                register_finish_date = to_date(self.parser.elements[6][0])
                if finish_date is None or finish_date < register_finish_date:
                    finish_date = register_finish_date

                if charge_type == "7":
                    tmod = self.parser.elements[3]
                    mtnr = self.parser.elements[4]
                    mloc = self.parser.elements[5]
                    prrd = self.parser.elements[9]
                    adjf = self.parser.elements[12]
                    pres_read_type = read_type_map[prrd[1]]
                    prev_read_type = read_type_map[prrd[3]]
                    coefficient = Decimal(adjf[1]) / Decimal(100000)
                    pres_read_value = Decimal(prrd[0]) / Decimal(1000)
                    prev_read_value = Decimal(prrd[2]) / Decimal(1000)
                    msn = mtnr[0]
                    tpr_code = tmod[0].zfill(5)
                    read = {
                        "msn": msn,
                        "mpan": mloc[0],
                        "coefficient": coefficient,
                        "units": "kWh",
                        "tpr_code": tpr_code,
                        "prev_date": prev_read_date,
                        "prev_value": prev_read_value,
                        "prev_type_code": prev_read_type,
                        "pres_date": register_finish_date,
                        "pres_value": pres_read_value,
                        "pres_type_code": pres_read_type,
                    }
                    reads.append(read)
        elif code == "MTR":
            if message_type == "UTLBIL":
                raw_bill = {
                    "bill_type_code": bill_type_code,
                    "account": account,
                    "mpan_core": mpan_core,
                    "reference": reference,
                    "issue_date": issue_date,
                    "start_date": start_date,
                    "finish_date": finish_date,
                    "kwh": Decimal(0),
                    "net": net,
                    "vat": vat,
                    "gross": Decimal("0.00"),
                    "breakdown": {},
                    "reads": reads,
                }
                raw_bills.append(raw_bill)
        elif code == "MAN":
            madn = self.parser.elements[2]
            # pc_code = "0" + madn[3]
            # mtc_code = madn[4]
            # llfc_code = madn[5]

            mpan_core = parse_mpan_core(madn[0] + " " + madn[1] + madn[2])
        elif code == "VAT":
            uvla = self.parser.elements[5]
            net = Decimal("0.00") + to_decimal(uvla)
            uvtt = self.parser.elements[6]
            vat = Decimal("0.00") + to_decimal(uvtt)
    return raw_bills
def content(year, supply_id, sess):
    yield "MPAN Core,Site Id,Site Name,Date,Event,"
    year_start = datetime(year, 4, 1, tzinfo=pytz.utc)
    year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc))

    def add_event(events, date, code, era=None, mpan_core=None):
        if era is None:
            mpan_cores = [mpan_core]
        else:
            mpan_cores = []
            if era.imp_mpan_core is not None:
                mpan_cores.append(era.imp_mpan_core)
            if era.exp_mpan_core is not None:
                mpan_cores.append(era.exp_mpan_core)
        for mpan_core in mpan_cores:
            events.append(
                {'date': date, 'code': code, 'mpan-core': mpan_core})

    try:
        if supply_id is None:
            supplies = sess.query(Supply).join(Source).join(Era).filter(
                Source.code.in_(('net', 'gen-net', 'gen')),
                Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).distinct()
        else:
            supply = Supply.get_by_id(supply_id)
            supplies = sess.query(Supply).filter(Supply.id == supply.id)

        for supply in supplies:
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(Era.finish_date == null(), Era.finish_date >= year_start)
                ).order_by(Era.start_date).all()
            events = []
            first_era = eras[0]
            first_era_start = first_era.start_date
            if hh_after(first_era_start, year_start):
                add_event(events, first_era_start, "New Supply", first_era)

            last_era = eras[-1]
            last_era_finish = last_era.finish_date
            if hh_before(last_era_finish, year_finish):
                add_event(events, last_era_finish, "Disconnection", last_era)

            prev_era = first_era
            for era in eras[1:]:
                if era.msn != prev_era.msn:
                    add_event(events, era.start_date, "Meter Change", era)
                if era.pc.code != prev_era.pc.code:
                    add_event(
                        events, era.start_date, "Change Of Profile Class",
                        era)

                if era.mop_contract_id != prev_era.mop_contract_id:
                    add_event(events, era.start_date, "Change Of MOP", era)

                if era.hhdc_contract_id != prev_era.hhdc_contract_id:
                    add_event(events, era.start_date, "Change Of DC", era)

                for is_import in [True, False]:
                    if era.imp_mpan_core is None:
                        mpan_core = era.exp_mpan_core
                    else:
                        mpan_core = era.imp_mpan_core
                    if is_import:
                        cur_sup = era.imp_supplier_contract
                        prev_sup = prev_era.imp_supplier_contract
                    else:
                        cur_sup = era.exp_supplier_contract
                        prev_sup = prev_era.exp_supplier_contract

                    if cur_sup is None and prev_sup is not None:
                        add_event(
                            events, era.start_date, "End of supply",
                            mpan_core)
                    elif cur_sup is not None and prev_sup is None:
                        add_event(
                            events, era.start_date, "Start of supply", None,
                            mpan_core)
                    elif cur_sup is not None and \
                            prev_sup is not None and cur_sup != prev_sup:
                        add_event(
                            events, era.start_date, "Change Of Supplier",
                            None, mpan_core)

                prev_era = era

            if len(events) > 0:
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era == last_era).one()

                for event in events:
                    vals = [
                        event['mpan-core'], site.code, site.name,
                        event['date'].strftime("%Y-%m-%d %H:%M"),
                        event['code']]
                    yield '\n' + ','.join(
                        '"' + str(val) + '"' for val in vals) + ','
            else:
                yield ' '
    except:
        yield traceback.format_exc()
def laf_days(sess, progress, csv_file):
    llfc_ids = []
    timestamps = []
    values = []
    llfc_code = line_dt_ct = dno = llfc_id = None
    llfc_valid_to = UTC_DATETIME_MIN
    timestamp_cache = {}

    for line_number, vals in enumerate(csv.reader(csv_file, delimiter="|")):
        progress["line_number"] = line_number
        code = vals[0]

        if code == "DIS":
            participant_code = vals[1]
            dno = Party.get_by_participant_code_role_code(
                sess, participant_code, "R")

        elif code == "LLF":
            llfc_code = vals[1]
            llfc_valid_to = UTC_DATETIME_MIN
            if len(llfc_ids) > 0:
                yield llfc_ids, timestamps, values
                llfc_ids = []
                timestamps = []
                values = []

        elif code == "SDT":
            line_dt_str = vals[1]
            line_dt_ct = to_ct(Datetime.strptime(line_dt_str, "%Y%m%d"))

        elif code == "SPL":
            period, value = vals[1:]

            try:
                timestamp = timestamp_cache[line_dt_ct][period]
            except KeyError:
                try:
                    day_cache = timestamp_cache[line_dt_ct]
                except KeyError:
                    day_cache = timestamp_cache[line_dt_ct] = {}

                timestamp = day_cache[period] = to_utc(
                    line_dt_ct + Timedelta(minutes=30 * (int(period) - 1)))

            if hh_after(timestamp, llfc_valid_to):
                llfc = dno.find_llfc_by_code(sess, llfc_code, timestamp)
                if llfc is None:
                    continue
                llfc_id, llfc_valid_to = llfc.id, llfc.valid_to

            llfc_ids.append(llfc_id)
            timestamps.append(timestamp)
            values.append(Decimal(value))

        elif code == "ZPT":
            earliest_list = sorted(timestamp_cache.keys())
            if len(earliest_list) > 0:
                conf = Contract.get_non_core_by_name(sess, "configuration")
                props = conf.make_properties()
                try:
                    laf_importer = props["laf_importer"]
                except KeyError:
                    laf_importer = props["laf_importer"] = {}

                laf_importer[dno.participant.code] = min(earliest_list)
                conf.update_properties(props)
                sess.commit()

    if len(llfc_ids) > 0:
        yield llfc_ids, timestamps, values
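# A short sketch of the SPL handling above, using only the standard library:
# a settlement day in local (Europe/London) time plus a 1-based half-hourly
# period number becomes a UTC timestamp. The helper name period_to_utc is an
# assumption; the real code uses Chellow's to_ct/to_utc helpers and a cache.
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

CT = ZoneInfo("Europe/London")
UTC = ZoneInfo("UTC")


def period_to_utc(settlement_day: str, period: int) -> datetime:
    day_ct = datetime.strptime(settlement_day, "%Y%m%d").replace(tzinfo=CT)
    return (day_ct + timedelta(minutes=30 * (period - 1))).astimezone(UTC)


# Period 3 of 1 June 2021 starts at 01:00 local time, i.e. 00:00 UTC.
print(period_to_utc("20210601", 3))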
def content(year, supply_id, user):
    sess = f = writer = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'crc_special_events.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            ("MPAN Core", "Site Id", "Site Name", "Date", "Event"))
        year_start = datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = prev_hh(datetime(year + 1, 4, 1, tzinfo=pytz.utc))

        def add_event(events, date, code, era=None, mpan_core=None):
            if era is None:
                mpan_cores = [mpan_core]
            else:
                mpan_cores = []
                if era.imp_mpan_core is not None:
                    mpan_cores.append(era.imp_mpan_core)
                if era.exp_mpan_core is not None:
                    mpan_cores.append(era.exp_mpan_core)
            for mpan_core in mpan_cores:
                events.append({
                    'date': date, 'code': code, 'mpan-core': mpan_core
                })

        if supply_id is None:
            supplies = sess.query(Supply).join(Source).join(Era).filter(
                Source.code.in_(('net', 'gen-net', 'gen')),
                Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).distinct()
        else:
            supply = Supply.get_by_id(supply_id)
            supplies = sess.query(Supply).filter(Supply.id == supply.id)

        for supply in supplies:
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).order_by(
                Era.start_date).all()
            events = []
            first_era = eras[0]
            first_era_start = first_era.start_date
            if hh_after(first_era_start, year_start):
                add_event(events, first_era_start, "New Supply", first_era)

            last_era = eras[-1]
            last_era_finish = last_era.finish_date
            if hh_before(last_era_finish, year_finish):
                add_event(events, last_era_finish, "Disconnection", last_era)

            prev_era = first_era
            for era in eras[1:]:
                if era.msn != prev_era.msn:
                    add_event(events, era.start_date, "Meter Change", era)
                if era.pc.code != prev_era.pc.code:
                    add_event(
                        events, era.start_date, "Change Of Profile Class",
                        era)
                if era.mop_contract_id != prev_era.mop_contract_id:
                    add_event(events, era.start_date, "Change Of MOP", era)
                if era.dc_contract_id != prev_era.dc_contract_id:
                    add_event(events, era.start_date, "Change Of DC", era)

                for is_import in [True, False]:
                    if era.imp_mpan_core is None:
                        mpan_core = era.exp_mpan_core
                    else:
                        mpan_core = era.imp_mpan_core
                    if is_import:
                        cur_sup = era.imp_supplier_contract
                        prev_sup = prev_era.imp_supplier_contract
                    else:
                        cur_sup = era.exp_supplier_contract
                        prev_sup = prev_era.exp_supplier_contract

                    if cur_sup is None and prev_sup is not None:
                        add_event(
                            events, era.start_date, "End of supply",
                            mpan_core)
                    elif cur_sup is not None and prev_sup is None:
                        add_event(
                            events, era.start_date, "Start of supply", None,
                            mpan_core)
                    elif cur_sup is not None and \
                            prev_sup is not None and cur_sup != prev_sup:
                        add_event(
                            events, era.start_date, "Change Of Supplier",
                            None, mpan_core)

                prev_era = era

            if len(events) > 0:
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era == last_era).one()

                for event in events:
                    vals = [
                        event['mpan-core'], site.code, site.name,
                        event['date'].strftime("%Y-%m-%d %H:%M"),
                        event['code']
                    ]
                    writer.writerow(vals)

            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
def hh(data_source, rate_period='monthly'):
    for hh in (h for h in data_source.hh_data if h['ct-is-month-end']):
        hh_start = hh['start-date']
        month_start = utc_datetime(hh_start.year, hh_start.month)
        month_finish = month_start + relativedelta(months=1) - HH
        month_num = month_start.month

        # Get start of last financial year
        financial_year_start = month_start
        while financial_year_start.month != 4:
            financial_year_start -= relativedelta(months=1)

        last_financial_year_start = financial_year_start - \
            relativedelta(years=1)
        financial_year_finish = financial_year_start + \
            relativedelta(years=1) - HH

        triad_dates = []
        earliest_triad = None
        for dt in data_source.hh_rate(
                db_id, last_financial_year_start, 'triad_dates'):
            triad_dates.append(dt + relativedelta(years=1))
            if earliest_triad is None or dt < earliest_triad:
                earliest_triad = dt

        est_triad_kws = []
        for t_date in triad_dates:
            for ds in chellow.computer.get_data_sources(
                    data_source, t_date, t_date, financial_year_start):
                chellow.duos.duos_vb(ds)
                est_triad_kws.append(ds.hh_data[0])

        if data_source.site is None:
            era = data_source.supply.find_era_at(
                data_source.sess, earliest_triad)
            if era is None or era.get_channel(
                    data_source.sess, data_source.is_import,
                    'ACTIVE') is None:
                est_triad_kw = 0.85 * max(
                    datum['msp-kwh'] for datum in data_source.hh_data) * 2
                for est_datum in est_triad_kws:
                    est_datum['msp-kw'] = est_triad_kw
                    est_datum['gsp-kw'] = est_datum['msp-kw'] * \
                        est_datum['laf']

        triad_calc(
            hh, 'triad-estimate', est_triad_kws, financial_year_start,
            financial_year_finish, data_source, month_start)
        est_triad_gbp = hh['triad-estimate-rate'] * hh['triad-estimate-gsp-kw']

        if rate_period == 'monthly':
            total_intervals = 12

            est_intervals = 1
            hh['triad-estimate-months'] = est_intervals
        else:
            dt = financial_year_start
            total_intervals = 0
            while dt <= financial_year_finish:
                total_intervals += 1
                dt += relativedelta(days=1)

            est_intervals = 0
            for ds in chellow.computer.get_data_sources(
                    data_source, month_start, month_finish):
                for h in ds.hh_data:
                    if h['utc-decimal-hour'] == 0:
                        est_intervals += 1

            hh['triad-estimate-days'] = est_intervals

        hh['triad-estimate-gbp'] = est_triad_gbp / total_intervals * \
            est_intervals

        if month_num == 3:
            triad_kws = []
            for t_date in data_source.hh_rate(
                    db_id, month_start, 'triad_dates'):
                try:
                    ds = next(
                        iter(
                            chellow.computer.get_data_sources(
                                data_source, t_date, t_date)))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh = ds.hh_data[0]
                    else:
                        thh = {
                            'hist-start': t_date, 'msp-kw': 0,
                            'status': 'before contract',
                            'laf': 'before contract', 'gsp-kw': 0}
                except StopIteration:
                    thh = {
                        'hist-start': t_date, 'msp-kw': 0,
                        'status': 'before start of supply',
                        'laf': 'before start of supply', 'gsp-kw': 0}
                triad_kws.append(thh)

            triad_calc(
                hh, 'triad-actual', triad_kws, financial_year_start,
                financial_year_finish, data_source, month_start)
            hh['triad-actual-gbp'] = hh['triad-actual-rate'] * \
                hh['triad-actual-gsp-kw']

            era = data_source.supply.find_era_at(
                data_source.sess, month_finish)
            est_intervals = 0

            interval = relativedelta(months=1) if \
                rate_period == 'monthly' else relativedelta(days=1)

            dt = month_finish
            while era is not None and dt > financial_year_start:
                est_intervals += 1
                dt -= interval
                if hh_after(dt, era.finish_date):
                    era = data_source.supply.find_era_at(
                        data_source.sess, dt)

            if rate_period == 'monthly':
                hh['triad-all-estimates-months'] = est_intervals
            else:
                hh['triad-all-estimates-days'] = est_intervals
            hh['triad-all-estimates-gbp'] = est_triad_gbp / \
                total_intervals * est_intervals * -1
def content(year, supply_id, user): f = sess = None try: sess = Session() fname = ['crc', str(year), str(year + 1)] if supply_id is None: fname.append('all_supplies') else: fname.append('supply_' + str(supply_id)) running_name, finished_name = chellow.dloads.make_names( '_'.join(fname) + '.csv', user) f = open(running_name, "w") ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP'] f.write( ','.join( ( 'Chellow Supply Id', 'MPAN Core', 'Site Id', 'Site Name', 'From', 'To', 'NHH Breakdown', 'Actual HH Normal Days', 'Actual AMR Normal Days', 'Actual NHH Normal Days', 'Actual Unmetered Normal Days', 'Max HH Normal Days', 'Max AMR Normal Days', 'Max NHH Normal Days', 'Max Unmetered Normal Days', 'Total Actual Normal Days', 'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh', 'NHH kWh', 'Unmetered kwh', 'HH Filled kWh', 'AMR Filled kWh', 'Total kWh', 'Note')) + '\n') year_start = Datetime(year, 4, 1, tzinfo=pytz.utc) year_finish = year_start + relativedelta(years=1) - HH supplies = sess.query(Supply).join(Era).join(Source).filter( Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(), Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)).distinct().order_by(Supply.id) if supply_id is not None: supply = Supply.get_by_id(sess, supply_id) supplies = supplies.filter(Supply.id == supply.id) meter_types = ('hh', 'amr', 'nhh', 'unmetered') for supply in supplies: total_kwh = dict([(mtype, 0) for mtype in meter_types]) filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')]) normal_days = dict([(mtype, 0) for mtype in meter_types]) max_normal_days = dict([(mtype, 0) for mtype in meter_types]) breakdown = '' for era in sess.query(Era).filter( Era.supply_id == supply.id, Era.start_date <= year_finish, or_( Era.finish_date == null(), Era.finish_date >= year_start)): meter_type = era.make_meter_category() era_start = era.start_date period_start = era_start \ if era_start > year_start else year_start era_finish = era.finish_date if hh_after(era_finish, year_finish): period_finish = year_finish else: period_finish = era_finish max_normal_days[meter_type] += ( (period_finish - period_start).total_seconds() + 60 * 30) / (60 * 60 * 24) mpan_core = era.imp_mpan_core site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era_id == era.id).one() if meter_type == 'nhh': read_list = [] read_keys = {} pairs = [] prior_pres_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date < period_start, BillType.code != 'W').order_by( RegisterRead.present_date.desc())) prior_prev_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date < period_start, BillType.code != 'W').order_by( RegisterRead.previous_date.desc())) next_pres_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType) .join(RegisterRead.present_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.present_date >= period_start, BillType.code != 'W').order_by( RegisterRead.present_date)) next_prev_reads = iter( sess.query(RegisterRead).join(Bill).join(BillType). 
join(RegisterRead.previous_type).filter( RegisterRead.units == 0, ReadType.code.in_(ACTUAL_READ_TYPES), Bill.supply == supply, RegisterRead.previous_date >= period_start, BillType.code != 'W').order_by( RegisterRead.previous_date)) for is_forwards in [False, True]: if is_forwards: pres_reads = next_pres_reads prev_reads = next_prev_reads read_list.reverse() else: pres_reads = prior_pres_reads prev_reads = prior_prev_reads prime_pres_read = None prime_prev_read = None while True: while prime_pres_read is None: try: pres_read = next(pres_reads) except StopIteration: break pres_date = pres_read.present_date pres_msn = pres_read.msn read_key = '_'.join([str(pres_date), pres_msn]) if read_key in read_keys: continue pres_bill = sess.query(Bill).join(BillType). \ filter( Bill.reads.any(), Bill.supply == supply, Bill.finish_date >= pres_read.bill.start_date, Bill.start_date <= pres_read.bill.finish_date, BillType.code != 'W').order_by( Bill.issue_date.desc(), BillType.code).first() if pres_bill != pres_read.bill: continue reads = dict( ( read.tpr.code, float(read.present_value) * float(read.coefficient)) for read in sess.query(RegisterRead). filter( RegisterRead.units == 0, RegisterRead.bill == pres_bill, RegisterRead.present_date == pres_date, RegisterRead.msn == pres_msn)) prime_pres_read = { 'date': pres_date, 'reads': reads, 'msn': pres_msn} read_keys[read_key] = None while prime_prev_read is None: try: prev_read = next(prev_reads) except StopIteration: break prev_date = prev_read.previous_date prev_msn = prev_read.msn read_key = '_'.join([str(prev_date), prev_msn]) if read_key in read_keys: continue prev_bill = sess.query(Bill).join(BillType). \ filter( Bill.reads.any(), Bill.supply_id == supply.id, Bill.finish_date >= prev_read.bill.start_date, Bill.start_date <= prev_read.bill.finish_date, BillType.code != 'W').order_by( Bill.issue_date.desc(), BillType.code).first() if prev_bill != prev_read.bill: continue reads = dict( ( read.tpr.code, float(read.previous_value) * float(read.coefficient)) for read in sess.query(RegisterRead). 
filter( RegisterRead.units == 0, RegisterRead.bill_id == prev_bill.id, RegisterRead.previous_date == prev_date, RegisterRead.msn == prev_msn)) prime_prev_read = { 'date': prev_date, 'reads': reads, 'msn': prev_msn} read_keys[read_key] = None if prime_pres_read is None and \ prime_prev_read is None: break elif prime_pres_read is None: read_list.append(prime_prev_read) prime_prev_read = None elif prime_prev_read is None: read_list.append(prime_pres_read) prime_pres_read = None else: if is_forwards: if prime_pres_read['date'] <= \ prime_prev_read['date']: read_list.append(prime_pres_read) prime_pres_read = None else: read_list.append(prime_prev_read) prime_prev_read = None else: if prime_prev_read['date'] >= \ prime_pres_read['date']: read_list.append(prime_prev_read) prime_prev_read = None else: read_list.append(prime_pres_read) prime_pres_read = None if len(read_list) > 1: if is_forwards: aft_read = read_list[-2] fore_read = read_list[-1] else: aft_read = read_list[-1] fore_read = read_list[-2] if aft_read['msn'] == fore_read['msn'] and \ set(aft_read['reads'].keys()) == \ set(fore_read['reads'].keys()): pair_start_date = aft_read['date'] + HH pair_finish_date = fore_read['date'] num_hh = ( ( pair_finish_date + HH - pair_start_date).total_seconds() ) / (30 * 60) tprs = {} for tpr_code, initial_val in \ aft_read['reads'].items(): end_val = fore_read['reads'][tpr_code] kwh = end_val - initial_val if kwh < 0: digits = int( math.log10(initial_val)) + 1 kwh = 10 ** digits + kwh tprs[tpr_code] = kwh / num_hh pairs.append( { 'start-date': pair_start_date, 'finish-date': pair_finish_date, 'tprs': tprs}) if len(pairs) > 0 and \ (not is_forwards or ( is_forwards and read_list[-1]['date'] > period_finish)): break breakdown += 'read list - \n' + str(read_list) + "\n" if len(pairs) == 0: pairs.append( { 'start-date': period_start, 'finish-date': period_finish, 'tprs': {'00001': 0}}) else: for pair in pairs: pair_start = pair['start-date'] pair_finish = pair['finish-date'] if pair_start >= year_start and \ pair_finish <= year_finish: if pair_start > period_start: block_start = pair_start else: block_start = period_start if pair_finish < period_finish: block_finish = pair_finish else: block_finish = period_finish if block_start <= block_finish: normal_days[meter_type] += ( ( block_finish - block_start ).total_seconds() + 60 * 30) / (60 * 60 * 24) # smooth for i in range(1, len(pairs)): pairs[i - 1]['finish-date'] = pairs[i]['start-date'] \ - HH # stretch if pairs[0]['start-date'] > period_start: pairs[0]['start-date'] = period_start if pairs[-1]['finish-date'] < period_finish: pairs[-1]['finish-date'] = period_finish # chop pairs = [ pair for pair in pairs if not pair['start-date'] > period_finish and not pair['finish-date'] < period_start] # squash if pairs[0]['start-date'] < period_start: pairs[0]['start-date'] = period_start if pairs[-1]['finish-date'] > period_finish: pairs[-1]['finish-date'] = period_finish for pair in pairs: pair_hhs = ( ( pair['finish-date'] - pair['start-date'] ).total_seconds() + 30 * 60) / (60 * 30) pair['pair_hhs'] = pair_hhs for tpr_code, pair_kwh in pair['tprs'].items(): total_kwh[meter_type] += pair_kwh * pair_hhs breakdown += 'pairs - \n' + str(pairs) elif meter_type in ('hh', 'amr'): period_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value). 
join(Channel).filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish).order_by( HhDatum.id)) year_kwhs = list( float(v[0]) for v in sess.query(HhDatum.value). join(Channel).join(Era).filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Era.supply == supply, HhDatum.start_date >= year_start, HhDatum.start_date <= year_finish).order_by( HhDatum.id)) period_sum_kwhs = sum(period_kwhs) year_sum_kwhs = sum(year_kwhs) period_len_kwhs = len(period_kwhs) year_len_kwhs = len(year_kwhs) total_kwh[meter_type] += period_sum_kwhs period_hhs = ( period_finish + HH - period_start ).total_seconds() / (60 * 30) if year_len_kwhs > 0: filled_kwh[meter_type] += year_sum_kwhs / \ year_len_kwhs * (period_hhs - period_len_kwhs) normal_days[meter_type] += sess.query( func.count(HhDatum.value)).join(Channel). \ filter( Channel.imp_related == true(), Channel.channel_type == 'ACTIVE', Channel.era == era, HhDatum.start_date >= period_start, HhDatum.start_date <= period_finish, HhDatum.status == 'A').one()[0] / 48 elif meter_type == 'unmetered': bills = sess.query(Bill).filter( Bill.supply == supply, Bill.finish_date >= period_start, Bill.start_date <= period_finish) for bill in bills: total_kwh[meter_type] += kwh normal_days[meter_type] += ( ( period_finish - period_start).total_seconds() + 60 * 30) / (60 * 60 * 24) # for full year 183 total_normal_days = sum(normal_days.values()) total_max_normal_days = sum(max_normal_days.values()) is_normal = total_normal_days / total_max_normal_days >= 183 / 365 f.write( ','.join( '"' + str(val) + '"' for val in [ supply.id, mpan_core, site.code, site.name, hh_format(year_start), hh_format(year_finish), breakdown] + [ normal_days[type] for type in meter_types] + [ max_normal_days[type] for type in meter_types] + [ total_normal_days, total_max_normal_days, "Actual" if is_normal else "Estimated"] + [total_kwh[type] for type in meter_types] + [filled_kwh[type] for type in ('hh', 'amr')] + [sum(total_kwh.values()) + sum(filled_kwh.values()), '']) + '\n') # avoid a long running transaction sess.rollback() except: msg = traceback.format_exc() sys.stderr.write(msg + '\n') f.write("Problem " + msg) finally: if sess is not None: sess.close() if f is not None: f.close() os.rename(running_name, finished_name)


def hh(data_source, rate_period='monthly', est_kw=None):
    for hh in (h for h in data_source.hh_data if h['ct-is-month-end']):
        hh_start = hh['start-date']
        month_start = utc_datetime(hh_start.year, hh_start.month)
        month_finish = month_start + relativedelta(months=1) - HH

        # the TNUoS charging year runs from 1 April to 31 March
        financial_year_start = month_start
        while financial_year_start.month != 4:
            financial_year_start -= relativedelta(months=1)

        last_financial_year_start = financial_year_start - relativedelta(
            years=1)
        financial_year_finish = financial_year_start + relativedelta(
            years=1) - HH

        est_triad_kws = []
        earliest_triad = None
        for dt in get_file_rates(
                data_source.caches, 'triad_dates',
                last_financial_year_start)['triad_dates']:
            triad_hh = None
            earliest_triad = hh_min(earliest_triad, dt)
            try:
                ds = next(
                    data_source.get_data_sources(
                        dt, dt, financial_year_start))
                chellow.duos.duos_vb(ds)
                triad_hh = ds.hh_data[0]

                while dt < financial_year_start:
                    dt += relativedelta(years=1)

                for ds in data_source.get_data_sources(
                        dt, dt, financial_year_start):
                    chellow.duos.duos_vb(ds)
                    datum = ds.hh_data[0]
                    triad_hh['laf'] = datum['laf']
                    triad_hh['gsp-kw'] = datum['laf'] * triad_hh['msp-kw']
            except StopIteration:
                triad_hh = {
                    'hist-start': dt, 'msp-kw': 0, 'start-date': dt,
                    'status': 'before start of MPAN', 'laf': 1,
                    'gsp-kw': 0}
            est_triad_kws.append(triad_hh)

        if data_source.site is None:
            era = data_source.supply.find_era_at(
                data_source.sess, earliest_triad)
            if era is None or era.get_channel(
                    data_source.sess, data_source.is_import,
                    'ACTIVE') is None:
                if est_kw is not None:
                    est_triad_kw = est_kw
                else:
                    est_triad_kw = 0.85 * max(
                        datum['msp-kwh'] for datum in data_source.hh_data
                    ) * 2
                for est_datum in est_triad_kws:
                    est_datum['msp-kw'] = est_triad_kw
                    est_datum['gsp-kw'] = est_datum['msp-kw'] * \
                        est_datum['laf']

        gsp_kw = 0
        for i, triad_hh in enumerate(est_triad_kws):
            triad_prefix = 'triad-estimate-' + str(i + 1)
            hh[triad_prefix + '-date'] = triad_hh['hist-start']
            hh[triad_prefix + '-msp-kw'] = triad_hh['msp-kw']
            hh[triad_prefix + '-status'] = triad_hh['status']
            hh[triad_prefix + '-laf'] = triad_hh['laf']
            hh[triad_prefix + '-gsp-kw'] = triad_hh['gsp-kw']
            gsp_kw += triad_hh['gsp-kw']

        hh['triad-estimate-gsp-kw'] = gsp_kw / 3
        polarity = 'import' if data_source.llfc.is_import else 'export'
        gsp_group_code = data_source.gsp_group_code
        rate = float(
            get_file_rates(
                data_source.caches, 'triad_rates', month_start)[
                'triad_gbp_per_gsp_kw'][polarity][gsp_group_code])

        hh['triad-estimate-rate'] = rate
        est_triad_gbp = hh['triad-estimate-rate'] * hh['triad-estimate-gsp-kw']

        if rate_period == 'monthly':
            total_intervals = 12
            est_intervals = 1
            hh['triad-estimate-months'] = est_intervals
        else:
            dt = financial_year_start
            total_intervals = 0
            while dt <= financial_year_finish:
                total_intervals += 1
                dt += relativedelta(days=1)

            est_intervals = 0
            for ds in data_source.get_data_sources(month_start, month_finish):
                for h in ds.hh_data:
                    if h['utc-decimal-hour'] == 0:
                        est_intervals += 1
            hh['triad-estimate-days'] = est_intervals

        hh['triad-estimate-gbp'] = est_triad_gbp / total_intervals * \
            est_intervals

        # reconcile the estimates against the actual triads in the March run
        if month_start.month == 3:
            triad_kws = []
            for t_date in get_file_rates(
                    data_source.caches, 'triad_dates',
                    month_start)['triad_dates']:
                try:
                    ds = next(data_source.get_data_sources(t_date, t_date))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh = ds.hh_data[0]
                    else:
                        thh = {
                            'hist-start': t_date, 'msp-kw': 0,
                            'start-date': t_date,
                            'status': 'before contract',
                            'laf': 'before contract', 'gsp-kw': 0}
                except StopIteration:
                    thh = {
                        'hist-start': t_date, 'msp-kw': 0,
                        'start-date': t_date,
                        'status': 'before start of supply',
                        'laf': 'before start of supply', 'gsp-kw': 0}

                while t_date < financial_year_start:
                    t_date += relativedelta(years=1)

                try:
                    ds = next(data_source.get_data_sources(t_date, t_date))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh['laf'] = ds.hh_data[0]['laf']
                        thh['gsp-kw'] = thh['laf'] * thh['msp-kw']
                except StopIteration:
                    pass

                triad_kws.append(thh)

            gsp_kw = 0
            for i, triad_hh in enumerate(triad_kws):
                pref = 'triad-actual-' + str(i + 1)
                hh[pref + '-date'] = triad_hh['start-date']
                hh[pref + '-msp-kw'] = triad_hh['msp-kw']
                hh[pref + '-status'] = triad_hh['status']
                hh[pref + '-laf'] = triad_hh['laf']
                hh[pref + '-gsp-kw'] = triad_hh['gsp-kw']
                gsp_kw += triad_hh['gsp-kw']

            hh['triad-actual-gsp-kw'] = gsp_kw / 3
            polarity = 'import' if data_source.llfc.is_import else 'export'
            gsp_group_code = data_source.gsp_group_code
            tot_rate = 0
            for start_date, finish_date, script in get_file_scripts(
                    'triad_rates'):
                if start_date <= financial_year_finish and not hh_before(
                        finish_date, financial_year_start):
                    start_month = start_date.month
                    if start_month < 4:
                        start_month += 12

                    if finish_date is None:
                        finish_month = financial_year_finish.month
                    else:
                        finish_month = finish_date.month
                    if finish_month < 4:
                        finish_month += 12

                    rt = get_file_rates(
                        data_source.caches, 'triad_rates', start_date)[
                        'triad_gbp_per_gsp_kw'][polarity][gsp_group_code]
                    tot_rate += (finish_month - start_month + 1) * float(rt)

            rate = tot_rate / 12
            hh['triad-actual-rate'] = rate
            hh['triad-actual-gbp'] = hh['triad-actual-rate'] * \
                hh['triad-actual-gsp-kw']

            era = data_source.supply.find_era_at(
                data_source.sess, month_finish)
            est_intervals = 0

            interval = relativedelta(months=1) if \
                rate_period == 'monthly' else relativedelta(days=1)

            dt = month_finish
            while era is not None and dt > financial_year_start:
                est_intervals += 1
                dt -= interval
                if hh_after(dt, era.finish_date):
                    era = data_source.supply.find_era_at(
                        data_source.sess, dt)

            if rate_period == 'monthly':
                hh['triad-all-estimates-months'] = est_intervals
            else:
                hh['triad-all-estimates-days'] = est_intervals
            hh['triad-all-estimates-gbp'] = est_triad_gbp / \
                total_intervals * est_intervals * -1
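

# The estimated triad charge above is the published GBP-per-GSP-kW rate times
# the mean of the three estimated GSP kW figures, spread across the billing
# intervals.  A standalone sketch with made-up numbers; the 50 GBP/kW rate and
# the 1.07 LAF are illustrative, not real tariff values:
def _sketch_monthly_triad_gbp(msp_kws, lafs, gbp_per_gsp_kw):
    """Illustrative sketch: triad charge for one 'monthly' rate period."""
    gsp_kws = [msp * laf for msp, laf in zip(msp_kws, lafs)]
    avg_gsp_kw = sum(gsp_kws) / 3  # mean over the three triad half hours
    return gbp_per_gsp_kw * avg_gsp_kw / 12  # one of twelve monthly intervals


# _sketch_monthly_triad_gbp([120, 135, 128], [1.07] * 3, 50.0)
# -> roughly 569 GBP for the month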


def content(start_date, finish_date, contract_id, user):
    caches = {}
    sess = f = None
    try:
        sess = Session()

        running_name, finished_name = chellow.dloads.make_names(
            'virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        contract = Contract.get_supplier_by_id(sess, contract_id)
        forecast_date = chellow.computer.forecast_date()

        month_start = Datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)
        month_finish = month_start + relativedelta(months=1) - HH

        bill_titles = contract_func(
            caches, contract, 'virtual_bill_titles', None)()
        writer.writerow(
            ['MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To'] +
            bill_titles)

        while not month_start > finish_date:
            period_start = start_date \
                if month_start < start_date else month_start
            if month_finish > finish_date:
                period_finish = finish_date
            else:
                period_finish = month_finish

            for era in sess.query(Era).distinct().filter(
                    or_(
                        Era.imp_supplier_contract_id == contract.id,
                        Era.exp_supplier_contract_id == contract.id),
                    Era.start_date <= period_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= period_start)):

                era_start = era.start_date
                if period_start < era_start:
                    chunk_start = era_start
                else:
                    chunk_start = period_start

                era_finish = era.finish_date
                if hh_after(period_finish, era_finish):
                    chunk_finish = era_finish
                else:
                    chunk_finish = period_finish

                polarities = []
                if era.imp_supplier_contract == contract:
                    polarities.append(True)
                if era.exp_supplier_contract == contract:
                    polarities.append(False)

                for polarity in polarities:
                    data_source = SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date, era,
                        polarity, None, caches)

                    site = sess.query(Site).join(SiteEra).filter(
                        SiteEra.era == era,
                        SiteEra.is_physical == true()).one()

                    vals = [
                        data_source.mpan_core, site.code, site.name,
                        data_source.supplier_account,
                        hh_format(data_source.start_date),
                        hh_format(data_source.finish_date)]

                    contract_func(
                        caches, contract, 'virtual_bill', None)(data_source)
                    bill = data_source.supplier_bill
                    for title in bill_titles:
                        if title in bill:
                            val = str(bill[title])
                            del bill[title]
                        else:
                            val = ''
                        vals.append(val)

                    for k in sorted(bill.keys()):
                        vals.append(k)
                        vals.append(str(bill[k]))
                    writer.writerow(vals)

            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except BadRequest as e:
        writer.writerow([e.description])
    except:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
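

# The row assembly above writes the known bill titles in a fixed order, then
# appends any leftover keys of the virtual bill as name/value pairs at the end
# of the row.  A standalone sketch of that column layout; the example titles
# and bill keys are made up:
def _sketch_bill_row(bill, bill_titles):
    """Illustrative sketch of the CSV column layout used above."""
    bill = dict(bill)  # copy, since the report deletes titles as it goes
    vals = [str(bill.pop(title, '')) for title in bill_titles]
    for k in sorted(bill):  # unexpected keys go last, name then value
        vals.extend([k, str(bill[k])])
    return vals


# _sketch_bill_row(
#     {'net-gbp': 10.5, 'vat-gbp': 2.1, 'problem': 'missing rate'},
#     ['net-gbp', 'vat-gbp'])
# -> ['10.5', '2.1', 'problem', 'missing rate']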


def content(
        start_year, start_month, start_day, finish_year, finish_month,
        finish_day, is_import, supply_id, sess):
    try:
        start_date = Datetime(
            start_year, start_month, start_day, tzinfo=pytz.utc)
        finish_date = Datetime(
            finish_year, finish_month, finish_day, tzinfo=pytz.utc) + \
            relativedelta(days=1) - HH

        caches = {}
        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        day_start = start_date
        prev_bill_titles = []

        while not day_start > finish_date:
            day_finish = day_start + relativedelta(days=1) - HH

            for era in supply.find_eras(sess, day_start, day_finish):
                if era.start_date > day_start:
                    chunk_start = era.start_date
                else:
                    chunk_start = day_start

                if hh_after(era.finish_date, day_finish):
                    chunk_finish = day_finish
                else:
                    chunk_finish = era.finish_date

                ss = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    is_import, None, caches)

                sup_con = ss.supplier_contract
                bill_titles = chellow.computer.contract_func(
                    caches, sup_con, 'virtual_bill_titles', None)()
                if bill_titles != prev_bill_titles:
                    yield ','.join(
                        [
                            'MPAN Core', 'Site Code', 'Site Name', 'Account',
                            'From', 'To'] + bill_titles) + '\n'
                    prev_bill_titles = bill_titles

                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()
                yield ','.join(
                    '"' + str(value) + '"' for value in [
                        ss.mpan_core, site.code, site.name,
                        ss.supplier_account, hh_format(ss.start_date),
                        hh_format(ss.finish_date)])

                chellow.computer.contract_func(
                    caches, sup_con, 'virtual_bill', None)(ss)
                bill = ss.supplier_bill

                for title in bill_titles:
                    if title in bill:
                        val_raw = bill[title]
                        if isinstance(val_raw, Datetime):
                            val = hh_format(val_raw)
                        else:
                            val = str(val_raw)
                        yield ',"' + val + '"'
                        del bill[title]
                    else:
                        yield ',""'

                for k in sorted(bill.keys()):
                    yield ',"' + k + '","' + str(bill[k]) + '"'
                yield '\n'

            day_start += relativedelta(days=1)
    except:
        yield traceback.format_exc()
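

# content() above is a generator: it yields a header line whenever the bill
# titles change, then one quoted cell at a time, so the caller builds the CSV
# by concatenating the fragments.  A hedged sketch of driving it; the dates
# are placeholders, and collecting everything into a single string (rather
# than streaming the fragments) is an assumption about how it might be used:
def _sketch_run_daily_virtual_bills(sess, supply_id):
    """Illustrative sketch: collect the daily virtual bill CSV as one str."""
    fragments = content(2017, 3, 1, 2017, 3, 31, True, supply_id, sess)
    return ''.join(fragments)  # one row per era per day in the range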