def long_process():
    """Write the 'site monthly duration' report as a CSV download.

    Produces one row per site per month, covering ``months`` months ending
    at ``year``/``month`` (all names closed over from the enclosing report
    script).  Output goes to a temporary 'running' file which is renamed to
    its finished name at the end, so downloaders never see a partial file.
    Any failure is appended to the output file (and stderr) so the user can
    see what went wrong.
    """
    sess = None
    tmp_file = None
    # Initialized up front so the cleanup code can tell whether the
    # download file names were ever created; previously a failure before
    # dloads.make_names() raised NameError in the finally block.
    running_name = None
    finished_name = None
    try:
        sess = db.session()
        if st_id is None:
            # No site given: report covers every site.
            st = None
            base_name = "site_monthly_duration_for_all_site_for_" + \
                str(months) + "_to_" + str(year) + "_" + str(month) + ".csv"
        else:
            st = Site.get_by_id(sess, st_id)
            base_name = "site_monthly_duration_for_" + st.code + "_" + \
                str(months) + "_to_" + str(year) + "_" + str(month) + ".csv"
        running_name, finished_name = dloads.make_names(base_name, user)
        tmp_file = open(running_name, "w")
        forecast_date = computer.forecast_date()
        # CSV header row.
        tmp_file.write(
            "Site Id,Site Name,Associated Site Ids,Sources,"
            "Generator Types,Month,Metered Imported kWh,"
            "Metered Displaced kWh,Metered Exported kWh,Metered Used kWh,"
            "Metered Parasitic kWh,Metered Generated kWh,"
            "Metered 3rd Party Import kWh,Metered 3rd Party Export kWh,"
            "Metered Imported GBP,Metered Displaced GBP,Metered Exported GBP,"
            "Metered Used GBP,Metered 3rd Party Import GBP,"
            "Billed Imported kWh,Billed Imported GBP,Metering Type,Problem")
        for i in range(months):
            sites = sess.query(Site).order_by(Site.code)
            if st is not None:
                sites = sites.filter(Site.id == st.id)
            for site in sites:
                month_start = start_date + relativedelta(months=i)
                # HH is one half-hour; months run [start, next start - HH].
                month_finish = month_start + relativedelta(months=1) - HH
                tmp_file.write(
                    '\r\n' + ','.join(
                        '"' + str(value) + '"' for value in process_site(
                            sess, site, month_start, month_finish,
                            forecast_date, tmp_file)))
                # Flush per row so a long run shows progress in the file.
                tmp_file.flush()
    except Exception:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        # open() may itself have failed, in which case stderr is the only
        # place to record the problem.
        if tmp_file is not None:
            tmp_file.write("Problem " + msg)
    finally:
        try:
            if sess is not None:
                sess.close()
        except Exception:
            if tmp_file is not None:
                tmp_file.write("\nProblem closing session.")
        finally:
            # Only finish the download if the running file was created.
            if tmp_file is not None:
                tmp_file.close()
                os.rename(running_name, finished_name)
def content():
    """Write the 'supplies monthly duration' report as a CSV download.

    One row per supply per month over ``months`` months ending at
    ``year``/``month`` (names closed over from the enclosing report
    script).  For each supply the row gives metered/billed import and
    export kWh and GBP, derived from half-hourly data, bills and the
    supplier/DC/MOP virtual-bill contract functions.
    """
    sess = None
    tmp_file = None
    try:
        sess = db.session()
        supplies = sess.query(Supply).join(Era).distinct()
        if supply_id is None:
            base_name = "supplies_monthly_duration_for_all_supplies_for_" + \
                str(months) + "_to_" + str(year) + "_" + str(month) + ".csv"
        else:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            base_name = "supplies_monthly_duration_for_" + str(supply.id) + \
                "_" + str(months) + "_to_" + str(year) + "_" + str(month) + \
                ".csv"
        running_name, finished_name = dloads.make_names(base_name, user)
        tmp_file = open(running_name, "w")
        # Shared cache passed to computer.SupplySource for rate look-ups.
        caches = {}
        # First month of the window: go back (months - 1) months from the
        # requested year/month.
        start_date = datetime.datetime(year, month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)
        field_names = (
            'supply-name', 'source-code', 'generator-type', 'month',
            'pc-code', 'msn', 'site-code', 'site-name', 'metering-type',
            'import-mpan-core', 'metered-import-kwh',
            'metered-import-net-gbp', 'metered-import-estimated-kwh',
            'billed-import-kwh', 'billed-import-net-gbp',
            'export-mpan-core', 'metered-export-kwh',
            'metered-export-estimated-kwh', 'billed-export-kwh',
            'billed-export-net-gbp', 'problem', 'timestamp')
        tmp_file.write('supply-id,' + ','.join(field_names) + '\n')
        forecast_date = computer.forecast_date()
        for i in range(months):
            month_start = start_date + relativedelta(months=i)
            # HH is one half-hour; the month is [start, next start - HH].
            month_finish = month_start + relativedelta(months=1) - HH
            # Only supplies with an era overlapping this month.
            for supply in supplies.filter(
                    Era.start_date <= month_finish, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= month_start)):
                generator_type = supply.generator_type
                if generator_type is None:
                    generator_type = ''
                else:
                    generator_type = generator_type.code
                source_code = supply.source.code
                eras = supply.find_eras(sess, month_start, month_finish)
                # The latest era in the month supplies the descriptive
                # fields (meter category, PC, MSN, site).
                era = eras[-1]
                metering_type = era.make_meter_category()
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era,
                    SiteEra.is_physical == true()).one()
                values = {
                    'supply-name': supply.name,
                    'source-code': source_code,
                    'generator-type': generator_type,
                    'month': hh_format(month_finish),
                    'pc-code': era.pc.code,
                    'msn': era.msn,
                    'site-code': site.code,
                    'site-name': site.name,
                    'metering-type': metering_type,
                    'problem': ''}
                tmp_file.write(str(supply.id) + ',')
                # Compute metered figures for each polarity.
                for is_import, pol_name in [
                        (True, 'import'), (False, 'export')]:
                    if is_import:
                        mpan_core = era.imp_mpan_core
                    else:
                        mpan_core = era.exp_mpan_core
                    values[pol_name + '-mpan-core'] = mpan_core
                    kwh = 0
                    est_kwh = 0
                    if metering_type in ['hh', 'amr']:
                        # Any single estimated ('E') datum flags the month
                        # as containing estimates; only the first is taken.
                        est_kwh = sess.query(HhDatum.value).join(Channel) \
                            .join(Era).filter(
                                HhDatum.status == 'E',
                                Era.supply_id == supply.id,
                                Channel.channel_type == 'ACTIVE',
                                Channel.imp_related == is_import,
                                HhDatum.start_date >= month_start,
                                HhDatum.start_date <= month_finish).first()
                        if est_kwh is None:
                            est_kwh = 0
                        else:
                            est_kwh = est_kwh[0]
                    # For net/gen-net imports, kWh come from the virtual
                    # bill computation below rather than raw HH data.
                    if not (is_import and source_code in ('net', 'gen-net')):
                        kwh_sum = sess.query(
                            cast(func.sum(HhDatum.value), Float)
                        ).join(Channel).join(Era).filter(
                            Era.supply_id == supply.id,
                            Channel.channel_type == 'ACTIVE',
                            Channel.imp_related == is_import,
                            HhDatum.start_date >= month_start,
                            HhDatum.start_date <= month_finish).one()[0]
                        if kwh_sum is not None:
                            kwh += kwh_sum
                    values['metered-' + pol_name + '-estimated-kwh'] = est_kwh
                    values['metered-' + pol_name + '-kwh'] = kwh
                    values['metered-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-kwh'] = 0
                    values['billed-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-apportioned-kwh'] = 0
                    values['billed-' + pol_name + '-apportioned-net-gbp'] = 0
                    values['billed-' + pol_name + '-raw-kwh'] = 0
                    values['billed-' + pol_name + '-raw-net-gbp'] = 0
                # Apportion each overlapping bill to this month by the
                # fraction of the bill's duration falling in the month.
                # NOTE(review): only the import columns are accumulated
                # here, never 'billed-export-*' — looks intentional but
                # worth confirming.
                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= month_finish,
                        Bill.finish_date >= month_start):
                    bill_start = bill.start_date
                    bill_finish = bill.finish_date
                    # + 30 * 60 because start/finish are inclusive
                    # half-hour boundaries.
                    bill_duration = totalseconds(
                        bill_finish - bill_start) + 30 * 60
                    overlap_duration = totalseconds(
                        min(bill_finish, month_finish) -
                        max(bill_start, month_start)) + 30 * 60
                    overlap_proportion = float(
                        overlap_duration) / float(bill_duration)
                    values['billed-import-net-gbp'] += \
                        overlap_proportion * float(bill.net)
                    values['billed-import-kwh'] += \
                        overlap_proportion * float(bill.kwh)
                # Virtual-bill each era chunk within the month to get the
                # metered import kWh/GBP for net-type supplies.
                for era in eras:
                    if era.start_date > month_start:
                        chunk_start = era.start_date
                    else:
                        chunk_start = month_start
                    if hh_after(era.finish_date, month_finish):
                        chunk_finish = month_finish
                    else:
                        chunk_finish = era.finish_date
                    import_mpan_core = era.imp_mpan_core
                    if import_mpan_core is None:
                        # Export-only era: nothing to bill on import side.
                        continue
                    supplier_contract = era.imp_supplier_contract
                    if source_code in ['net', 'gen-net', '3rd-party']:
                        supply_source = computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, True, None, caches)
                        values['metered-import-kwh'] += sum(
                            datum['msp-kwh'] for datum in
                            supply_source.hh_data)
                        import_vb_function = supply_source.contract_func(
                            supplier_contract, 'virtual_bill')
                        if import_vb_function is None:
                            values['problem'] += "Can't find the " \
                                "virtual_bill function in the supplier " \
                                "contract. "
                        else:
                            import_vb_function(supply_source)
                            values['metered-import-net-gbp'] += \
                                supply_source.supplier_bill['net-gbp']
                        # DC (data collector) virtual bill.
                        supply_source.contract_func(
                            era.hhdc_contract, 'virtual_bill')(supply_source)
                        values['metered-import-net-gbp'] += \
                            supply_source.dc_bill['net-gbp']
                        # MOP (meter operator) virtual bill.
                        mop_func = supply_source.contract_func(
                            era.mop_contract, 'virtual_bill')
                        if mop_func is None:
                            values['problem'] += " MOP virtual_bill " \
                                "function can't be found."
                        else:
                            mop_func(supply_source)
                            mop_bill = supply_source.mop_bill
                            values['metered-import-net-gbp'] += \
                                mop_bill['net-gbp']
                            if len(mop_bill['problem']) > 0:
                                values['problem'] += \
                                    " MOP virtual bill problem: " + \
                                    mop_bill['problem']
                # Milliseconds since the epoch, for change detection.
                values['timestamp'] = int(time.time() * 1000)
                tmp_file.write(
                    ','.join(
                        '"' + str(values[name]) + '"'
                        for name in field_names) + '\n')
    # NOTE(review): bare except also catches SystemExit/KeyboardInterrupt,
    # and tmp_file may still be None here if open() failed — both worth
    # tightening.
    except:
        tmp_file.write(traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
        # NOTE(review): tmp_file.close() raises AttributeError if the file
        # was never opened, and running_name may be unbound — confirm the
        # happy path always reaches make_names()/open() before failing.
        tmp_file.close()
        os.rename(running_name, finished_name)
def content():
    """Write the CRC (Carbon Reduction Commitment) annual report as CSV.

    For each net/gen-net supply with an import MPAN active in the CRC year
    (1 April ``year`` to 31 March ``year + 1``), work out per meter
    category (hh/amr/nhh/unmetered) the kWh consumed and the number of
    'normal days' (days covered by actual rather than estimated data),
    then classify the supply's data as "Actual" or "Estimated".

    NHH consumption is reconstructed by pairing actual register reads
    (working backwards then forwards from the era's period start) into
    date-bounded pairs with a per-TPR kWh/half-hour rate.
    """
    sess = None
    f = None
    try:
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, "w")
        sess = db.session()
        # Read-type codes counted as actual (not estimated) reads.
        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        f.write(
            ','.join(
                (
                    'Chellow Supply Id', 'MPAN Core', 'Site Id',
                    'Site Name', 'From', 'To', 'NHH Breakdown',
                    'Actual HH Normal Days', 'Actual AMR Normal Days',
                    'Actual NHH Normal Days',
                    'Actual Unmetered Normal Days', 'Max HH Normal Days',
                    'Max AMR Normal Days', 'Max NHH Normal Days',
                    'Max Unmetered Normal Days', 'Total Actual Normal Days',
                    'Total Max Normal Days', 'Data Type', 'HH kWh',
                    'AMR kWh', 'NHH kWh', 'Unmetered kwh', 'HH Filled kWh',
                    'AMR Filled kWh', 'Total kWh', 'Note')) + '\n')
        # CRC year runs 1 April to 31 March inclusive (HH = one half-hour).
        year_start = datetime.datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH
        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')),
            Era.imp_mpan_core != null(),
            Era.start_date <= year_finish, or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(
            Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
        meter_types = ('hh', 'amr', 'nhh', 'unmetered')
        for supply in supplies:
            # Per-supply accumulators, keyed by meter category.
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])
            breakdown = ''
            for era in sess.query(Era).filter(
                    Era.supply_id == supply.id,
                    Era.start_date <= year_finish, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= year_start)):
                meter_type = era.make_meter_category()
                # Clamp the era to the CRC year -> [period_start,
                # period_finish].
                era_start = era.start_date
                period_start = era_start \
                    if era_start > year_start else year_start
                era_finish = era.finish_date
                if hh_after(era_finish, year_finish):
                    period_finish = year_finish
                else:
                    period_finish = era_finish
                # Whole days in the period; + 30 min because boundaries
                # are inclusive half-hours.
                max_normal_days[meter_type] += float(
                    totalseconds(period_finish - period_start) +
                    60 * 30) / (60 * 60 * 24)
                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()
                if meter_type == 'nhh':
                    # Pair up actual register reads around period_start.
                    # read_keys de-duplicates reads seen as both the
                    # 'present' read of one bill and the 'previous' read
                    # of the next.
                    read_list = []
                    read_keys = {}
                    pairs = []
                    # Four iterators: present/previous reads, before/after
                    # period_start, ordered outwards from period_start.
                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply_id == supply.id,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply_id == supply.id,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply_id == supply.id,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply_id == supply.id,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))
                    # First walk backwards from period_start, then
                    # forwards, merging present/previous reads into
                    # read_list in date order.
                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads
                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            # Pull the next usable 'present' read, skipping
                            # duplicates and reads from superseded bills.
                            while prime_pres_read is None:
                                try:
                                    pres_read = pres_reads.next()
                                except StopIteration:
                                    break
                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join(
                                    [str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue
                                # The latest-issued non-withdrawn bill
                                # overlapping this read's bill is the
                                # authoritative one; skip reads from
                                # superseded bills.
                                pres_bill = sess.query(Bill).join(
                                    BillType).filter(
                                    Bill.reads.any(),
                                    Bill.supply_id == supply.id,
                                    Bill.finish_date >=
                                    pres_read.bill.start_date,
                                    Bill.start_date <=
                                    pres_read.bill.finish_date,
                                    BillType.code != 'W').order_by(
                                    Bill.issue_date.desc(),
                                    BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue
                                # All registers read at the same moment on
                                # the same meter, keyed by TPR.
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(
                                        RegisterRead).filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id ==
                                        pres_bill.id,
                                        RegisterRead.present_date ==
                                        pres_date,
                                        RegisterRead.msn == pres_msn))
                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            # Same for the next usable 'previous' read.
                            while prime_prev_read is None:
                                try:
                                    prev_read = prev_reads.next()
                                except StopIteration:
                                    break
                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join(
                                    [str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill).join(
                                    BillType).filter(
                                    Bill.reads.any(),
                                    Bill.supply_id == supply.id,
                                    Bill.finish_date >=
                                    prev_read.bill.start_date,
                                    Bill.start_date <=
                                    prev_read.bill.finish_date,
                                    BillType.code != 'W').order_by(
                                    Bill.issue_date.desc(),
                                    BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue
                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(
                                        RegisterRead).filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id ==
                                        prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))
                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None
                            # Merge: append whichever candidate read comes
                            # next in the direction of travel.
                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                            # Turn the two newest reads into a consumption
                            # pair if they're from the same meter with the
                            # same register set.
                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]
                                if aft_read['msn'] == fore_read['msn'] and \
                                        set(aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']
                                    num_hh = float(
                                        totalseconds(
                                            pair_finish_date + HH -
                                            pair_start_date)) / (30 * 60)
                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].iteritems():
                                        end_val = fore_read[
                                            'reads'][tpr_code]
                                        kwh = end_val - initial_val
                                        if kwh < 0:
                                            # Register rolled over its
                                            # maximum; assume one wrap.
                                            digits = int(
                                                math.log10(
                                                    initial_val)) + 1
                                            kwh = 10 ** digits + kwh
                                        # Average kWh per half-hour.
                                        tprs[tpr_code] = \
                                            float(kwh) / num_hh
                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date':
                                            pair_finish_date,
                                            'tprs': tprs})
                            # Stop once we have a pair and (going
                            # forwards) have passed the period end.
                            if len(pairs) > 0 and \
                                    (not is_forwards or (
                                        is_forwards and
                                        read_list[-1]['date'] >
                                        period_finish)):
                                break
                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        # No usable reads: assume zero consumption on a
                        # dummy TPR so the pair machinery still runs.
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        # Days covered by actual read pairs count as
                        # 'normal' days.
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                if pair_start > period_start:
                                    block_start = pair_start
                                else:
                                    block_start = period_start
                                if pair_finish < period_finish:
                                    block_finish = pair_finish
                                else:
                                    block_finish = period_finish
                                if block_start <= block_finish:
                                    normal_days[meter_type] += float(
                                        totalseconds(
                                            block_finish - block_start) +
                                        60 * 30) / (60 * 60 * 24)
                    # smooth: close gaps between consecutive pairs.
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = \
                            pairs[i]['start-date'] - HH
                    # stretch: extend the first/last pair to cover the
                    # whole period.
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # chop: discard pairs wholly outside the period.
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]
                    # squash: clamp the remaining ends to the period.
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start
                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish
                    # Convert each pair's per-HH rates into kWh totals.
                    for pair in pairs:
                        pair_hhs = float(
                            totalseconds(
                                pair['finish-date'] -
                                pair['start-date']) + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in \
                                pair['tprs'].iteritems():
                            total_kwh[meter_type] += pair_kwh * pair_hhs
                    breakdown += 'pairs - \n' + str(pairs)
                elif meter_type in ('hh', 'amr'):
                    # Half-hourly data: sum actuals and infer 'filled'
                    # kWh for missing half-hours from the year's average.
                    period_kwhs = list(
                        v[0] for v in sess.query(
                            cast(HhDatum.value, Float)).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish))
                    year_kwhs = list(
                        v[0] for v in sess.query(
                            cast(HhDatum.value, Float)).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish))
                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = totalseconds(
                        period_finish + HH - period_start) / (60 * 30)
                    if year_len_kwhs > 0:
                        # Fill missing half-hours with the yearly mean.
                        filled_kwh[meter_type] += float(year_sum_kwhs) / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
                    # Normal days = count of actual ('A') half-hours / 48.
                    normal_days[meter_type] += float(
                        sess.query(func.count(HhDatum.value)).join(
                            Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0]) / 48
                elif meter_type == 'unmetered':
                    # Unmetered: kWh come straight from the bills and the
                    # whole period counts as normal days.
                    bills = sess.query(Bill).filter(
                        Bill.supply_id == supply.id,
                        Bill.finish_date >= period_start,
                        Bill.start_date <= period_finish)
                    for bill in bills:
                        total_kwh[meter_type] += float(bill.kwh)
                    normal_days[meter_type] += float(
                        totalseconds(period_finish - period_start) +
                        60 * 30) / (60 * 60 * 24)
            # for full year 183
            # CRC rule: at least 183 normal days out of 365 means the
            # supply's data counts as "Actual".
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            is_normal = float(
                total_normal_days) / total_max_normal_days >= \
                float(183) / 365
            # NOTE(review): mpan_core/site come from the last era iterated
            # — confirm every supply here has at least one era in the year
            # (the query filter suggests so).
            f.write(
                ','.join(
                    '"' + str(val) + '"' for val in [
                        supply.id, mpan_core, site.code, site.name,
                        hh_format(year_start), hh_format(year_finish),
                        breakdown] +
                    [normal_days[type] for type in meter_types] +
                    [max_normal_days[type] for type in meter_types] +
                    [
                        total_normal_days, total_max_normal_days,
                        "Actual" if is_normal else "Estimated"] +
                    [total_kwh[type] for type in meter_types] +
                    [filled_kwh[type] for type in ('hh', 'amr')] +
                    [
                        sum(total_kwh.values()) +
                        sum(filled_kwh.values()), '']) + '\n')
            # avoid a long running transaction
            sess.rollback()
    # NOTE(review): bare except also catches SystemExit/KeyboardInterrupt,
    # and f may be None here if open() failed — worth tightening.
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        f.write("Problem " + msg)
    finally:
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
# --- Report download set-up (module level) ---
# MIME type and file name for the half-hourly data download; start_date,
# inv, is_zipped etc. are provided earlier in this report script.
mimetype = 'text/csv'
file_extension = ".csv"
base_name = "hh_data_row_" + start_date.strftime("%Y%m%d%H%M") + \
    file_extension
# Quoted CSV header line for the HH data rows.
titles = ','.join(
    '"' + v + '"' for v in (
        "Site Code", "Imp MPAN Core", "Exp Mpan Core", "Start Date",
        "Import ACTIVE", "Import ACTIVE Status",
        "Import REACTIVE_IMP", "Import REACTIVE_IMP Status",
        "Import REACTIVE_EXP", "Import REACTIVE_EXP Status",
        "Export ACTIVE", "Export ACTIVE Status",
        "Export REACTIVE_IMP", "Export REACTIVE_IMP Status",
        "Export REACTIVE_EXP", "Export REACTIVE_EXP Status")) + "\n"
user = inv.getUser()
running_name, finished_name = dloads.make_names(base_name, user)
# Either a zip archive or a plain CSV file, depending on the request.
if is_zipped:
    zf = zipfile.ZipFile(running_name, 'w')
else:
    tmp_file = open(running_name, "w")


def content():
    # NOTE(review): this definition is truncated at the chunk boundary —
    # the remainder of the function is not visible in this chunk.
    sess = None
    try:
        sess = db.session()
        if method == 'POST':
            supplies = sess.query(Supply).join(Era).filter(
                Era.start_date <= finish_date,
                or_(
def content():
    """Write the scenario report as an ODS spreadsheet download.

    Runs a 'what-if' scenario over ``months`` months from the scenario
    start date: rate multipliers/constants may be overridden per rate
    contract, and per-site used/generated kW changes may be applied.  Two
    sheets are produced: 'Supply Level' (one row per supply per month,
    with virtual bills) and 'Site Level' (one summary row per site per
    month).  Names such as ``scenario_id``, ``base_name``, ``now``,
    ``site_id``, ``supply_id``, ``future_funcs``, ``report_context``,
    ``meter_order`` and ``CATEGORY_ORDER`` are closed over from the
    enclosing report script.
    """
    global scenario_props
    sess = None
    try:
        sess = db.session()
        if scenario_props is None:
            # Load scenario properties from the scenario contract.
            scenario_contract = Contract.get_supplier_by_id(
                sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)
        # Install future-rate functions for any rate contract ('Z' market
        # role) that the scenario overrides.
        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue
            try:
                rate_start = props['start_date']
            except KeyError:
                raise UserException(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")
            if rate_start is not None:
                rate_start = rate_start.replace(tzinfo=pytz.utc)
            # The rate module of the same name as the contract.
            lib = globals()[contract.name]
            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}
        start_date = scenario_props['scenario_start']
        if start_date is None:
            # Default to the start of the current month.
            start_date = datetime.datetime(
                now.year, now.month, 1, tzinfo=pytz.utc)
        else:
            start_date = start_date.replace(tzinfo=pytz.utc)
        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)
        # kwh_start: date from which consumption is forecast rather than
        # read from historic data.
        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None
        if kwh_start is None:
            kwh_start = computer.forecast_date()
        else:
            kwh_start = kwh_start.replace(tzinfo=pytz.utc)
        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            Era.start_date <= finish_date, or_(
                Era.finish_date == null(),
                Era.finish_date >= start_date)).distinct()
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.filter(Era.supply == supply)
        running_name, finished_name = dloads.make_names(
            '_'.join(base_name) + '.ods', user)
        rf = open(running_name, "wb")
        f = odswriter.writer(rf, '1.1')
        group_tab = f.new_sheet("Site Level")
        sup_tab = f.new_sheet("Supply Level")
        # Per-site kW changes parsed from the scenario's CSV text:
        # SITE_CODE, USED/GENERATED, DATE, MULTIPLIER per row.
        changes = defaultdict(list, {})
        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''
        for row in csv.reader(StringIO.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise UserException(
                    "Can't interpret the row " + str(row) +
                    " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = datetime.datetime.strptime(
                date_str.strip(), "%Y-%m-%d").replace(tzinfo=pytz.utc)
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})
        sup_header_titles = [
            'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source',
            'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'site-id', 'site-name', 'associated-site-ids', 'month',
            'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh',
            'export-3rd-party-kwh', 'displaced-kwh', 'used-kwh',
            'used-3rd-party-kwh', 'import-net-gbp', 'export-net-gbp',
            'import-gen-gbp', 'export-gen-gbp', 'import-3rd-party-gbp',
            'export-3rd-party-gbp', 'displaced-gbp', 'used-gbp',
            'used-3rd-party-gbp', 'billed-import-net-kwh',
            'billed-import-net-gbp']
        # Collect the union of virtual-bill column titles per contract
        # type (MOP, DC, import supplier, export supplier).
        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr) \
                .join(Era.supply).join(Source).filter(
                    Era.start_date <= start_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = computer.contract_func(
                    report_context, cont, 'virtual_bill_titles', None)
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)
        sup_tab.writerow(
            sup_header_titles + summary_titles +
            [None] + ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] +
            [None] + [
                'imp-supplier-' + t
                for t in title_dict['imp-supplier']] +
            [None] + [
                'exp-supplier-' + t
                for t in title_dict['exp-supplier']])
        group_tab.writerow(site_header_titles + summary_titles)
        sites = sites.all()
        month_start = start_date
        # One pass per calendar month in the scenario.
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_associates = set()
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                # First pass over site groups (physical and logical) just
                # to gather associates, sources, generator types and the
                # site's meter category.
                for group in site.groups(
                        sess, month_start, month_finish, False):
                    site_associates.update(
                        set(
                            s.code for s in group.sites
                            if s.code != site.code))
                    for cand_supply in group.supplies:
                        site_sources.add(cand_supply.source.code)
                        if cand_supply.generator_type is not None:
                            site_gen_types.add(
                                cand_supply.generator_type.code)
                        for cand_era in cand_supply.find_eras(
                                sess, group.start_date,
                                group.finish_date):
                            # Promote the site category towards 'hh' as
                            # better-metered eras are found.
                            if site_category != 'hh':
                                if cand_era.pc.code == '00':
                                    site_category = 'hh'
                                elif site_category != 'amr':
                                    if len(cand_era.channels) > 0:
                                        site_category = 'amr'
                                    elif site_category != 'nhh':
                                        if cand_era.mtc.meter_type.code \
                                                not in ['UM', 'PH']:
                                            site_category = 'nhh'
                                        else:
                                            site_category = 'unmetered'
                # Second pass: the actual calculations.
                for group in site.groups(
                        sess, month_start, month_finish, True):
                    calcs = []
                    # kWh of non-channelled (estimated) import, keyed by
                    # half-hour start, to be folded into the site data.
                    deltas = defaultdict(int)
                    group_associates = set(
                        s.code for s in group.sites
                        if s.code != site.code)
                    for supply in group.supplies:
                        if supply_id is not None and \
                                supply.id != supply_id:
                            continue
                        for era in sess.query(Era).join(Supply) \
                                .join(Source).filter(
                                Era.supply == supply,
                                Era.start_date <= group.finish_date,
                                or_(
                                    Era.finish_date == null(),
                                    Era.finish_date >=
                                    group.start_date)):
                            # Clamp the era to the group period.
                            if era.start_date > group.start_date:
                                ss_start = era.start_date
                            else:
                                ss_start = group.start_date
                            if hh_before(
                                    era.finish_date, group.finish_date):
                                ss_finish = era.finish_date
                            else:
                                ss_finish = group.finish_date
                            if era.imp_mpan_core is None:
                                imp_ss = None
                            else:
                                imp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start,
                                    era, True, None, report_context)
                            if era.exp_mpan_core is None:
                                exp_ss = None
                                measurement_type = \
                                    imp_ss.measurement_type
                            else:
                                exp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start,
                                    era, False, None, report_context)
                                measurement_type = \
                                    exp_ss.measurement_type
                            order = meter_order[measurement_type]
                            calcs.append(
                                (
                                    order, era.imp_mpan_core,
                                    era.exp_mpan_core, imp_ss, exp_ss))
                            # Eras without channels contribute estimated
                            # import kWh to the site-level deltas.
                            if imp_ss is not None and \
                                    len(era.channels) == 0:
                                for hh in imp_ss.hh_data:
                                    deltas[hh['start-date']] += \
                                        hh['msp-kwh']
                    # Accumulators for kWh moved between categories by
                    # the scenario's kW changes.
                    imp_net_delts = defaultdict(int)
                    exp_net_delts = defaultdict(int)
                    imp_gen_delts = defaultdict(int)
                    displaced_era = computer.displaced_era(
                        sess, group, group.start_date, group.finish_date)
                    site_ds = computer.SiteSource(
                        sess, site, group.start_date, group.finish_date,
                        kwh_start, None, report_context, displaced_era)
                    # Fold the non-channelled import into the site data.
                    for hh in site_ds.hh_data:
                        try:
                            delta = deltas[hh['start-date']]
                            hh['import-net-kwh'] += delta
                            hh['used-kwh'] += delta
                        except KeyError:
                            pass
                    # Apply scenario kW changes half-hour by half-hour,
                    # rebalancing import/export/displaced accordingly.
                    for hh in site_ds.hh_data:
                        for change in site_changes:
                            if change['type'] == 'used' and \
                                    change['date'] <= hh['start-date']:
                                used = change['multiplier'] * \
                                    hh['used-kwh']
                                exp_net = max(
                                    0, hh['import-gen-kwh'] -
                                    hh['export-gen-kwh'] - used)
                                exp_net_delt = exp_net - \
                                    hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += \
                                    exp_net_delt
                                displaced = hh['import-gen-kwh'] - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = used - displaced
                                imp_delt = imp_net - \
                                    hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += \
                                    imp_delt
                                hh['import-net-kwh'] = imp_net
                                hh['used-kwh'] = used
                                hh['export-net-kwh'] = exp_net
                                hh['msp-kwh'] = displaced
                            elif change['type'] == 'generated' and \
                                    change['date'] <= hh['start-date']:
                                imp_gen = change['multiplier'] * \
                                    hh['import-gen-kwh']
                                imp_gen_delt = imp_gen - \
                                    hh['import-gen-kwh']
                                exp_net = max(
                                    0, imp_gen - hh['export-gen-kwh'] -
                                    hh['used-kwh'])
                                exp_net_delt = exp_net - \
                                    hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += \
                                    exp_net_delt
                                displaced = imp_gen - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = hh['used-kwh'] - displaced
                                imp_net_delt = imp_net - \
                                    hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += \
                                    imp_net_delt
                                imp_gen_delts[hh['start-date']] += \
                                    imp_gen_delt
                                hh['import-net-kwh'] = imp_net
                                hh['export-net-kwh'] = exp_net
                                hh['import-gen-kwh'] = imp_gen
                                hh['msp-kwh'] = displaced
                    # Displaced row (only for whole-site runs).
                    if displaced_era is not None and supply_id is None:
                        month_data = {}
                        for sname in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'msp', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for xname in ('kwh', 'gbp'):
                                month_data[sname + '-' + xname] = 0
                        month_data['used-kwh'] = \
                            month_data['displaced-kwh'] = \
                            sum(hh['msp-kwh'] for hh in site_ds.hh_data)
                        disp_supplier_contract = \
                            displaced_era.imp_supplier_contract
                        disp_vb_function = computer.contract_func(
                            report_context, disp_supplier_contract,
                            'displaced_virtual_bill', None)
                        if disp_vb_function is None:
                            raise UserException(
                                "The supplier contract " +
                                disp_supplier_contract.name +
                                " doesn't have the "
                                "displaced_virtual_bill() "
                                "function.")
                        disp_vb_function(site_ds)
                        disp_supplier_bill = site_ds.supplier_bill
                        try:
                            gbp = disp_supplier_bill['net-gbp']
                        except KeyError:
                            disp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                site_ds.mpan_core + \
                                ' the virtual bill ' + \
                                str(disp_supplier_bill) + \
                                ' from the contract ' + \
                                disp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'
                        month_data['used-gbp'] = \
                            month_data['displaced-gbp'] = \
                            site_ds.supplier_bill['net-gbp']
                        out = [
                            None, None,
                            displaced_era.make_meter_category(),
                            'displaced', None, None, None, None,
                            site.code, site.name,
                            ','.join(sorted(list(group_associates))),
                            month_finish] + \
                            [month_data[t] for t in summary_titles]
                        sup_tab.writerow(out)
                        for k, v in month_data.iteritems():
                            site_month_data[k] += v
                    # One supply-level row per calc, in meter order.
                    for i, (
                            order, imp_mpan_core, exp_mpan_core, imp_ss,
                            exp_ss) in enumerate(sorted(calcs)):
                        if imp_ss is None:
                            era = exp_ss.era
                        else:
                            era = imp_ss.era
                        supply = era.supply
                        source = supply.source
                        source_code = source.code
                        site_sources.add(source_code)
                        month_data = {}
                        for name in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'displaced', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for sname in ('kwh', 'gbp'):
                                month_data[name + '-' + sname] = 0
                        # Pick the delta series this supply absorbs.
                        if source_code == 'net':
                            delts = imp_net_delts
                        elif source_code == 'gen':
                            delts = imp_gen_delts
                        else:
                            delts = []
                        if len(delts) > 0 and imp_ss is not None:
                            # Spread the scenario deltas over the
                            # supply's half-hours.
                            for hh in imp_ss.hh_data:
                                diff = hh['msp-kwh'] + \
                                    delts[hh['start-date']]
                                if diff < 0:
                                    hh['msp-kwh'] = 0
                                    hh['msp-kw'] = 0
                                    # NOTE(review): msp-kwh has just been
                                    # zeroed, so this subtracts 0 —
                                    # probably meant to subtract the
                                    # pre-zero value; confirm upstream.
                                    delts[hh['start-date']] -= \
                                        hh['msp-kwh']
                                else:
                                    hh['msp-kwh'] += \
                                        delts[hh['start-date']]
                                    hh['msp-kw'] += hh['msp-kwh'] / 2
                                    del delts[hh['start-date']]
                            # Anything left over lands in the first HH.
                            left_kwh = sum(delts.values())
                            if left_kwh > 0:
                                first_hh = imp_ss.hh_data[0]
                                first_hh['msp-kwh'] += left_kwh
                                first_hh['msp-kw'] += left_kwh / 2
                        imp_supplier_contract = \
                            era.imp_supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = \
                                computer.contract_func(
                                    report_context,
                                    imp_supplier_contract,
                                    'virtual_bill', None)
                            if import_vb_function is None:
                                raise UserException(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)
                            imp_supplier_bill = imp_ss.supplier_bill
                            try:
                                gbp = imp_supplier_bill['net-gbp']
                            except KeyError:
                                imp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-gbp'] += \
                                    gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-gbp'] += \
                                    gbp
                                month_data['used-gbp'] -= gbp
                            kwh = sum(
                                hh['msp-kwh'] for hh in imp_ss.hh_data)
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-kwh'] += \
                                    kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-kwh'] += \
                                    kwh
                                month_data['used-kwh'] -= kwh
                            elif source_code in ('gen', 'gen-net'):
                                month_data['import-gen-kwh'] += kwh
                        exp_supplier_contract = \
                            era.exp_supplier_contract
                        if exp_supplier_contract is None:
                            # No export supplier: take export kWh
                            # straight from the export channels.
                            kwh = sess.query(
                                func.coalesce(
                                    func.sum(
                                        cast(HhDatum.value, Float)),
                                    0)). \
                                join(Channel).filter(
                                    Channel.era == era,
                                    Channel.channel_type == 'ACTIVE',
                                    Channel.imp_related ==
                                    false()).scalar()
                            if source_code == 'gen':
                                month_data['export-net-kwh'] += kwh
                        else:
                            export_vb_function = \
                                computer.contract_func(
                                    report_context,
                                    exp_supplier_contract,
                                    'virtual_bill', None)
                            export_vb_function(exp_ss)
                            exp_supplier_bill = exp_ss.supplier_bill
                            try:
                                gbp = exp_supplier_bill['net-gbp']
                            except KeyError:
                                # NOTE(review): this message references
                                # imp_ss / imp_supplier_bill /
                                # imp_supplier_contract — looks like a
                                # copy-paste from the import branch;
                                # confirm and correct upstream.
                                exp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            kwh = sum(
                                hh['msp-kwh'] for hh in exp_ss.hh_data)
                            if source_code in ('net', 'gen-net'):
                                month_data['export-net-kwh'] += kwh
                                month_data['export-net-gbp'] += gbp
                            elif source_code in \
                                    ('3rd-party', '3rd-party-reverse'):
                                month_data['export-3rd-party-kwh'] += \
                                    kwh
                                month_data['export-3rd-party-gbp'] += \
                                    gbp
                                month_data['used-kwh'] -= kwh
                                month_data['used-gbp'] -= gbp
                            elif source_code == 'gen':
                                month_data['export-gen-kwh'] += kwh
                        # DC and MOP virtual bills run against whichever
                        # supply source exists (import preferred).
                        sss = exp_ss if imp_ss is None else imp_ss
                        dc_contract = era.hhdc_contract
                        sss.contract_func(
                            dc_contract, 'virtual_bill')(sss)
                        dc_bill = sss.dc_bill
                        gbp = dc_bill['net-gbp']
                        mop_contract = era.mop_contract
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                        mop_bill = sss.mop_bill
                        gbp += mop_bill['net-gbp']
                        if source_code in (
                                '3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp
                        else:
                            month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp
                        if source_code in ('gen', 'gen-net'):
                            generator_type = supply.generator_type.code
                            site_gen_types.add(generator_type)
                        else:
                            generator_type = None
                        sup_category = era.make_meter_category()
                        if CATEGORY_ORDER[site_category] < \
                                CATEGORY_ORDER[sup_category]:
                            site_category = sup_category
                        # Apportion overlapping bills to this period by
                        # the overlapping fraction of their duration.
                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = totalseconds(
                                bill_finish - bill_start) + (30 * 60)
                            overlap_duration = totalseconds(
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)) + \
                                (30 * 60)
                            overlap_proportion = \
                                float(overlap_duration) / bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)
                        out = [
                            era.imp_mpan_core, era.exp_mpan_core,
                            sup_category, source_code, generator_type,
                            supply.name, era.msn, era.pc.code,
                            site.code, site.name,
                            ','.join(sorted(list(site_associates))),
                            month_finish] + [
                            month_data[t] for t in summary_titles] + \
                            [None] + [
                            (mop_bill[t] if t in mop_bill else None)
                            for t in title_dict['mop']] + [None] + \
                            [(dc_bill[t] if t in dc_bill else None)
                                for t in title_dict['dc']]
                        if imp_supplier_contract is None:
                            out += [None] * \
                                (len(title_dict['imp-supplier']) + 1)
                        else:
                            out += [None] + [
                                (
                                    imp_supplier_bill[t]
                                    if t in imp_supplier_bill
                                    else None)
                                for t in title_dict['imp-supplier']]
                        if exp_supplier_contract is not None:
                            out += [None] + [
                                (
                                    exp_supplier_bill[t]
                                    if t in exp_supplier_bill
                                    else None)
                                for t in title_dict['exp-supplier']]
                        for k, v in month_data.iteritems():
                            site_month_data[k] += v
                        sup_tab.writerow(out)
                        # Avoid a long-running transaction.
                        sess.rollback()
                # Site-level summary row for the month.
                # NOTE(review): associates joined with '' here but ','
                # on the supply rows — confirm which is intended.
                group_tab.writerow(
                    [
                        site.code, site.name,
                        ''.join(sorted(list(site_associates))),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
            month_start += relativedelta(months=1)
    # NOTE(review): bare except also catches SystemExit/KeyboardInterrupt,
    # and group_tab may be unbound if the failure happened before the
    # spreadsheet was created.
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    finally:
        try:
            f.close()
            rf.close()
            os.rename(running_name, finished_name)
            if sess is not None:
                sess.close()
        except:
            # Cleanup itself failed (e.g. the file was never opened);
            # record the traceback as a separate error download.
            msg = traceback.format_exc()
            r_name, f_name = dloads.make_names('error.txt', user)
            ef = open(r_name, "wb")
            ef.write(msg + '\n')
            ef.close()
supply_id = form_int(inv, 'supply_id') else: supply_id = None if inv.hasParameter('mpan_cores'): mpan_cores_str = form_str(inv, 'mpan_cores') mpan_cores = mpan_cores_str.splitlines() if len(mpan_cores) == 0: mpan_cores = None else: for i in range(len(mpan_cores)): mpan_cores[i] = utils.parse_mpan_core(mpan_cores[i]) else: mpan_cores = None running_name, finished_name = dloads.make_names('supplies_snapshot.csv', user) def content(): sess = None try: sess = db.session() f = open(running_name, "w") f.write( ','.join( ( 'Date', 'Physical Site Id', 'Physical Site Name', 'Other Site Ids', 'Other Site Names', 'Supply Id', 'Source', 'Generator Type', 'DNO Name', 'Voltage Level', 'Metering Type', 'Mandatory HH', 'PC', 'MTC', 'CoP', 'SSC',
def content():
    """Export half-hourly data for the selected supplies as a download.

    Writes one CSV (or, if is_zipped, a zip archive with one CSV member
    per supply) into a 'running' download file, then renames it to its
    finished name so the download machinery picks it up.

    Relies on module-level state set up by the surrounding script:
    db, dloads, user, start_date, finish_date, supply_id, mpan_cores,
    base_name, is_zipped, imp_related, channel_type and HH (presumably a
    half-hour timedelta, as used elsewhere in this file -- confirm
    against the module header).
    """
    # Initialise everything the except/finally clauses touch, so a
    # failure early in the try block can't raise a second error
    # (NameError on running_name, or on zf/tf before the output is
    # opened) that masks the original traceback.
    sess = zf = tf = running_name = None
    try:
        sess = db.session()
        supplies = sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(Supply.id).distinct()
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            first_era = sess.query(Era).filter(
                Era.supply == supply,
                or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date),
                Era.start_date <= finish_date).order_by(
                Era.start_date).first()
            # Name the download after whichever MPAN core the first
            # overlapping era actually has (import preferred).
            if first_era.imp_mpan_core is None:
                name_core = first_era.exp_mpan_core
            else:
                name_core = first_era.imp_mpan_core
            base_name.append("supply_" + name_core.replace(' ', '_'))
        if mpan_cores is not None:
            supplies = supplies.filter(
                or_(
                    Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))
            base_name.append('filter')
        outs = []
        titles = "MPAN Core,Date," + ','.join(map(str, range(48)))
        running_name, finished_name = dloads.make_names(
            '_'.join(base_name) + ('.zip' if is_zipped else '.csv'), user)
        if is_zipped:
            zf = zipfile.ZipFile(running_name, "w", zipfile.ZIP_DEFLATED)
        else:
            tf = open(running_name, "w")
            # In plain-CSV mode there is a single title row up front; in
            # zip mode each member file gets its own copy of the titles.
            outs.append(titles)
        for supply in supplies:
            era = supply.find_era_at(sess, finish_date)
            if era is None or era.imp_mpan_core is None:
                mpan_core_str = "NA"
            else:
                mpan_core_str = era.imp_mpan_core
            current_date = start_date
            hh_data = iter(
                sess.query(HhDatum).join(Channel).join(Era).filter(
                    Era.supply == supply,
                    HhDatum.start_date >= start_date,
                    HhDatum.start_date <= finish_date,
                    Channel.imp_related == imp_related,
                    Channel.channel_type == channel_type).order_by(
                    HhDatum.start_date))
            try:
                datum = hh_data.next()
            except StopIteration:
                datum = None
            # Walk every half-hour slot in the range, merging in actual
            # data where it exists so that gaps appear as empty cells.
            while not current_date > finish_date:
                if current_date.hour == 0 and current_date.minute == 0:
                    # Midnight: start a fresh row for the new day.
                    outs.append(
                        "\n" + mpan_core_str + "," +
                        current_date.strftime('%Y-%m-%d'))
                outs.append(",")
                if datum is not None and datum.start_date == current_date:
                    outs.append(str(datum.value))
                    try:
                        datum = hh_data.next()
                    except StopIteration:
                        datum = None
                current_date += HH
            if is_zipped:
                fname = mpan_core_str + '_' + str(supply.id) + '.csv'
                zf.writestr(fname.encode('ascii'), titles + ''.join(outs))
            else:
                tf.write(''.join(outs))
            outs = []
        if is_zipped:
            zf.close()
        else:
            tf.close()
    except:
        msg = traceback.format_exc()
        # Log to stderr as the other handlers in this file do, so the
        # error isn't lost when the download file was never opened.
        sys.stderr.write(msg + '\n')
        # Only report into the download if it was actually opened; the
        # original assumed zf/tf existed and could raise a fresh
        # NameError here, hiding the real problem.
        if is_zipped:
            if zf is not None:
                zf.writestr('error.txt', msg)
                zf.close()
        elif tf is not None:
            tf.write(msg)
    finally:
        # Only rename once make_names() has chosen the names; otherwise
        # the finally clause itself would raise NameError.
        if running_name is not None:
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
def content(): sess = tmp_file = None try: sess = db.session() running_name, finished_name = dloads.make_names('bill_check.csv', user) tmp_file = open(running_name, "w") if batch_id is not None: batch = Batch.get_by_id(sess, batch_id) bills = sess.query(Bill).filter( Bill.batch_id == batch.id).order_by(Bill.reference) elif bill_id is not None: bill = Bill.get_by_id(sess, bill_id) bills = sess.query(Bill).filter(Bill.id == bill.id) batch = bill.batch contract = batch.contract market_role_code = contract.market_role.code vbf = computer.contract_func(caches, contract, 'virtual_bill', None) if vbf is None: raise UserException( 'The contract ' + contract.name + " doesn't have a function virtual_bill.") virtual_bill_titles_func = computer.contract_func( caches, contract, 'virtual_bill_titles', None) if virtual_bill_titles_func is None: raise UserException( 'The contract ' + contract.name + " doesn't have a function virtual_bill_titles.") virtual_bill_titles = virtual_bill_titles_func() tmp_file.write( ','.join( [ 'batch', 'bill-reference', 'bill-type', 'bill-kwh', 'bill-net-gbp', 'bill-vat-gbp', 'bill-start-date', 'bill-finish-date', 'bill-mpan-core', 'site-code', 'site-name', 'covered-from', 'covered-to', 'covered-bills'] + [ 'covered-' + val + ',virtual-' + val + ( ',difference-' + val if val.endswith('-gbp') else '') for val in virtual_bill_titles]) + '\n') for bill in bills: problem = '' supply = bill.supply read_dict = {} for read in bill.reads: gen_start = read.present_date.replace(hour=0).replace(minute=0) gen_finish = gen_start + relativedelta(days=1) - HH msn_match = False read_msn = read.msn for read_era in supply.find_eras(sess, gen_start, gen_finish): if read_msn == read_era.msn: msn_match = True break if not msn_match: problem += "The MSN " + read_msn + \ " of the register read " + str(read.id) + \ " doesn't match the MSN of the era." 
for dt, type in [ (read.present_date, read.present_type), (read.previous_date, read.previous_type)]: key = str(dt) + "-" + read.msn try: if type != read_dict[key]: problem += " Reads taken on " + str(dt) + \ " have differing read types." except KeyError: read_dict[key] = type bill_start = bill.start_date bill_finish = bill.finish_date era = supply.find_era_at(sess, bill.finish_date) if era is None: tmp_file.write( "\n,,,,,,,,,,Extraordinary! There isn't a era for " "this bill!") continue tmp_file.write( ','.join( '"' + str(val) + '"' for val in [ batch.reference, bill.reference, bill.bill_type.code, bill.kwh, bill.net, bill.vat, hh_format(bill_start), hh_format(bill_finish), era.imp_mpan_core]) + ",") covered_start = bill_start covered_finish = bill_finish covered_bill_ids = [] covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0} covered_primary_bill = None enlarged = True while enlarged: enlarged = False for covered_bill in sess.query(Bill).filter( Bill.supply_id == supply.id, Bill.start_date <= covered_finish, Bill.finish_date >= covered_start).order_by( Bill.issue_date.desc(), Bill.start_date): if market_role_code != \ covered_bill.batch.contract.market_role.code: continue if covered_primary_bill is None and \ len(covered_bill.reads) > 0: covered_primary_bill = covered_bill if covered_bill.start_date < covered_start: covered_start = covered_bill.start_date enlarged = True break if covered_bill.finish_date > covered_finish: covered_finish = covered_bill.finish_date enlarged = True break for covered_bill in sess.query(Bill).filter( Bill.supply_id == supply.id, Bill.start_date <= covered_finish, Bill.finish_date >= covered_start).order_by( Bill.issue_date.desc(), Bill.start_date): if market_role_code != \ covered_bill.batch.contract.market_role.code: continue covered_bill_ids.append(covered_bill.id) covered_bdown['net-gbp'] += float(covered_bill.net) covered_bdown['vat-gbp'] += float(covered_bill.vat) covered_bdown['sum-msp-kwh'] += 
float(covered_bill.kwh) if len(covered_bill.breakdown) > 0: covered_rates = collections.defaultdict(set) for k, v in eval(covered_bill.breakdown, {}).iteritems(): if k.endswith('rate'): covered_rates[k].add(v) elif k != 'raw-lines': try: covered_bdown[k] += v except KeyError: covered_bdown[k] = v except TypeError, detail: raise UserException( "For key " + str(k) + " the value " + str(v) + " can't be added to the existing value " + str(covered_bdown[k]) + ". " + str(detail)) for k, v in covered_rates.iteritems(): covered_bdown[k] = v.pop() if len(v) == 1 else None virtual_bill = {} for era in sess.query(Era).filter( Era.supply_id == supply.id, Era.imp_mpan_core != null(), Era.start_date <= covered_finish, or_( Era.finish_date == null(), Era.finish_date >= covered_start)).distinct(): site = sess.query(Site).join(SiteEra).filter( SiteEra.is_physical == true(), SiteEra.era_id == era.id).one() if covered_start > era.start_date: chunk_start = covered_start else: chunk_start = era.start_date if hh_before(covered_finish, era.finish_date): chunk_finish = covered_finish else: chunk_finish = era.finish_date data_source = computer.SupplySource( sess, chunk_start, chunk_finish, forecast_date, era, True, None, caches, covered_primary_bill) vbf(data_source) if market_role_code == 'X': vb = data_source.supplier_bill elif market_role_code == 'C': vb = data_source.dc_bill elif market_role_code == 'M': vb = data_source.mop_bill else: raise UserException("Odd market role.") for k, v in vb.iteritems(): try: virtual_bill[k] += v except KeyError: virtual_bill[k] = v except TypeError, detail: raise UserException( "For key " + str(k) + " and value " + str(v) + ". 
" + str(detail)) values = [ site.code, site.name, hh_format(covered_start), hh_format(covered_finish), ';'.join(str(id).replace(',', '') for id in covered_bill_ids)] for title in virtual_bill_titles: try: cov_val = covered_bdown[title] values.append(cov_val) del covered_bdown[title] except KeyError: cov_val = None values.append('') try: virt_val = virtual_bill[title] if isinstance(virt_val, datetime.datetime): virt_val = hh_format(virt_val) values.append(virt_val) del virtual_bill[title] except KeyError: virt_val = None values.append('') if title.endswith('-gbp'): if all(isinstance(val, (int, float)) for val in [ cov_val, virt_val]): values.append(cov_val - virt_val) else: values.append('') for title in sorted(virtual_bill.keys()): val = virtual_bill[title] if isinstance(val, datetime.datetime): val = hh_format(val) values += ['virtual-' + title, val] if title in covered_bdown: values += ['covered-' + title, covered_bdown[title]] else: values += ['', ''] tmp_file.write( ','.join('"' + str(value) + '"' for value in values) + '\n')
def content():
    """Write the 'supplies_duration.csv' report and publish it.

    Emits one row per supply active between start_date and finish_date,
    covering site, metering and register-read summary columns plus the
    import/export sections produced by mpan_bit(), then renames the
    'running' download file to its finished name.

    Relies on module-level state set up by the surrounding script:
    db, dloads, user, start_date, finish_date, supply_id,
    NORMAL_READ_TYPES, HH and the helpers mpan_bit / hh_format /
    hh_before / utils.totalseconds.
    """
    sess = None
    f = None
    # Guards the finally clause: running_name only exists once
    # make_names() has succeeded.
    running_name = None
    try:
        sess = db.session()
        running_name, finished_name = dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, "w")
        f.write(
            ','.join(
                (
                    "Supply Id", "Supply Name", "Source", "Generator Type",
                    "Site Ids", "Site Names", "From", "To", "PC", "MTC",
                    "CoP", "SSC", "Normal Reads", "Type", "Import LLFC",
                    "Import MPAN Core", "Import Supply Capacity",
                    "Import Supplier", "Import Total MSP kWh",
                    "Import Non-actual MSP kWh", "Import Total GSP kWh",
                    "Import MD / kW", "Import MD Date", "Import MD / kVA",
                    "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                    "Export Supply Capacity", "Export Supplier",
                    "Export Total MSP kWh", "Export Non-actual MSP kWh",
                    "Export GSP kWh", "Export MD / kW", "Export MD Date",
                    "Export MD / kVA", "Export Bad HHs")))
        supplies = sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(Supply.id).distinct()
        if supply_id is not None:
            supplies = supplies.filter(
                Supply.id == Supply.get_by_id(sess, supply_id).id)
        for supply in supplies:
            eras = supply.find_eras(sess, start_date, finish_date)
            # Report against the last era overlapping the period.
            era = eras[-1]
            site_codes = ', '.join(
                site_era.site.code for site_era in era.site_eras)
            site_names = ', '.join(
                site_era.site.name for site_era in era.site_eras)
            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code
            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code
            # Count 'normal' register reads: a read only counts when its
            # bill is the primary bill (latest issue, with reads) for the
            # period it covers, de-duplicated by date + meter serial.
            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.previous_date >= start_date,
                        RegisterRead.previous_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES)),
                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                        Bill.supply == supply, BillType.code != 'W',
                        RegisterRead.present_date >= start_date,
                        RegisterRead.present_date <= finish_date,
                        ReadType.code.in_(NORMAL_READ_TYPES))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(str(rdate) + "_" + read.msn)
            supply_type = era.make_meter_category()
            # Clamp the reporting chunk to the overlap of the supply's
            # eras with the requested period.
            if eras[0].start_date > start_date:
                chunk_start = eras[0].start_date
            else:
                chunk_start = start_date
            if hh_before(finish_date, era.finish_date):
                chunk_finish = finish_date
            else:
                chunk_finish = era.finish_date
            # Number of half-hour periods in the chunk (inclusive of the
            # starting HH, hence the - HH).
            num_hh = utils.totalseconds(chunk_finish - (chunk_start - HH)) / \
                (30 * 60)
            f.write(
                '\n' + ','.join(
                    ('"' + str(value) + '"') for value in [
                        supply.id, supply.name, supply.source.code,
                        generator_type, site_codes, site_names,
                        hh_format(start_date), hh_format(finish_date),
                        era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                        len(prime_reads), supply_type]) + ',')
            f.write(
                mpan_bit(
                    sess, supply, True, num_hh, eras, chunk_start,
                    chunk_finish) + "," +
                mpan_bit(
                    sess, supply, False, num_hh, eras, chunk_start,
                    chunk_finish))
    except:
        msg = traceback.format_exc()
        # Log to stderr as the other handlers in this file do, so the
        # error isn't lost if the download file was never opened.
        sys.stderr.write(msg + '\n')
        # The original called f.write() unconditionally, raising
        # AttributeError when open()/make_names() failed and masking the
        # real traceback.
        if f is not None:
            f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        # Close and publish only if the file was actually created;
        # renaming a never-created file would raise OSError here.
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)