Example #1
def _make_calcs(
    sess,
    site,
    start_date,
    finish_date,
    supply_id,
    site_deltas,
    forecast_from,
    report_context,
    era_maps,
):
    site_gen_types = set()
    calcs = []
    for era in (sess.query(Era).join(SiteEra).join(Pc).filter(
            SiteEra.site == site,
            SiteEra.is_physical == true(),
            Era.start_date <= finish_date,
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
    ).options(
            joinedload(Era.ssc),
            joinedload(Era.dc_contract),
            joinedload(Era.mop_contract),
            joinedload(Era.imp_supplier_contract),
            joinedload(Era.exp_supplier_contract),
            joinedload(Era.channels),
            joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.cop),
            joinedload(Era.supply).joinedload(Supply.dno),
            joinedload(Era.supply).joinedload(Supply.gsp_group),
            joinedload(Era.supply).joinedload(Supply.source),
            joinedload(Era.mtc).joinedload(Mtc.meter_type),
            joinedload(Era.pc),
            joinedload(Era.site_eras),
    ).order_by(Pc.code)):

        supply = era.supply
        if supply.generator_type is not None:
            site_gen_types.add(supply.generator_type.code)

        if supply_id is not None and supply.id != supply_id:
            continue

        ss_start = hh_max(era.start_date, start_date)
        ss_finish = hh_min(era.finish_date, finish_date)

        if era.imp_mpan_core is None:
            imp_ss = None
        else:
            sup_deltas = site_deltas["supply_deltas"][True][supply.source.code]

            imp_ss = SupplySource(
                sess,
                ss_start,
                ss_finish,
                forecast_from,
                era,
                True,
                report_context,
                era_maps=era_maps,
                deltas=sup_deltas,
            )

        if era.exp_mpan_core is None:
            exp_ss = None
            measurement_type = imp_ss.measurement_type
        else:
            sup_deltas = site_deltas["supply_deltas"][False][
                supply.source.code]

            exp_ss = SupplySource(
                sess,
                ss_start,
                ss_finish,
                forecast_from,
                era,
                False,
                report_context,
                era_maps=era_maps,
                deltas=sup_deltas,
            )
            measurement_type = exp_ss.measurement_type

        order = meter_order[measurement_type]
        calcs.append(
            (order, era.imp_mpan_core, era.exp_mpan_core, imp_ss, exp_ss))

    # Check if gen deltas haven't been consumed
    extra_sss = set()
    for is_imp in (True, False):
        sup_deltas = site_deltas["supply_deltas"][is_imp]["gen"]
        if (len(
                list(t for t in sup_deltas["site"]
                     if start_date <= t <= finish_date)) > 0):
            extra_sss.add(is_imp)

    displaced_era = chellow.computer.displaced_era(
        sess,
        report_context,
        site,
        start_date,
        finish_date,
        forecast_from,
        has_scenario_generation=len(extra_sss) > 0,
    )

    if len(extra_sss) > 0:
        if True in extra_sss:
            sup_deltas = site_deltas["supply_deltas"][True]["gen"]
            imp_ss_name = site.code + "_extra_gen_TRUE"
            imp_ss = ScenarioSource(
                sess,
                start_date,
                finish_date,
                True,
                report_context,
                sup_deltas,
                displaced_era.imp_supplier_contract,
                imp_ss_name,
            )
        else:
            imp_ss_name = imp_ss = None
        if False in extra_sss:
            exp_ss_name = site.code + "_extra_gen_FALSE"
            sup_deltas = site_deltas["supply_deltas"][False]["gen"]
            exp_ss = ScenarioSource(
                sess,
                start_date,
                finish_date,
                False,
                report_context,
                sup_deltas,
                displaced_era.imp_supplier_contract,
                exp_ss_name,
            )
        else:
            exp_ss_name = exp_ss = None

        calcs.append((0, imp_ss_name, exp_ss_name, imp_ss, exp_ss))

    # Check if exp net deltas haven't been consumed
    sup_deltas = site_deltas["supply_deltas"][False]["net"]
    if len(
            list(t for t in sup_deltas["site"]
                 if start_date <= t <= finish_date)) > 0:
        ss_name = site.code + "_extra_net_export"
        ss = SupplySource(
            sess,
            start_date,
            finish_date,
            forecast_from,
            displaced_era,
            False,
            report_context,
            era_maps=era_maps,
            deltas=sup_deltas,
        )

        calcs.append((0, None, ss_name, None, ss))
    return calcs, displaced_era, site_gen_types
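
A minimal usage sketch (with stand-in helpers, not chellow's own utilities) of the window clipping above: each era is trimmed to the report period with hh_max/hh_min, assuming a finish date of None means the era is still ongoing.

from datetime import datetime

def hh_max(a, b):
    # None stands for an open-ended date, i.e. unbounded in the future
    return None if a is None or b is None else max(a, b)

def hh_min(a, b):
    if a is None:
        return b
    if b is None:
        return a
    return min(a, b)

start_date = datetime(2023, 4, 1)
finish_date = datetime(2023, 4, 30, 23, 30)
era_start = datetime(2023, 4, 15)
era_finish = None  # ongoing era

ss_start = hh_max(era_start, start_date)     # 2023-04-15 00:00
ss_finish = hh_min(era_finish, finish_date)  # 2023-04-30 23:30
print(ss_start, ss_finish)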
Example #2
def _process_supply(
    sess,
    caches,
    supply_id,
    bill_map,
    forecast_date,
    contract,
    vbf,
    virtual_bill_titles,
    writer,
    titles,
    report_run,
):
    gaps = {}
    data_sources = {}
    market_role_code = contract.market_role.code
    bill_ids = bill_map[supply_id]

    while len(bill_ids) > 0:
        bill_id = list(sorted(bill_ids))[0]
        bill_ids.remove(bill_id)
        bill = (sess.query(Bill).filter(Bill.id == bill_id).options(
            joinedload(Bill.batch),
            joinedload(Bill.bill_type),
            joinedload(Bill.reads),
            joinedload(Bill.supply),
            joinedload(Bill.reads).joinedload(RegisterRead.present_type),
            joinedload(Bill.reads).joinedload(RegisterRead.previous_type),
        ).one())
        virtual_bill = {"problem": ""}
        supply = bill.supply

        read_dict = {}
        for read in bill.reads:
            gen_start = read.present_date.replace(hour=0).replace(minute=0)
            gen_finish = gen_start + relativedelta(days=1) - HH
            msn_match = False
            read_msn = read.msn
            for read_era in supply.find_eras(sess, gen_start, gen_finish):
                if read_msn == read_era.msn:
                    msn_match = True
                    break

            if not msn_match:
                virtual_bill["problem"] += (
                    f"The MSN {read_msn} of the register read {read.id} doesn't match "
                    f"the MSN of the era.")

            for dt, typ in [
                (read.present_date, read.present_type),
                (read.previous_date, read.previous_type),
            ]:
                key = str(dt) + "-" + read.msn
                try:
                    if typ != read_dict[key]:
                        virtual_bill[
                            "problem"] += f" Reads taken on {dt} have differing read types."
                except KeyError:
                    read_dict[key] = typ

        bill_start = bill.start_date
        bill_finish = bill.finish_date

        covered_start = bill_start
        covered_finish = bill_start
        covered_bdown = {"sum-msp-kwh": 0, "net-gbp": 0, "vat-gbp": 0}

        vb_elems = set()
        enlarged = True

        while enlarged:
            enlarged = False
            covered_elems = find_elements(bill)
            covered_bills = OrderedDict((b.id, b) for b in sess.query(
                Bill).join(Batch).join(Contract).join(MarketRole).filter(
                    Bill.supply == supply,
                    Bill.start_date <= covered_finish,
                    Bill.finish_date >= covered_start,
                    MarketRole.code == market_role_code,
                ).order_by(Bill.start_date, Bill.issue_date))
            while True:
                to_del = None
                for a, b in combinations(covered_bills.values(), 2):
                    if all((
                            a.start_date == b.start_date,
                            a.finish_date == b.finish_date,
                            a.kwh == -1 * b.kwh,
                            a.net == -1 * b.net,
                            a.vat == -1 * b.vat,
                            a.gross == -1 * b.gross,
                    )):
                        to_del = (a.id, b.id)
                        break
                if to_del is None:
                    break
                else:
                    for k in to_del:
                        del covered_bills[k]
                        bill_ids.discard(k)

            for k, covered_bill in tuple(covered_bills.items()):
                elems = find_elements(covered_bill)
                if elems.isdisjoint(covered_elems):
                    if k != bill.id:
                        del covered_bills[k]
                        continue
                else:
                    covered_elems.update(elems)

                if covered_bill.start_date < covered_start:
                    covered_start = covered_bill.start_date
                    enlarged = True
                    break

                if covered_bill.finish_date > covered_finish:
                    covered_finish = covered_bill.finish_date
                    enlarged = True
                    break

        if len(covered_bills) == 0:
            continue

        primary_covered_bill = None
        for covered_bill in covered_bills.values():
            bill_ids.discard(covered_bill.id)
            covered_bdown["net-gbp"] += float(covered_bill.net)
            covered_bdown["vat-gbp"] += float(covered_bill.vat)
            covered_bdown["sum-msp-kwh"] += float(covered_bill.kwh)
            covered_rates = defaultdict(set)
            for k, v in loads(covered_bill.breakdown).items():
                if k in ("raw_lines", "raw-lines"):
                    continue

                if isinstance(v, list):
                    covered_rates[k].update(set(v))
                else:
                    if isinstance(v, Decimal):
                        v = float(v)
                    try:
                        covered_bdown[k] += v
                    except KeyError:
                        covered_bdown[k] = v
                    except TypeError as detail:
                        raise BadRequest(
                            f"For key {k} in {[b.id for b in covered_bills.values()]} "
                            f"the value {v} can't be added to the existing value "
                            f"{covered_bdown[k]}. {detail}")

                    if k.endswith("-gbp"):
                        elem = k[:-4]
                        covered_elems.add(elem)
                        add_gap(
                            caches,
                            gaps,
                            elem,
                            covered_bill.start_date,
                            covered_bill.finish_date,
                            False,
                            v,
                        )

            for k, v in covered_rates.items():
                covered_bdown[k] = v.pop() if len(v) == 1 else None

            if primary_covered_bill is None or (
                (covered_bill.finish_date - covered_bill.start_date) >
                (primary_covered_bill.finish_date -
                 primary_covered_bill.start_date)):
                primary_covered_bill = covered_bill

        metered_kwh = 0
        for era in (sess.query(Era).filter(
                Era.supply == supply,
                Era.start_date <= covered_finish,
                or_(Era.finish_date == null(),
                    Era.finish_date >= covered_start),
        ).distinct().options(
                joinedload(Era.channels),
                joinedload(Era.cop),
                joinedload(Era.dc_contract),
                joinedload(Era.exp_llfc),
                joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
                joinedload(Era.exp_supplier_contract),
                joinedload(Era.imp_llfc),
                joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
                joinedload(Era.imp_supplier_contract),
                joinedload(Era.mop_contract),
                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                joinedload(Era.pc),
                joinedload(Era.supply).joinedload(Supply.dno),
                joinedload(Era.supply).joinedload(Supply.gsp_group),
                joinedload(Era.supply).joinedload(Supply.source),
        )):

            chunk_start = hh_max(covered_start, era.start_date)
            chunk_finish = hh_min(covered_finish, era.finish_date)

            if contract not in (
                    era.mop_contract,
                    era.dc_contract,
                    era.imp_supplier_contract,
                    era.exp_supplier_contract,
            ):
                virtual_bill["problem"] += (
                    f"From {hh_format(chunk_start)} to {hh_format(chunk_finish)} "
                    f"the contract of the era doesn't match the contract of the bill."
                )
                continue

            if contract.market_role.code == "X":
                polarity = contract != era.exp_supplier_contract
            else:
                polarity = era.imp_supplier_contract is not None

            try:
                ds_key = (
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era.id,
                    polarity,
                    primary_covered_bill.id,
                )
                data_source = data_sources[ds_key]
            except KeyError:
                data_source = data_sources[
                    ds_key] = chellow.computer.SupplySource(
                        sess,
                        chunk_start,
                        chunk_finish,
                        forecast_date,
                        era,
                        polarity,
                        caches,
                        primary_covered_bill,
                    )
                vbf(data_source)

            if data_source.measurement_type == "hh":
                metered_kwh += sum(h["msp-kwh"] for h in data_source.hh_data)
            else:
                ds = chellow.computer.SupplySource(
                    sess,
                    chunk_start,
                    chunk_finish,
                    forecast_date,
                    era,
                    polarity,
                    caches,
                )
                metered_kwh += sum(h["msp-kwh"] for h in ds.hh_data)

            if market_role_code == "X":
                vb = data_source.supplier_bill
                vb_hhs = data_source.supplier_bill_hhs
            elif market_role_code == "C":
                vb = data_source.dc_bill
                vb_hhs = data_source.dc_bill_hhs
            elif market_role_code == "M":
                vb = data_source.mop_bill
                vb_hhs = data_source.mop_bill_hhs
            else:
                raise BadRequest("Odd market role.")

            for k, v in vb.items():
                try:
                    if isinstance(v, set):
                        virtual_bill[k].update(v)
                    else:
                        virtual_bill[k] += v
                except KeyError:
                    virtual_bill[k] = v
                except TypeError as detail:
                    raise BadRequest(f"For key {k} and value {v}. {detail}")

            for dt, bl in vb_hhs.items():
                for k, v in bl.items():
                    if all((k.endswith("-gbp"), k != "net-gbp", v != 0)):
                        add_gap(caches, gaps, k[:-4], dt, dt, True, v)

            for k in virtual_bill.keys():
                if k.endswith("-gbp"):
                    vb_elems.add(k[:-4])

        long_map = {}
        vb_keys = set(virtual_bill.keys())
        for elem in sorted(vb_elems, key=len, reverse=True):
            els = long_map[elem] = set()
            for k in tuple(vb_keys):
                if k.startswith(elem + "-"):
                    els.add(k)
                    vb_keys.remove(k)

        for elem in vb_elems.difference(covered_elems):
            for k in long_map[elem]:
                del virtual_bill[k]

        try:
            del virtual_bill["net-gbp"]
        except KeyError:
            pass

        virtual_bill["net-gbp"] = sum(v for k, v in virtual_bill.items()
                                      if k.endswith("-gbp"))

        era = supply.find_era_at(sess, bill_finish)
        if era is None:
            imp_mpan_core = exp_mpan_core = None
            site_code = site_name = None
            virtual_bill[
                "problem"] += "This bill finishes before or after the supply. "
        else:
            imp_mpan_core = era.imp_mpan_core
            exp_mpan_core = era.exp_mpan_core

            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.is_physical == true(), SiteEra.era == era).one())
            site_code = site.code
            site_name = site.name

        # Find bill to use for header data
        if bill.id not in covered_bills:
            for cbill in covered_bills.values():
                if bill.batch == cbill.batch:
                    bill = cbill

        values = {
            "batch": bill.batch.reference,
            "bill-reference": bill.reference,
            "bill-type": bill.bill_type.code,
            "bill-kwh": bill.kwh,
            "bill-net-gbp": bill.net,
            "bill-vat-gbp": bill.vat,
            "bill-start-date": bill_start,
            "bill-finish-date": bill_finish,
            "imp-mpan-core": imp_mpan_core,
            "exp-mpan-core": exp_mpan_core,
            "site-code": site_code,
            "site-name": site_name,
            "covered-from": covered_start,
            "covered-to": covered_finish,
            "covered-bills": sorted(covered_bills.keys()),
            "metered-kwh": metered_kwh,
        }

        for title in virtual_bill_titles:
            try:
                cov_val = covered_bdown[title]
                del covered_bdown[title]
            except KeyError:
                cov_val = None

            values[f"covered-{title}"] = cov_val

            try:
                virt_val = virtual_bill[title]
                del virtual_bill[title]
            except KeyError:
                virt_val = None

            values[f"virtual-{title}"] = virt_val

            if title.endswith("-gbp"):
                if isinstance(virt_val, (int, float, Decimal)):
                    if isinstance(cov_val, (int, float, Decimal)):
                        diff_val = float(cov_val) - float(virt_val)
                    else:
                        diff_val = 0 - float(virt_val)
                else:
                    diff_val = 0

                values[f"difference-{title}"] = diff_val

        report_run_titles = list(titles)
        for title in sorted(virtual_bill.keys()):
            virt_val = virtual_bill[title]
            virt_title = f"virtual-{title}"
            values[virt_title] = virt_val
            report_run_titles.append(virt_title)
            if title in covered_bdown:
                cov_title = f"covered-{title}"
                cov_val = covered_bdown[title]
                values[cov_title] = cov_val
                report_run_titles.append(cov_title)
                if title.endswith("-gbp"):
                    if isinstance(virt_val, (int, float, Decimal)):
                        if isinstance(cov_val, (int, float, Decimal)):
                            diff_val = float(cov_val) - float(virt_val)
                        else:
                            diff_val = 0 - float(virt_val)
                    else:
                        diff_val = 0

                    values[f"difference-{title}"] = diff_val

                    t = "difference-tpr-gbp"
                    try:
                        values[t] += diff_val
                    except KeyError:
                        values[t] = diff_val
                        report_run_titles.append(t)

        csv_row = []
        for t in titles:
            v = values[t]
            if t == "covered-bills":
                val = " | ".join(str(b) for b in v)
            else:
                val = csv_make_val(v)

            csv_row.append(val)

        for t in report_run_titles:
            if t not in titles:
                csv_row.append(t)
                csv_row.append(values[t])

        writer.writerow(csv_row)

        values["bill_id"] = bill.id
        values["batch_id"] = bill.batch.id
        values["supply_id"] = supply.id
        values["site_id"] = None if site_code is None else site.id
        report_run.insert_row(sess, "", report_run_titles, values,
                              {"is_checked": False})

        for bill in sess.query(Bill).filter(
                Bill.supply == supply,
                Bill.start_date <= covered_finish,
                Bill.finish_date >= covered_start,
        ):

            for k, v in loads(bill.breakdown).items():
                if k.endswith("-gbp"):
                    add_gap(
                        caches,
                        gaps,
                        k[:-4],
                        bill.start_date,
                        bill.finish_date,
                        False,
                        v,
                    )

        # Avoid long-running transactions
        sess.commit()

    clumps = []
    for element, elgap in sorted(gaps.items()):
        for start_date, hhgap in sorted(elgap.items()):
            if hhgap["has_virtual"] and not hhgap["has_covered"]:

                if len(clumps) == 0 or not all((
                        clumps[-1]["element"] == element,
                        clumps[-1]["finish_date"] + HH == start_date,
                )):
                    clumps.append({
                        "element": element,
                        "start_date": start_date,
                        "finish_date": start_date,
                        "gbp": hhgap["gbp"],
                    })
                else:
                    clumps[-1]["finish_date"] = start_date

    for i, clump in enumerate(clumps):
        vals = {}
        for title in titles:
            if title.startswith("difference-") and title.endswith("-gbp"):
                vals[title] = 0
            else:
                vals[title] = None

        vals["covered-problem"] = "_".join((
            "missing",
            clump["element"],
            "supplyid",
            str(supply.id),
            "from",
            hh_format(clump["start_date"]),
        ))
        vals["imp-mpan-core"] = imp_mpan_core
        vals["exp-mpan-core"] = exp_mpan_core
        vals["batch"] = "missing_bill"
        vals["bill-start-date"] = hh_format(clump["start_date"])
        vals["bill-finish-date"] = hh_format(clump["finish_date"])
        vals["difference-net-gbp"] = clump["gbp"]
        writer.writerow(csv_make_val(vals[title]) for title in titles)

        vals["bill_id"] = None
        vals["batch_id"] = None
        vals["supply_id"] = supply.id
        vals["site_id"] = None if site_code is None else site.id

        report_run.insert_row(sess, "", titles, vals, {"is_checked": False})

    # Avoid long-running transactions
    sess.commit()
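
A minimal sketch of the clumping pass above, with a hand-made gaps dict standing in for the one built by add_gap: consecutive half-hours that carry a virtual charge but no covered charge are merged into contiguous clumps per element.

from datetime import datetime, timedelta

HH = timedelta(minutes=30)

gaps = {
    "duos-availability": {
        datetime(2023, 1, 1, 0, 0): {"has_virtual": True, "has_covered": False, "gbp": 1.5},
        datetime(2023, 1, 1, 0, 30): {"has_virtual": True, "has_covered": False, "gbp": 1.5},
        datetime(2023, 1, 1, 2, 0): {"has_virtual": True, "has_covered": False, "gbp": 1.5},
    }
}

clumps = []
for element, elgap in sorted(gaps.items()):
    for start_date, hhgap in sorted(elgap.items()):
        if hhgap["has_virtual"] and not hhgap["has_covered"]:
            if len(clumps) == 0 or not (
                    clumps[-1]["element"] == element
                    and clumps[-1]["finish_date"] + HH == start_date):
                clumps.append({
                    "element": element,
                    "start_date": start_date,
                    "finish_date": start_date,
                    "gbp": hhgap["gbp"],
                })
            else:
                clumps[-1]["finish_date"] = start_date

print(clumps)  # two clumps: 00:00-00:30 and a single half-hour at 02:00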
Example #3
def content(contract_id, end_year, end_month, months, user):
    caches = {}
    sess = f = supply_source = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)

        month_list = list(
            c_months_u(finish_year=end_year,
                       finish_month=end_month,
                       months=months))
        start_date, finish_date = month_list[0][0], month_list[-1][-1]

        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            "dc_virtual_bills.csv", user)

        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")

        bill_titles = chellow.computer.contract_func(caches, contract,
                                                     "virtual_bill_titles")()
        header_titles = [
            "Import MPAN Core",
            "Export MPAN Core",
            "Start Date",
            "Finish Date",
        ]

        vb_func = chellow.computer.contract_func(caches, contract,
                                                 "virtual_bill")

        writer.writerow(header_titles + bill_titles)

        for era in (sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract,
        ).options(joinedload(Era.channels)).order_by(Era.supply_id)):

            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ""
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core

            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = "" if exp_mpan_core is None else exp_mpan_core

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)

            vals = [
                imp_mpan_core_str,
                exp_mpan_core_str,
                hh_format(chunk_start),
                hh_format(chunk_finish),
            ]

            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, is_import,
                caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill

            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]

            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))

            writer.writerow(vals)

            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += ("with supply " + supply_source.mpan_core +
                    " starting at " + hh_format(supply_source.start_date) +
                    " ")
        msg += str(e)
        writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + "\n"
        f.write(msg)
    finally:
        f.close()
        os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
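
A minimal sketch of the download pattern above (the paths here are hypothetical; chellow.dloads.make_names is what supplies them in the report): the CSV is written under a "running" name and only renamed to its finished name once writing is complete, so a half-written file is never picked up as a finished report.

import csv
import os
import tempfile

out_dir = tempfile.mkdtemp()
running_name = os.path.join(out_dir, "RUNNING_dc_virtual_bills.csv")
finished_name = os.path.join(out_dir, "FINISHED_dc_virtual_bills.csv")

f = None
try:
    f = open(running_name, mode="w", newline="")
    writer = csv.writer(f, lineterminator="\n")
    writer.writerow(
        ["Import MPAN Core", "Export MPAN Core", "Start Date", "Finish Date"])
    # ... one row per era would be written here ...
finally:
    if f is not None:
        f.close()
        os.rename(running_name, finished_name)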
Example #4
def content(supply_id, start_date, finish_date, user):
    forecast_date = to_utc(Datetime.max)
    caches = {}
    f = sess = era = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        w.writerow(
            (
                "Era Start", "Era Finish", "Supply Id", "Supply Name",
                "Source", "Generator Type", "Site Code", "Site Name",
                "Associated Site Codes", "From", "To", "PC", "MTC", "CoP",
                "SSC", "Properties", "MOP Contract", "MOP Account",
                "DC Contract", "DC Account", "Normal Reads", "Type",
                "Supply Start", "Supply Finish", "Import LLFC",
                "Import MPAN Core", "Import Supply Capacity",
                "Import Supplier",
                "Import Total MSP kWh", "Import Non-actual MSP kWh",
                "Import Total GSP kWh", "Import MD / kW", "Import MD Date",
                "Import MD / kVA",
                "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                "Export Supply Capacity", "Export Supplier",
                "Export Total MSP kWh", "Export Non-actual MSP kWh",
                "Export GSP kWh", "Export MD / kW", "Export MD Date",
                "Export MD / kVA", "Export Bad HHs"))

        eras = sess.query(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(
                Era.supply_id, Era.start_date).options(
            joinedload(Era.supply),
            joinedload(Era.supply).joinedload(Supply.source),
            joinedload(Era.supply).joinedload(Supply.generator_type),
            joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.imp_llfc),
            joinedload(Era.exp_llfc),
            joinedload(Era.mop_contract),
            joinedload(Era.dc_contract),
            joinedload(Era.channels),
            joinedload(Era.site_eras).joinedload(SiteEra.site),
            joinedload(Era.pc), joinedload(Era.cop),
            joinedload(Era.mtc).joinedload(Mtc.meter_type),
            joinedload(Era.imp_supplier_contract),
            joinedload(Era.exp_supplier_contract),
            joinedload(Era.ssc),
            joinedload(Era.site_eras))

        if supply_id is not None:
            eras = eras.filter(Era.supply == Supply.get_by_id(sess, supply_id))

        for era in eras:
            supply = era.supply
            site_codes = set()
            site = None
            for site_era in era.site_eras:
                if site_era.is_physical:
                    site = site_era.site
                else:
                    site_codes.add(site_era.site.code)

            sup_eras = sess.query(Era).filter(
                Era.supply == supply).order_by(Era.start_date).all()
            supply_start = sup_eras[0].start_date
            supply_finish = sup_eras[-1].finish_date

            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code

            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code

            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                    Bill.supply == supply, BillType.code != 'W',
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date,
                    ReadType.code.in_(NORMAL_READ_TYPES)).options(
                        joinedload(RegisterRead.bill)),

                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                    Bill.supply == supply, BillType.code != 'W',
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date,
                    ReadType.code.in_(NORMAL_READ_TYPES)).options(
                        joinedload(RegisterRead.bill))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(str(rdate) + "_" + read.msn)

            supply_type = era.meter_category

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            num_hh = int(
                (chunk_finish + HH - chunk_start).total_seconds() / (30 * 60))

            w.writerow(
                [
                    hh_format(era.start_date),
                    hh_format(era.finish_date, ongoing_str=''),
                    supply.id, supply.name, supply.source.code, generator_type,
                    site.code, site.name, '| '.join(sorted(site_codes)),
                    hh_format(start_date), hh_format(finish_date), era.pc.code,
                    era.mtc.code, era.cop.code, ssc_code, era.properties,
                    era.mop_contract.name, era.mop_account,
                    era.dc_contract.name, era.dc_account, len(prime_reads),
                    supply_type, hh_format(supply_start),
                    hh_format(supply_finish, ongoing_str='')] + mpan_bit(
                    sess, supply, True, num_hh, era, chunk_start,
                    chunk_finish, forecast_date, caches) + mpan_bit(
                    sess, supply, False, num_hh, era, chunk_start,
                    chunk_finish, forecast_date, caches))

            # Avoid a long-running transaction
            sess.rollback()
    except BadRequest as e:
        if era is None:
            pref = "Problem: "
        else:
            pref = "Problem with era " + chellow.utils.url_root + "eras/" + \
                str(era.id) + "/edit : "
        f.write(pref + e.description)
    except BaseException as e:
        if era is None:
            pref = "Problem: "
        else:
            pref = "Problem with era " + str(era.id) + ": "
        f.write(pref + str(e))
        f.write(traceback.format_exc())
    finally:
        sess.close()
        f.close()
        os.rename(running_name, finished_name)
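
A minimal worked sketch of the half-hour count in the example above: chunk_finish marks the start of the last half-hour in the chunk, so one extra HH is added before dividing by 30 minutes.

from datetime import datetime, timedelta

HH = timedelta(minutes=30)

chunk_start = datetime(2023, 4, 1, 0, 0)
chunk_finish = datetime(2023, 4, 1, 23, 30)  # last half-hour of the day

num_hh = int((chunk_finish + HH - chunk_start).total_seconds() / (30 * 60))
print(num_hh)  # 48 half-hours in one day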
Example #5
def content(site_id,
            g_supply_id,
            user,
            compression,
            finish_year,
            finish_month,
            months,
            now=None):
    if now is None:
        now = ct_datetime_now()
    report_context = {}
    sess = None
    month_list = list(
        c_months_u(finish_year=finish_year,
                   finish_month=finish_month,
                   months=months))
    start_date, finish_date = month_list[0][0], month_list[-1][-1]

    try:
        sess = Session()
        base_name = [
            "g_monthly_duration",
            hh_format(start_date).replace(" ",
                                          "_").replace(":",
                                                       "").replace("-", ""),
            "for",
            str(months),
            "months",
        ]

        forecast_from = chellow.computer.forecast_date()

        sites = (sess.query(Site).join(SiteGEra).join(GEra).filter(
            SiteGEra.is_physical == true()).distinct().order_by(Site.code))
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append("site")
            base_name.append(site.code)
        if g_supply_id is not None:
            g_supply = GSupply.get_by_id(sess, g_supply_id)
            base_name.append("g_supply")
            base_name.append(str(g_supply.id))
            sites = sites.filter(GEra.g_supply == g_supply)

        running_name, finished_name = chellow.dloads.make_names(
            "_".join(base_name) + ".ods", user)

        rf = open(running_name, "wb")
        site_rows = []
        g_era_rows = []

        era_header_titles = [
            "creation_date",
            "mprn",
            "supply_name",
            "exit_zone",
            "msn",
            "unit",
            "contract",
            "site_id",
            "site_name",
            "associated_site_ids",
            "month",
        ]
        site_header_titles = [
            "creation_date",
            "site_id",
            "site_name",
            "associated_site_ids",
            "month",
        ]
        summary_titles = ["kwh", "gbp", "billed_kwh", "billed_gbp"]

        vb_titles = []
        conts = (sess.query(GContract).join(GEra).join(GSupply).filter(
            GEra.start_date <= finish_date,
            or_(GEra.finish_date == null(), GEra.finish_date >= start_date),
        ).distinct().order_by(GContract.id))
        if g_supply_id is not None:
            conts = conts.filter(GEra.g_supply_id == g_supply_id)
        for cont in conts:
            title_func = chellow.computer.contract_func(
                report_context, cont, "virtual_bill_titles")
            if title_func is None:
                raise Exception("For the contract " + cont.name +
                                " there doesn't seem " +
                                "to be a 'virtual_bill_titles' function.")
            for title in title_func():
                if title not in vb_titles:
                    vb_titles.append(title)

        g_era_rows.append(era_header_titles + summary_titles + vb_titles)
        site_rows.append(site_header_titles + summary_titles)

        for month_start, month_finish in month_list:
            for site in sites.filter(
                    GEra.start_date <= month_finish,
                    or_(GEra.finish_date == null(),
                        GEra.finish_date >= month_start),
            ):
                site_kwh = site_gbp = site_billed_kwh = site_billed_gbp = 0

                for g_era in (sess.query(GEra).join(SiteGEra).filter(
                        SiteGEra.site == site,
                        SiteGEra.is_physical == true(),
                        GEra.start_date <= month_finish,
                        or_(GEra.finish_date == null(),
                            GEra.finish_date >= month_start),
                ).options(
                        joinedload(GEra.g_contract),
                        joinedload(GEra.g_supply),
                        joinedload(GEra.g_supply).joinedload(
                            GSupply.g_exit_zone),
                ).order_by(GEra.id)):

                    g_supply = g_era.g_supply

                    if g_supply_id is not None and g_supply.id != g_supply_id:
                        continue

                    ss_start = hh_max(g_era.start_date, month_start)
                    ss_finish = hh_min(g_era.finish_date, month_finish)

                    ss = GDataSource(
                        sess,
                        ss_start,
                        ss_finish,
                        forecast_from,
                        g_era,
                        report_context,
                        None,
                    )

                    contract = g_era.g_contract
                    vb_function = contract_func(report_context, contract,
                                                "virtual_bill")
                    if vb_function is None:
                        raise BadRequest(
                            "The contract " + contract.name +
                            " doesn't have the virtual_bill() function.")
                    vb_function(ss)
                    bill = ss.bill

                    try:
                        gbp = bill["net_gbp"]
                    except KeyError:
                        gbp = 0
                        bill["problem"] += (
                            "For the supply " + ss.mprn +
                            " the virtual bill " + str(bill) +
                            " from the contract " + contract.name +
                            " does not contain the net_gbp key.")
                    try:
                        kwh = bill["kwh"]
                    except KeyError:
                        kwh = 0
                        bill["problem"] += ("For the supply " + ss.mprn +
                                            " the virtual bill " + str(bill) +
                                            " from the contract " +
                                            contract.name +
                                            " does not contain the 'kwh' key.")

                    billed_kwh = billed_gbp = 0

                    g_era_associates = {
                        s.site.code
                        for s in g_era.site_g_eras if not s.is_physical
                    }

                    for g_bill in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= ss_finish,
                            GBill.finish_date >= ss_start,
                    ):
                        bill_start = g_bill.start_date
                        bill_finish = g_bill.finish_date
                        bill_duration = (bill_finish - bill_start
                                         ).total_seconds() + (30 * 60)
                        overlap_duration = (min(bill_finish, ss_finish) - max(
                            bill_start, ss_start)).total_seconds() + (30 * 60)
                        overlap_proportion = overlap_duration / bill_duration
                        billed_kwh += overlap_proportion * float(g_bill.kwh)
                        billed_gbp += overlap_proportion * float(g_bill.net)

                    associated_site_ids = ",".join(sorted(g_era_associates))
                    g_era_rows.append([
                        make_val(v) for v in [
                            now,
                            g_supply.mprn,
                            g_supply.name,
                            g_supply.g_exit_zone.code,
                            g_era.msn,
                            g_era.g_unit.code,
                            contract.name,
                            site.code,
                            site.name,
                            associated_site_ids,
                            month_finish,
                            kwh,
                            gbp,
                            billed_kwh,
                            billed_gbp,
                        ]
                    ] + [make_val(bill.get(t)) for t in vb_titles])

                    site_kwh += kwh
                    site_gbp += gbp
                    site_billed_kwh += billed_kwh
                    site_billed_gbp += billed_gbp

                linked_sites = ", ".join(s.code
                                         for s in site.find_linked_sites(
                                             sess, month_start, month_finish))

                site_rows.append([
                    make_val(v) for v in [
                        now,
                        site.code,
                        site.name,
                        linked_sites,
                        month_finish,
                        site_kwh,
                        site_gbp,
                        site_billed_kwh,
                        site_billed_gbp,
                    ]
                ])
                sess.rollback()
            write_spreadsheet(rf, compression, site_rows, g_era_rows)

    except BadRequest as e:
        site_rows.append(["Problem " + e.description])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names("error.txt", user)
            ef = open(r_name, "w")
            ef.write(msg + "\n")
            ef.close()
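
A minimal sketch of the billed-energy apportionment above: a bill's kWh and net GBP are scaled by the fraction of the bill period that overlaps the reporting chunk, with an extra 30 * 60 seconds because finish dates mark the start of the final half-hour.

from datetime import datetime

bill_start = datetime(2023, 3, 15)
bill_finish = datetime(2023, 4, 14, 23, 30)
ss_start = datetime(2023, 4, 1)
ss_finish = datetime(2023, 4, 30, 23, 30)
bill_kwh, bill_net = 1000.0, 450.0

bill_duration = (bill_finish - bill_start).total_seconds() + 30 * 60
overlap_duration = (
    min(bill_finish, ss_finish) - max(bill_start, ss_start)
).total_seconds() + 30 * 60
overlap_proportion = overlap_duration / bill_duration  # 14 of the bill's 31 days

billed_kwh = overlap_proportion * bill_kwh
billed_gbp = overlap_proportion * bill_net
print(round(billed_kwh, 1), round(billed_gbp, 2))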
Example #6
def content(start_date, finish_date, contract_id, user):
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'virtual_bills.csv', user)

        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        contract = Contract.get_supplier_by_id(sess, contract_id)
        forecast_date = chellow.computer.forecast_date()

        month_start = Datetime(
            start_date.year, start_date.month, 1, tzinfo=pytz.utc)

        month_finish = month_start + relativedelta(months=1) - HH

        bill_titles = contract_func(caches, contract, 'virtual_bill_titles')()

        for tpr in sess.query(Tpr).join(MeasurementRequirement).join(Ssc). \
                join(Era).filter(
                    Era.start_date <= finish_date, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date), or_(
                        Era.imp_supplier_contract == contract,
                        Era.exp_supplier_contract == contract)
                ).order_by(Tpr.code).distinct():
            for suffix in ('-kwh', '-rate', '-gbp'):
                bill_titles.append(tpr.code + suffix)
        writer.writerow(
            ['MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To'] +
            bill_titles)
        vb_func = contract_func(caches, contract, 'virtual_bill')

        while not month_start > finish_date:
            period_start = hh_max(start_date, month_start)
            period_finish = hh_min(finish_date, month_finish)

            for era in sess.query(Era).distinct().filter(
                    or_(
                        Era.imp_supplier_contract == contract,
                        Era.exp_supplier_contract == contract),
                    Era.start_date <= period_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= period_start)):

                chunk_start = hh_max(period_start, era.start_date)
                chunk_finish = hh_min(period_finish, era.finish_date)

                polarities = []
                if era.imp_supplier_contract == contract:
                    polarities.append(True)
                if era.exp_supplier_contract == contract:
                    polarities.append(False)
                for polarity in polarities:
                    vals = []
                    data_source = SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date, era,
                        polarity, caches)

                    site = sess.query(Site).join(SiteEra).filter(
                        SiteEra.era == era,
                        SiteEra.is_physical == true()).one()

                    vals = [
                        data_source.mpan_core, site.code, site.name,
                        data_source.supplier_account,
                        hh_format(data_source.start_date),
                        hh_format(data_source.finish_date)]

                    vb_func(data_source)
                    bill = data_source.supplier_bill
                    for title in bill_titles:
                        if title in bill:
                            val = make_val(bill[title])
                            del bill[title]
                        else:
                            val = ''
                        vals.append(val)

                    for k in sorted(bill.keys()):
                        vals.append(k)
                        vals.append(str(bill[k]))
                    writer.writerow(vals)

            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except BadRequest as e:
        writer.writerow([e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        f.close()
        os.rename(running_name, finished_name)
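
A minimal sketch of the month-stepping loop above (assuming dateutil is available, as the report itself uses relativedelta): each month runs from its first half-hour to the half-hour before the start of the next month, clipped to the requested range.

from datetime import datetime, timedelta, timezone

from dateutil.relativedelta import relativedelta

HH = timedelta(minutes=30)

start_date = datetime(2023, 1, 10, tzinfo=timezone.utc)
finish_date = datetime(2023, 3, 20, 23, 30, tzinfo=timezone.utc)

month_start = datetime(start_date.year, start_date.month, 1, tzinfo=timezone.utc)
month_finish = month_start + relativedelta(months=1) - HH

while not month_start > finish_date:
    period_start = max(start_date, month_start)
    period_finish = min(finish_date, month_finish)
    print(period_start, "->", period_finish)
    month_start += relativedelta(months=1)
    month_finish = month_start + relativedelta(months=1) - HH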
Example #7
def content(year, supply_id, user):
    f = sess = None
    try:
        sess = Session()
        fname = ["crc", str(year), str(year + 1)]
        if supply_id is None:
            fname.append("all_supplies")
        else:
            fname.append("supply_" + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            "_".join(fname) + ".csv", user)
        f = open(running_name, mode="w", newline="")
        w = csv.writer(f, lineterminator="\n")

        ACTUAL_READ_TYPES = ["N", "N3", "C", "X", "CP"]
        w.writerow((
            "Chellow Supply Id",
            "Report Start",
            "Report Finish",
            "MPAN Core",
            "Site Id",
            "Site Name",
            "From",
            "To",
            "NHH Breakdown",
            "Actual HH Normal Days",
            "Actual AMR Normal Days",
            "Actual NHH Normal Days",
            "Actual Unmetered Normal Days",
            "Max HH Normal Days",
            "Max AMR Normal Days",
            "Max NHH Normal Days",
            "Max Unmetered Normal Days",
            "Total Actual Normal Days",
            "Total Max Normal Days",
            "Data Type",
            "HH kWh",
            "AMR kWh",
            "NHH kWh",
            "Unmetered kwh",
            "HH Filled kWh",
            "AMR Filled kWh",
            "Total kWh",
            "Note",
        ))

        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH

        supplies = (sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(("net", "gen-net")),
            Era.imp_mpan_core != null(),
            Era.start_date <= year_finish,
            or_(Era.finish_date == null(), Era.finish_date >= year_start),
        ).distinct().order_by(Supply.id))
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)

        meter_types = ("hh", "amr", "nhh", "unmetered")

        for supply in supplies:
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ("hh", "amr")])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])

            breakdown = ""
            eras = (sess.query(Era).filter(
                Era.supply == supply,
                Era.start_date <= year_finish,
                or_(Era.finish_date == null(), Era.finish_date >= year_start),
            ).order_by(Era.start_date).all())
            supply_from = hh_max(eras[0].start_date, year_start)
            supply_to = hh_min(eras[-1].finish_date, year_finish)

            for era in eras:

                meter_type = era.meter_category

                period_start = hh_max(era.start_date, year_start)
                period_finish = hh_min(era.finish_date, year_finish)

                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)

                mpan_core = era.imp_mpan_core
                site = (sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one())

                if meter_type in ("nhh", "amr"):

                    read_list = []
                    read_keys = {}
                    pairs = []

                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(
                            BillType).join(RegisterRead.present_type).filter(
                                RegisterRead.units == 0,
                                ReadType.code.in_(ACTUAL_READ_TYPES),
                                Bill.supply == supply,
                                RegisterRead.present_date < period_start,
                                BillType.code != "W",
                            ).order_by(RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(
                            BillType).join(RegisterRead.previous_type).filter(
                                RegisterRead.units == 0,
                                ReadType.code.in_(ACTUAL_READ_TYPES),
                                Bill.supply == supply,
                                RegisterRead.previous_date < period_start,
                                BillType.code != "W",
                            ).order_by(RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(
                            BillType).join(RegisterRead.present_type).filter(
                                RegisterRead.units == 0,
                                ReadType.code.in_(ACTUAL_READ_TYPES),
                                Bill.supply == supply,
                                RegisterRead.present_date >= period_start,
                                BillType.code != "W",
                            ).order_by(RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(
                            BillType).join(RegisterRead.previous_type).filter(
                                RegisterRead.units == 0,
                                ReadType.code.in_(ACTUAL_READ_TYPES),
                                Bill.supply == supply,
                                RegisterRead.previous_date >= period_start,
                                BillType.code != "W",
                            ).order_by(RegisterRead.previous_date))

                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads

                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break

                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = "_".join([str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue

                                pres_bill = (
                                    sess.query(Bill).join(BillType).filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != "W",
                                    ).order_by(Bill.issue_date.desc(),
                                               BillType.code).first())
                                if pres_bill != pres_read.bill:
                                    continue

                                reads = dict((
                                    read.tpr.code,
                                    float(read.present_value) *
                                    float(read.coefficient),
                                ) for read in sess.query(RegisterRead).filter(
                                    RegisterRead.units == 0,
                                    RegisterRead.bill == pres_bill,
                                    RegisterRead.present_date == pres_date,
                                    RegisterRead.msn == pres_msn,
                                ))

                                prime_pres_read = {
                                    "date": pres_date,
                                    "reads": reads,
                                    "msn": pres_msn,
                                }
                                read_keys[read_key] = None
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break

                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = "_".join([str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = (
                                    sess.query(Bill).join(BillType).filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != "W",
                                    ).order_by(Bill.issue_date.desc(),
                                               BillType.code).first())
                                if prev_bill != prev_read.bill:
                                    continue

                                reads = dict((
                                    read.tpr.code,
                                    float(read.previous_value) *
                                    float(read.coefficient),
                                ) for read in sess.query(RegisterRead).filter(
                                    RegisterRead.units == 0,
                                    RegisterRead.bill_id == prev_bill.id,
                                    RegisterRead.previous_date == prev_date,
                                    RegisterRead.msn == prev_msn,
                                ))

                                prime_prev_read = {
                                    "date": prev_date,
                                    "reads": reads,
                                    "msn": prev_msn,
                                }
                                read_keys[read_key] = None

                            if prime_pres_read is None and prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if (prime_pres_read["date"] <=
                                            prime_prev_read["date"]):
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if (prime_prev_read["date"] >=
                                            prime_pres_read["date"]):
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None

                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]

                                if aft_read["msn"] == fore_read["msn"] and set(
                                        aft_read["reads"].keys()) == set(
                                            fore_read["reads"].keys()):
                                    pair_start_date = aft_read["date"] + HH
                                    pair_finish_date = fore_read["date"]

                                    num_hh = ((pair_finish_date + HH -
                                               pair_start_date
                                               ).total_seconds()) / (30 * 60)

                                    tprs = {}
                                    for tpr_code, initial_val in aft_read[
                                            "reads"].items():
                                        end_val = fore_read["reads"][tpr_code]

                                        kwh = end_val - initial_val

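                                        # A negative advance means the
                                        # register has clocked over; add a
                                        # full register cycle to recover the
                                        # true consumption.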
                                        if kwh < 0:
                                            digits = int(
                                                math.log10(initial_val)) + 1
                                            kwh = 10**digits + kwh

                                        tprs[tpr_code] = kwh / num_hh

                                    pairs.append({
                                        "start-date": pair_start_date,
                                        "finish-date": pair_finish_date,
                                        "tprs": tprs,
                                    })

                                    if len(pairs) > 0 and (
                                            not is_forwards or
                                        (is_forwards and read_list[-1]["date"]
                                         > period_finish)):
                                        break

                    breakdown += "read list - \n" + dumps(read_list) + "\n"
                    if len(pairs) == 0:
                        pairs.append({
                            "start-date": period_start,
                            "finish-date": period_finish,
                            "tprs": {
                                "00001": 0
                            },
                        })
                    else:
                        for pair in pairs:
                            pair_start = pair["start-date"]
                            pair_finish = pair["finish-date"]
                            if pair_start >= year_start and pair_finish <= year_finish:
                                block_start = hh_max(pair_start, period_start)
                                block_finish = hh_min(pair_finish,
                                                      period_finish)

                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (block_finish -
                                         block_start).total_seconds() +
                                        60 * 30) / (60 * 60 * 24)

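                    # Normalise the pairs so that together they exactly tile
                    # the period: smooth makes consecutive pairs contiguous,
                    # stretch and squash pin the outer pairs to the period
                    # boundaries, and chop drops pairs lying wholly outside
                    # the period.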
                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]["finish-date"] = (
                            pairs[i]["start-date"] - HH)

                    # stretch
                    if pairs[0]["start-date"] > period_start:
                        pairs[0]["start-date"] = period_start

                    if pairs[-1]["finish-date"] < period_finish:
                        pairs[-1]["finish-date"] = period_finish

                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair["start-date"] > period_finish
                        and not pair["finish-date"] < period_start
                    ]

                    # squash
                    if pairs[0]["start-date"] < period_start:
                        pairs[0]["start-date"] = period_start

                    if pairs[-1]["finish-date"] > period_finish:
                        pairs[-1]["finish-date"] = period_finish

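                    # Each pair holds kWh per half hour for each TPR; multiply
                    # by the pair's number of half hours to get its total kWh.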
                    for pair in pairs:
                        pair_hhs = ((pair["finish-date"] - pair["start-date"]
                                     ).total_seconds() + 30 * 60) / (60 * 30)
                        pair["pair_hhs"] = pair_hhs
                        for tpr_code, pair_kwh in pair["tprs"].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs

                    breakdown += "pairs - \n" + dumps(pairs)

                elif meter_type == "hh":
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).join(
                            Channel).filter(
                                Channel.imp_related == true(),
                                Channel.channel_type == "ACTIVE",
                                Channel.era == era,
                                HhDatum.start_date >= period_start,
                                HhDatum.start_date <= period_finish,
                            ).order_by(HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).join(
                            Channel).join(Era).filter(
                                Channel.imp_related == true(),
                                Channel.channel_type == "ACTIVE",
                                Era.supply == supply,
                                HhDatum.start_date >= year_start,
                                HhDatum.start_date <= year_finish,
                            ).order_by(HhDatum.id))

                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (period_finish + HH -
                                  period_start).total_seconds() / (60 * 30)
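                    # Estimate the kWh for any missing half hours in the
                    # period using the year's average half-hourly consumption.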
                    if year_len_kwhs > 0:
                        filled_kwh[meter_type] += (
                            year_sum_kwhs / year_len_kwhs *
                            (period_hhs - period_len_kwhs))
                    normal_days[meter_type] += (sess.query(
                        func.count(HhDatum.value)).join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == "ACTIVE",
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == "A",
                        ).one()[0] / 48)
                elif meter_type == "unmetered":
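                    # era.imp_sc is treated here as an annual kWh total for
                    # the unmetered supply, pro-rated by the period's share
                    # of the year.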
                    year_seconds = (year_finish -
                                    year_start).total_seconds() + 60 * 30
                    period_seconds = (period_finish -
                                      period_start).total_seconds() + 60 * 30

                    total_kwh[meter_type] += (
                        era.imp_sc * period_seconds / year_seconds)

                    normal_days[meter_type] += period_seconds / (60 * 60 * 24)

            # Classed as 'Actual' if the actual days cover at least 183/365
            # of the maximum possible days in the year
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            is_normal = total_normal_days / total_max_normal_days >= 183 / 365

            w.writerow(
                [
                    supply.id,
                    hh_format(year_start),
                    hh_format(year_finish),
                    mpan_core,
                    site.code,
                    site.name,
                    hh_format(supply_from),
                    hh_format(supply_to),
                    breakdown,
                ] + [normal_days[t] for t in meter_types] +
                [max_normal_days[t] for t in meter_types] + [
                    total_normal_days,
                    total_max_normal_days,
                    "Actual" if is_normal else "Estimated",
                ] + [total_kwh[t] for t in meter_types] +
                [filled_kwh[t] for t in ("hh", "amr")] +
                [sum(total_kwh.values()) + sum(filled_kwh.values()), ""])

            # avoid a long running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
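
The smooth / stretch / chop / squash steps in the example above normalise the list of read pairs so that together they cover the reporting period exactly. The following is a minimal, self-contained sketch of that normalisation, using a hypothetical normalise_pairs helper and made-up dates rather than Chellow's own objects:

from datetime import datetime as Datetime, timedelta

HH = timedelta(minutes=30)


def normalise_pairs(pairs, period_start, period_finish):
    # smooth: make consecutive pairs contiguous
    for i in range(1, len(pairs)):
        pairs[i - 1]["finish-date"] = pairs[i]["start-date"] - HH

    # stretch: extend the outer pairs to reach the period boundaries
    if pairs[0]["start-date"] > period_start:
        pairs[0]["start-date"] = period_start
    if pairs[-1]["finish-date"] < period_finish:
        pairs[-1]["finish-date"] = period_finish

    # chop: drop pairs lying entirely outside the period
    pairs = [
        p for p in pairs
        if not p["start-date"] > period_finish
        and not p["finish-date"] < period_start
    ]

    # squash: clip the outer pairs to the period boundaries
    if pairs[0]["start-date"] < period_start:
        pairs[0]["start-date"] = period_start
    if pairs[-1]["finish-date"] > period_finish:
        pairs[-1]["finish-date"] = period_finish

    return pairs


# Hypothetical usage: two pairs and a one-day period.
period_start = Datetime(2020, 4, 1)
period_finish = Datetime(2020, 4, 2) - HH
pairs = [
    {"start-date": Datetime(2020, 3, 30), "finish-date": Datetime(2020, 3, 31)},
    {"start-date": Datetime(2020, 4, 1, 12), "finish-date": Datetime(2020, 4, 3)},
]
print(normalise_pairs(pairs, period_start, period_finish))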
Exemple #8
0
def _process_era(
    sess,
    caches,
    vb_func,
    forecast_date,
    bill_titles,
    contract,
    period_start,
    period_finish,
    era,
):
    chunk_start = hh_max(period_start, era.start_date)
    chunk_finish = hh_min(period_finish, era.finish_date)

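    # The contract may be attached to the era as the import supplier, the
    # export supplier, or both.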
    polarities = []
    if era.imp_supplier_contract == contract:
        polarities.append(True)
    if era.exp_supplier_contract == contract:
        polarities.append(False)
    for polarity in polarities:
        vals = []
        data_source = SupplySource(
            sess,
            chunk_start,
            chunk_finish,
            forecast_date,
            era,
            polarity,
            caches,
        )

        site = (
            sess.query(Site)
            .join(SiteEra)
            .filter(SiteEra.era == era, SiteEra.is_physical == true())
            .one()
        )

        vals = [
            data_source.mpan_core,
            site.code,
            site.name,
            data_source.supplier_account,
            data_source.start_date,
            data_source.finish_date,
        ]

        vb_func(data_source)
        bill = data_source.supplier_bill
        for title in bill_titles:
            if title in bill:
                val = bill[title]
                del bill[title]
            else:
                val = ""
            vals.append(val)

        for k in sorted(bill.keys()):
            vals.append(k)
            vals.append(str(bill[k]))

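        # Note that the return sits inside the loop, so only the first
        # matching polarity produces a row.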
        return vals
Exemple #9
0
    def __init__(
            self, sess, start_date, finish_date, forecast_date, g_era,
            caches, g_bill):
        self.sess = sess
        self.caches = caches
        self.forecast_date = forecast_date
        self.start_date = start_date
        self.finish_date = finish_date
        times = get_times(
            sess, caches, start_date, finish_date, forecast_date)
        self.years_back = times['years-back']
        self.history_start = times['history-start']
        self.history_finish = times['history-finish']

        self.problem = ''
        self.bill = defaultdict(int, {'problem': ''})
        self.hh_data = []
        self.rate_sets = defaultdict(set)

        self.g_bill = g_bill
        if self.g_bill is not None:
            self.g_bill_start = g_bill.start_date
            self.g_bill_finish = g_bill.finish_date
            self.is_last_g_bill_gen = \
                not self.g_bill_finish < self.start_date and not \
                self.g_bill_finish > self.finish_date

        self.g_era = g_era
        self.g_supply = g_era.g_supply
        self.mprn = self.g_supply.mprn
        self.account = g_era.account
        self.g_contract = g_era.g_contract

        self.consumption_info = ''

        if self.years_back == 0:
            hist_g_eras = [self.g_era]
        else:
            hist_g_eras = sess.query(GEra).filter(
                GEra.g_supply == self.g_supply,
                GEra.start_date <= self.history_finish,
                or_(
                    GEra.finish_date == null(),
                    GEra.finish_date >= self.history_start)).order_by(
                GEra.start_date).all()
            if len(hist_g_eras) == 0:
                hist_g_eras = sess.query(GEra).filter(
                    GEra.g_supply == self.g_supply).order_by(
                    GEra.start_date).limit(1).all()

        dte = start_date
        hist_map = {}

        for i, hist_g_era in enumerate(hist_g_eras):
            if self.history_start > hist_g_era.start_date:
                chunk_start = self.history_start
            else:
                if i == 0:
                    chunk_start = self.history_start
                else:
                    chunk_start = hist_g_era.start_date

            chunk_finish = hh_min(hist_g_era.finish_date, self.history_finish)
            if self.g_bill is None:
                read_list = []
                read_keys = {}
                pairs = []

                prior_pres_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType)
                    .join(GRegisterRead.pres_type).filter(
                        GReadType.code.in_(ACTUAL_READ_TYPES),
                        GBill.g_supply == self.g_supply,
                        GRegisterRead.present_date < chunk_start,
                        BillType.code != 'W').order_by(
                        GRegisterRead.pres_date.desc()))
                prior_prev_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType)
                    .join(GRegisterRead.prev_type).filter(
                        GReadType.code.in_(ACTUAL_READ_TYPES),
                        GBill.g_supply == self.g_supply,
                        GRegisterRead.prev_date < chunk_start,
                        BillType.code != 'W').order_by(
                        GRegisterRead.prev_date.desc()))
                next_pres_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType)
                    .join(GRegisterRead.pres_type).filter(
                        GReadType.code.in_(ACTUAL_READ_TYPES),
                        GBill.g_supply == self.g_supply,
                        GRegisterRead.pres_date >= chunk_start,
                        BillType.code != 'W').order_by(
                        GRegisterRead.pres_date))
                next_prev_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType)
                    .join(GRegisterRead.prev_type).filter(
                        GReadType.code.in_(ACTUAL_READ_TYPES),
                        GBill.g_supply == self.g_supply,
                        GRegisterRead.prev_date >= chunk_start,
                        BillType.code != 'W').order_by(
                        GRegisterRead.prev_date))

                for is_forwards in [False, True]:
                    if is_forwards:
                        pres_g_reads = next_pres_g_reads
                        prev_g_reads = next_prev_g_reads
                        read_list.reverse()
                    else:
                        pres_g_reads = prior_pres_g_reads
                        prev_g_reads = prior_prev_g_reads

                    prime_pres_g_read = None
                    prime_prev_g_read = None
                    while True:
                        while prime_pres_g_read is None:
                            try:
                                pres_g_read = next(pres_g_reads)
                            except StopIteration:
                                break

                            pres_date = pres_g_read.pres_date
                            pres_msn = pres_g_read.msn
                            read_key = '_'.join([str(pres_date), pres_msn])
                            if read_key in read_keys:
                                continue

                            pres_g_bill = sess.query(GBill) \
                                .join(GRegisterRead) \
                                .join(BillType).filter(
                                    GBill.g_supply == self.g_supply,
                                    GBill.finish_date >= pres_g_read.pres_date,
                                    GBill.start_date <= pres_g_read.pres_date,
                                    BillType.code != 'W').order_by(
                                    GBill.issue_date.desc(),
                                    BillType.code).first()

                            if pres_g_bill != pres_g_read.g_bill:
                                continue

                            reads = sess.query(GRegisterRead.pres_value). \
                                filter(
                                    GRegisterRead.g_bill == pres_g_bill,
                                    GRegisterRead.pres_date == pres_date,
                                    GRegisterRead.msn == pres_msn).all()

                            prime_pres_g_read = {
                                'date': pres_date, 'reads': reads,
                                'msn': pres_msn}
                            read_keys[read_key] = None

                        while prime_prev_g_read is None:

                            try:
                                prev_g_read = next(prev_g_reads)
                            except StopIteration:
                                break

                            prev_date = prev_g_read.prev_date
                            prev_msn = prev_g_read.msn
                            read_key = '_'.join([str(prev_date), prev_msn])
                            if read_key in read_keys:
                                continue

                            prev_g_bill = sess.query(GBill).join(BillType). \
                                filter(
                                GBill.g_supply == self.g_supply,
                                GBill.finish_date >=
                                prev_g_read.g_bill.start_date,
                                GBill.start_date <=
                                prev_g_read.g_bill.finish_date,
                                BillType.code != 'W').order_by(
                                GBill.issue_date.desc(),
                                BillType.code).first()
                            if prev_g_bill != prev_g_read.g_bill:
                                continue

                            reads = sess.query(GRegisterRead.prev_value).filter(
                                GRegisterRead.g_bill == prev_g_bill,
                                GRegisterRead.prev_date == prev_date,
                                GRegisterRead.msn == prev_msn).all()

                            prime_prev_g_read = {
                                'date': prev_date, 'reads': reads,
                                'msn': prev_msn}
                            read_keys[read_key] = None

                        if prime_pres_g_read is None and \
                                prime_prev_g_read is None:
                            break
                        elif prime_pres_g_read is None:
                            read_list.append(prime_prev_g_read)
                            prime_prev_g_read = None
                        elif prime_prev_g_read is None:
                            read_list.append(prime_pres_g_read)
                            prime_pres_g_read = None
                        else:
                            if is_forwards:
                                if prime_prev_g_read['date'] == \
                                        prime_pres_g_read['date'] or \
                                        prime_pres_g_read['date'] < \
                                        prime_prev_g_read['date']:
                                    read_list.append(prime_pres_g_read)
                                    prime_pres_g_read = None
                                else:
                                    read_list.append(prime_prev_g_read)
                                    prime_prev_g_read = None
                            else:
                                if prime_prev_g_read['date'] == \
                                        prime_pres_g_read['date'] or \
                                        prime_prev_g_read['date'] > \
                                        prime_pres_g_read['date']:
                                    read_list.append(prime_prev_g_read)
                                    prime_prev_g_read = None
                                else:
                                    read_list.append(prime_pres_g_read)
                                    prime_pres_g_read = None

                        if len(read_list) > 1:
                            if is_forwards:
                                aft_read = read_list[-2]
                                fore_read = read_list[-1]
                            else:
                                aft_read = read_list[-1]
                                fore_read = read_list[-2]

                            if aft_read['msn'] == fore_read['msn']:
                                num_hh = (
                                    fore_read['date'] - aft_read['date']
                                    ).total_seconds() / (30 * 60)

                                tprs = {}
                                for tpr_code, initial_val in \
                                        aft_read['reads'].items():
                                    if tpr_code in fore_read['reads']:
                                        end_val = fore_read['reads'][tpr_code]
                                    else:
                                        continue

                                    kwh = end_val - initial_val

                                    if kwh < 0:
                                        digits = int(
                                            math.log10(initial_val)) + 1
                                        kwh = 10 ** digits + kwh

                                    tprs[tpr_code] = kwh / num_hh

                                pairs.append(
                                    {
                                        'start-date': aft_read['date'],
                                        'finish-date': fore_read['date'] + HH,
                                        'tprs': tprs})

                                if len(pairs) > 0 and (
                                        not is_forwards or (
                                            is_forwards and
                                            read_list[-1]['date'] >
                                            chunk_finish)):
                                    break

                self.consumption_info += 'read list - \n' + str(read_list) \
                    + "\n"
                if len(pairs) == 0:
                    pairs.append(
                        {
                            'start-date': chunk_start,
                            'finish-date': chunk_finish,
                            'tprs': {'00001': 0}})

                # smooth
                for i in range(1, len(pairs)):
                    pairs[i - 1]['finish-date'] = pairs[i]['start-date'] - HH

                # stretch
                if pairs[0]['start-date'] > chunk_start:
                    pairs[0]['start-date'] = chunk_start

                if pairs[-1]['finish-date'] < chunk_finish:
                    pairs[-1]['finish-date'] = chunk_finish

                # chop
                pairs = [
                    pair for pair in pairs
                    if not pair['start-date'] > chunk_finish and not
                    pair['finish-date'] < chunk_start]

                # squash
                if pairs[0]['start-date'] < chunk_start:
                    pairs[0]['start-date'] = chunk_start

                if pairs[-1]['finish-date'] > chunk_finish:
                    pairs[-1]['finish-date'] = chunk_finish

                self.consumption_info += 'pairs - \n' + str(pairs)

                for pair in pairs:
                    pair_hhs = (
                        pair['finish-date'] + HH - pair['start-date']
                        ).total_seconds() / (60 * 30)
                    orig_dte = dte
                    for tpr_code, pair_kwh in pair['tprs'].items():
                        hh_date = pair['start-date']
                        dte = orig_dte
                        datum_generator = _datum_generator(
                            sess, self.caches, self.years_back)
                        hh_part = []

                        while not hh_date > pair['finish-date']:
                            datum = datum_generator(sess, dte)
                            if datum is not None:
                                hh_part.append(datum.copy())
                            hh_date += HH
                            dte += HH

                        kwh = pair_kwh * pair_hhs / len(hh_part) \
                            if len(hh_part) > 0 else 0

                        for datum in hh_part:
                            datum.update(
                                {
                                    'msp-kw': kwh * 2, 'msp-kwh': kwh,
                                    'hist-kwh': kwh, 'imp-msp-kvar': 0,
                                    'imp-msp-kvarh': 0, 'exp-msp-kvar': 0,
                                    'exp-msp-kvarh': 0})
                        self.hh_data += hh_part
            else:
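                # A bill is attached: choose a non-overlapping set of bills
                # (most recently issued first) and spread each bill's
                # consumption evenly over its half hours.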
                g_bills = []
                for cand_bill in sess.query(GBill).join(GBatch) \
                        .join(BillType).filter(
                            GBill.g_supply == self.g_supply,
                            GBill.g_reads.any(),
                            GBatch.g_contract == self.g_contract,
                            GBill.start_date <= chunk_finish,
                            GBill.finish_date >= chunk_start,
                            BillType.code != 'W').order_by(
                            GBill.issue_date.desc(), GBill.start_date):
                    can_insert = True
                    for g_bill in g_bills:
                        if not cand_bill.start_date > g_bill.finish_date \
                                and not cand_bill.finish_date < \
                                g_bill.start_date:
                            can_insert = False
                            break
                    if can_insert:
                        g_bills.append(cand_bill)

                prev_type_alias = aliased(GReadType)
                pres_type_alias = aliased(GReadType)
                for g_bill in g_bills:
                    units_consumed = 0
                    for prev_date, prev_value, prev_type, pres_date, \
                            pres_value, pres_type, g_units_code, \
                            g_units_factor in sess.query(
                            GRegisterRead.prev_date,
                            cast(GRegisterRead.prev_value, Float),
                            prev_type_alias.code,
                            GRegisterRead.pres_date,
                            cast(GRegisterRead.pres_value, Float),
                            pres_type_alias.code, GUnits.code,
                            cast(GUnits.factor, Float)).join(
                                GUnits).join(
                                prev_type_alias,
                                GRegisterRead.prev_type_id ==
                                prev_type_alias.id).join(
                            pres_type_alias,
                            GRegisterRead.pres_type_id ==
                            pres_type_alias.id).filter(
                            GRegisterRead.g_bill == g_bill) \
                            .order_by(GRegisterRead.pres_date):
                        if prev_date < g_bill.start_date:
                            self.problem += "There's a read before the " \
                                "start of the bill!"
                        if pres_date > g_bill.finish_date:
                            self.problem += "There's a read after the end " \
                                "of the bill!"
                        advance = pres_value - prev_value
                        if advance < 0:
                            self.problem += "Clocked? "
                            digits = int(math.log10(prev_value)) + 1
                            advance = 10 ** digits - prev_value + pres_value
                        units_consumed += advance

                    bill_s = (
                        g_bill.finish_date - g_bill.start_date +
                        timedelta(minutes=30)).total_seconds()
                    hh_units_consumed = units_consumed / (bill_s / (60 * 30))

                    for hh_date in hh_range(
                            g_bill.start_date, g_bill.finish_date):
                        hist_map[hh_date] = {
                            'units_code': g_units_code,
                            'units_factor': g_units_factor,
                            'units_consumed': hh_units_consumed}

        for d in datum_range(
                sess, self.caches, self.years_back, start_date, finish_date):
            h = d.copy()
            hist_start = h['hist_start']
            h.update(hist_map.get(hist_start, {}))
            h['kwh'] = h['units_consumed'] * h['units_factor'] * \
                h['correction_factor'] * h['cv']
            self.hh_data.append(h)
Exemple #10
0
def content(supply_id, file_name, start_date, finish_date, user):
    caches = {}
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supply_virtual_bills_' + str(supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')

        supply = Supply.get_by_id(sess, supply_id)

        forecast_date = chellow.computer.forecast_date()

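        # The header row is emitted lazily: it is only re-written when the
        # set of titles changes from one era to the next.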
        prev_titles = None

        for era in sess.query(Era).filter(
                Era.supply == supply, Era.start_date < finish_date, or_(
                    Era.finish_date == null(),
                    Era.finish_date > start_date)).order_by(Era.start_date):

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            site = sess.query(Site).join(SiteEra).filter(
                SiteEra.era == era, SiteEra.is_physical == true()).one()

            ds = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, True,
                caches)

            titles = [
                'Imp MPAN Core', 'Exp MPAN Core', 'Site Code', 'Site Name',
                'Account', 'From', 'To', '']

            output_line = [
                era.imp_mpan_core, era.exp_mpan_core, site.code,
                site.name, ds.supplier_account, hh_format(ds.start_date),
                hh_format(ds.finish_date), '']

            mop_titles = ds.contract_func(
                era.mop_contract, 'virtual_bill_titles')()
            titles.extend(['mop-' + t for t in mop_titles])

            ds.contract_func(era.mop_contract, 'virtual_bill')(ds)
            bill = ds.mop_bill
            for title in mop_titles:
                if title in bill:
                    output_line.append(bill[title])
                    del bill[title]
                else:
                    output_line.append('')

            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])

            output_line.append('')
            dc_titles = ds.contract_func(
                era.hhdc_contract, 'virtual_bill_titles')()
            titles.append('')
            titles.extend(['dc-' + t for t in dc_titles])

            ds.contract_func(era.hhdc_contract, 'virtual_bill')(ds)
            bill = ds.dc_bill
            for title in dc_titles:
                output_line.append(bill.get(title, ''))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])

            tpr_query = sess.query(Tpr).join(MeasurementRequirement). \
                join(Ssc).join(Era).filter(
                    Era.start_date <= chunk_finish, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= chunk_start)
                ).order_by(Tpr.code).distinct()

            if era.imp_supplier_contract is not None:
                output_line.append('')
                supplier_titles = ds.contract_func(
                    era.imp_supplier_contract, 'virtual_bill_titles')()
                for tpr in tpr_query.filter(
                        Era.imp_supplier_contract != null()):
                    for suffix in ('-kwh', '-rate', '-gbp'):
                        supplier_titles.append(tpr.code + suffix)
                titles.append('')
                titles.extend(['imp-supplier-' + t for t in supplier_titles])

                ds.contract_func(era.imp_supplier_contract, 'virtual_bill')(ds)
                bill = ds.supplier_bill

                for title in supplier_titles:
                    if title in bill:
                        output_line.append(bill[title])
                        del bill[title]
                    else:
                        output_line.append('')

                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])

            if era.exp_supplier_contract is not None:
                ds = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era, False,
                    caches)

                output_line.append('')
                supplier_titles = ds.contract_func(
                    era.exp_supplier_contract, 'virtual_bill_titles')()
                for tpr in tpr_query.filter(
                        Era.exp_supplier_contract != null()):
                    for suffix in ('-kwh', '-rate', '-gbp'):
                        supplier_titles.append(tpr.code + suffix)
                titles.append('')
                titles.extend(['exp-supplier-' + t for t in supplier_titles])

                ds.contract_func(
                    era.exp_supplier_contract, 'virtual_bill')(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(bill.get(title, ''))
                    if title in bill:
                        del bill[title]

                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])

            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            for i, val in enumerate(output_line):
                output_line[i] = csv_make_val(val)
            writer.writerow(output_line)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Exemple #11
0
def create_csv(f, sess, start_date, finish_date, contract_id):
    caches = {}
    writer = csv.writer(f, lineterminator="\n")
    contract = Contract.get_supplier_by_id(sess, contract_id)
    forecast_date = chellow.computer.forecast_date()

    start_date_ct, finish_date_ct = to_ct(start_date), to_ct(finish_date)

    month_pairs = c_months_u(
        start_year=start_date_ct.year,
        start_month=start_date_ct.month,
        finish_year=finish_date_ct.year,
        finish_month=finish_date_ct.month,
    )

    bill_titles = contract_func(caches, contract, "virtual_bill_titles")()

    for tpr in (
        sess.query(Tpr)
        .join(MeasurementRequirement)
        .join(Ssc)
        .join(Era)
        .filter(
            Era.start_date <= finish_date,
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            or_(
                Era.imp_supplier_contract == contract,
                Era.exp_supplier_contract == contract,
            ),
        )
        .order_by(Tpr.code)
        .distinct()
    ):
        for suffix in ("-kwh", "-rate", "-gbp"):
            bill_titles.append(tpr.code + suffix)
    writer.writerow(
        ["MPAN Core", "Site Code", "Site Name", "Account", "From", "To"] + bill_titles
    )
    vb_func = contract_func(caches, contract, "virtual_bill")

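    # Process one calendar month at a time, clamping each month to the
    # requested date range.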
    for month_start, month_finish in month_pairs:
        period_start = hh_max(start_date, month_start)
        period_finish = hh_min(finish_date, month_finish)

        for era in (
            sess.query(Era)
            .filter(
                or_(
                    Era.imp_supplier_contract == contract,
                    Era.exp_supplier_contract == contract,
                ),
                Era.start_date <= period_finish,
                or_(Era.finish_date == null(), Era.finish_date >= period_start),
            )
            .order_by(Era.imp_mpan_core)
        ):
            try:
                vals = _process_era(
                    sess,
                    caches,
                    vb_func,
                    forecast_date,
                    bill_titles,
                    contract,
                    period_start,
                    period_finish,
                    era,
                )
                writer.writerow(csv_make_val(v) for v in vals)
            except BadRequest as e:
                raise BadRequest(
                    f"Problem with {chellow.utils.url_root}eras/{era.id}/edit "
                    f"{e.description}"
                )
Exemple #12
0
def _bill_kwh(
    sess,
    caches,
    g_supply,
    hist_g_era,
    chunk_start,
    chunk_finish,
    g_cv_id,
    hist_map,
    g_ldz_code,
):

    cf = float(hist_g_era.correction_factor)
    g_unit = hist_g_era.g_unit
    unit_code, unit_factor = g_unit.code, float(g_unit.factor)

    for hh_date in hh_range(caches, chunk_start, chunk_finish):
        cv, avg_cv = find_cv(sess, caches, g_cv_id, hh_date, g_ldz_code)
        hist_map[hh_date] = {
            "unit_code": unit_code,
            "unit_factor": unit_factor,
            "correction_factor": cf,
            "calorific_value": cv,
            "avg_cv": avg_cv,
        }

    g_bills = dict(
        (b.id, b)
        for b in sess.query(GBill)
        .filter(
            GBill.g_supply == g_supply,
            GBill.start_date <= chunk_finish,
            GBill.finish_date >= chunk_start,
        )
        .order_by(GBill.issue_date.desc(), GBill.start_date)
    )
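    # Remove pairs of bills that exactly cancel each other out, for example
    # a bill and its reversing credit note.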
    while True:
        to_del = None
        for a, b in combinations(g_bills.values(), 2):
            if all(
                (
                    a.start_date == b.start_date,
                    a.finish_date == b.finish_date,
                    a.kwh == -1 * b.kwh,
                    a.net == -1 * b.net,
                    a.vat == -1 * b.vat,
                    a.gross == -1 * b.gross,
                )
            ):
                to_del = (a.id, b.id)
                break
        if to_del is None:
            break
        else:
            for k in to_del:
                del g_bills[k]

    for _, g_bill in sorted(g_bills.items()):
        units_consumed = 0
        for prev_value, pres_value in sess.query(
            cast(GRegisterRead.prev_value, Float), cast(GRegisterRead.pres_value, Float)
        ).filter(GRegisterRead.g_bill == g_bill):
            units_diff = pres_value - prev_value
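            # A negative difference suggests the register has clocked over;
            # use the roll-over-corrected figure, but only if it is smaller
            # than the size of the raw decrease.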
            if units_diff < 0:
                total_units = 10 ** len(str(int(prev_value)))
                c_units = total_units - prev_value + pres_value
                if c_units < abs(units_diff):
                    units_diff = c_units

            units_consumed += units_diff

        bill_s = (
            g_bill.finish_date - g_bill.start_date + timedelta(minutes=30)
        ).total_seconds()
        hh_units_consumed = units_consumed / (bill_s / (60 * 30))

        block_start = hh_max(g_bill.start_date, chunk_start)
        block_finish = hh_min(g_bill.finish_date, chunk_finish)
        for hh_date in hh_range(caches, block_start, block_finish):
            hist_map[hh_date]["units_consumed"] = hh_units_consumed
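
The register roll-over correction that appears in several of these examples (when a present reading is lower than the previous one) can be illustrated on its own. This is a minimal sketch, independent of Chellow, using a hypothetical register_advance helper and assuming the register width is inferred from the number of digits of the earlier reading:

import math


def register_advance(prev_value, pres_value):
    # Straightforward difference between the two readings.
    advance = pres_value - prev_value
    if advance < 0:
        # The register appears to have rolled over: infer its width from the
        # previous reading and add one full register cycle.
        digits = int(math.log10(prev_value)) + 1
        advance = 10 ** digits - prev_value + pres_value
    return advance


print(register_advance(1200, 1350))  # 150: no roll-over
print(register_advance(9950, 120))   # 170: rolled over a 4-digit register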
Exemple #13
0
    def __init__(
        self, sess, start_date, finish_date, forecast_date, g_era, caches, g_bill
    ):
        self.sess = sess
        self.caches = caches
        self.forecast_date = forecast_date
        self.start_date = start_date
        self.finish_date = finish_date
        self.bill_hhs = {}
        times = get_times(sess, caches, start_date, finish_date, forecast_date)
        self.years_back = times["years-back"]
        self.history_start = times["history-start"]
        self.history_finish = times["history-finish"]

        self.problem = ""
        self.bill = defaultdict(int, {"problem": ""})
        self.hh_data = []
        self.rate_sets = defaultdict(set)

        self.g_bill = g_bill
        if self.g_bill is not None:
            self.g_bill_start = g_bill.start_date
            self.g_bill_finish = g_bill.finish_date
            self.is_last_g_bill_gen = (
                not self.g_bill_finish < self.start_date
                and not self.g_bill_finish > self.finish_date
            )

        self.g_era = g_era
        self.g_supply = g_era.g_supply
        self.mprn = self.g_supply.mprn
        self.g_exit_zone_code = self.g_supply.g_exit_zone.code
        self.g_ldz_code = self.g_supply.g_exit_zone.g_ldz.code
        self.g_dn_code = self.g_supply.g_exit_zone.g_ldz.g_dn.code
        self.account = g_era.account
        self.g_reading_frequency = g_era.g_reading_frequency
        self.g_reading_frequency_code = self.g_reading_frequency.code
        self.g_contract = g_era.g_contract

        self.consumption_info = ""

        if self.years_back == 0:
            hist_g_eras = [self.g_era]
        else:
            hist_g_eras = (
                sess.query(GEra)
                .filter(
                    GEra.g_supply == self.g_supply,
                    GEra.start_date <= self.history_finish,
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >= self.history_start,
                    ),
                )
                .order_by(GEra.start_date)
                .all()
            )
            if len(hist_g_eras) == 0:
                hist_g_eras = (
                    sess.query(GEra)
                    .filter(GEra.g_supply == self.g_supply)
                    .order_by(GEra.start_date)
                    .limit(1)
                    .all()
                )

        g_cv_id = get_non_core_contract_id("g_cv")
        hist_map = {}

        for i, hist_g_era in enumerate(hist_g_eras):
            if self.history_start > hist_g_era.start_date:
                chunk_start = self.history_start
            else:
                if i == 0:
                    chunk_start = self.history_start
                else:
                    chunk_start = hist_g_era.start_date

            chunk_finish = hh_min(hist_g_era.finish_date, self.history_finish)
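            # With no bill attached, consumption is estimated by _no_bill_kwh;
            # otherwise _bill_kwh derives half-hourly consumption from the
            # bill's register reads.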
            if self.g_bill is None:
                self.consumption_info += _no_bill_kwh(
                    sess,
                    caches,
                    self.g_supply,
                    chunk_start,
                    chunk_finish,
                    hist_g_era,
                    g_cv_id,
                    self.g_ldz_code,
                    hist_map,
                    forecast_date,
                )
            else:
                _bill_kwh(
                    sess,
                    self.caches,
                    self.g_supply,
                    hist_g_era,
                    chunk_start,
                    chunk_finish,
                    g_cv_id,
                    hist_map,
                    self.g_ldz_code,
                )

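        # Convert each half hour's units to kWh: units * unit factor *
        # correction factor * calorific value / 3.6 (MJ per kWh).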
        for d in datum_range(
            sess, self.caches, self.years_back, start_date, finish_date
        ):
            h = d.copy()
            hist_start = h["hist_start"]
            h.update(hist_map.get(hist_start, {}))
            h["kwh"] = (
                h["units_consumed"]
                * h["unit_factor"]
                * h["correction_factor"]
                * h["calorific_value"]
                / 3.6
            )
            h["kwh_avg"] = (
                h["units_consumed"]
                * h["unit_factor"]
                * h["correction_factor"]
                * h["avg_cv"]
                / 3.6
            )
            h["ug_rate"] = float(
                get_file_rates(self.caches, "g_ug", h["start_date"])["ug_gbp_per_kwh"][
                    self.g_exit_zone_code
                ]
            )
            self.hh_data.append(h)
            self.bill_hhs[d["start_date"]] = {}
Exemple #14
0
def content(year, supply_id, user):
    f = sess = None
    try:
        sess = Session()
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, "w")

        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        f.write(
            ','.join(
                (
                    'Chellow Supply Id', 'MPAN Core', 'Site Id', 'Site Name',
                    'From', 'To', 'NHH Breakdown', 'Actual HH Normal Days',
                    'Actual AMR Normal Days', 'Actual NHH Normal Days',
                    'Actual Unmetered Normal Days', 'Max HH Normal Days',
                    'Max AMR Normal Days', 'Max NHH Normal Days',
                    'Max Unmetered Normal Days', 'Total Actual Normal Days',
                    'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh',
                    'NHH kWh', 'Unmetered kwh', 'HH Filled kWh',
                    'AMR Filled kWh', 'Total kWh', 'Note')) + '\n')

        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH

        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(),
            Era.start_date <= year_finish, or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)

        meter_types = ('hh', 'amr', 'nhh', 'unmetered')

        for supply in supplies:
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])

            breakdown = ''

            for era in sess.query(Era).filter(
                    Era.supply_id == supply.id, Era.start_date <= year_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= year_start)):

                meter_type = era.make_meter_category()

                era_start = era.start_date
                period_start = era_start \
                    if era_start > year_start else year_start

                era_finish = era.finish_date
                if hh_after(era_finish, year_finish):
                    period_finish = year_finish
                else:
                    period_finish = era_finish

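                # The maximum possible 'normal' days for this era is simply
                # the length of the period in days.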
                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)

                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()

                if meter_type == 'nhh':

                    read_list = []
                    read_keys = {}
                    pairs = []

                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))

                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads

                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break

                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join([str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue

                                pres_bill = sess.query(Bill).join(BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue

                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead).
                                    filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill == pres_bill,
                                        RegisterRead.present_date == pres_date,
                                        RegisterRead.msn == pres_msn))

                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break

                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join([str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill).join(BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue

                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead).
                                    filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id == prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))

                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None

                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None

                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]

                                if aft_read['msn'] == fore_read['msn'] and \
                                        set(aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']

                                    num_hh = (
                                        (
                                            pair_finish_date + HH -
                                            pair_start_date).total_seconds()
                                        ) / (30 * 60)

                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].items():
                                        end_val = fore_read['reads'][tpr_code]

                                        kwh = end_val - initial_val

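                                        # A negative difference implies the
                                        # register has wrapped around, so add
                                        # back a full turn of the register
                                        # (the next power of ten above the
                                        # initial reading).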
                                        if kwh < 0:
                                            digits = int(
                                                math.log10(initial_val)) + 1
                                            kwh = 10 ** digits + kwh

                                        tprs[tpr_code] = kwh / num_hh

                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date': pair_finish_date,
                                            'tprs': tprs})

                                    if len(pairs) > 0 and (
                                            not is_forwards or
                                            read_list[-1]['date'] >
                                            period_finish):
                                        break

                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                block_start = hh_max(pair_start, period_start)
                                block_finish = hh_min(
                                    pair_finish, period_finish)

                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (
                                            block_finish - block_start
                                        ).total_seconds() +
                                        60 * 30) / (60 * 60 * 24)

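                    # Adjust the pairs so that together they exactly tile the
                    # period: 'smooth' closes gaps between consecutive pairs,
                    # 'stretch' extends the first and last pairs out to the
                    # period boundaries, 'chop' drops pairs lying wholly
                    # outside the period, and 'squash' trims any overhang
                    # back to the period boundaries.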
                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] \
                            - HH

                    # stretch
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start

                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish

                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]

                    # squash
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start

                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish

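                    # Convert each pair's kWh-per-half-hour rates back into
                    # kWh by multiplying by the number of half hours the pair
                    # now spans.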
                    for pair in pairs:
                        pair_hhs = (
                            (
                                pair['finish-date'] - pair['start-date']
                            ).total_seconds() + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in pair['tprs'].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs

                    breakdown += 'pairs - \n' + str(pairs)

                elif meter_type in ('hh', 'amr'):
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish).order_by(
                                HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish).order_by(
                                HhDatum.id))

                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (
                        period_finish + HH - period_start
                        ).total_seconds() / (60 * 30)
                    if year_len_kwhs > 0:
                        filled_kwh[meter_type] += year_sum_kwhs / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
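                    # Days of actual data: count half-hour readings with
                    # status 'A' in the period and divide by 48 half hours
                    # per day.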
                    normal_days[meter_type] += sess.query(
                        func.count(HhDatum.value)).join(Channel). \
                        filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0] / 48
                elif meter_type == 'unmetered':
                    bills = sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.finish_date >= period_start,
                        Bill.start_date <= period_finish)
                    for bill in bills:
                        # Assumes the billed kWh should be apportioned to the
                        # part of each bill that falls within the period.
                        bill_seconds = (
                            bill.finish_date - bill.start_date
                        ).total_seconds() + 30 * 60
                        overlap_seconds = (
                            hh_min(bill.finish_date, period_finish) -
                            hh_max(bill.start_date, period_start)
                        ).total_seconds() + 30 * 60
                        total_kwh[meter_type] += float(
                            bill.kwh) * overlap_seconds / bill_seconds
                    normal_days[meter_type] += (
                        (
                            period_finish - period_start).total_seconds() +
                        60 * 30) / (60 * 60 * 24)

            # Classed as 'Actual' rather than 'Estimated' if days with normal
            # (actual) reads cover at least 183/365 of the year, i.e. at
            # least half a full year.
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            is_normal = total_normal_days / total_max_normal_days >= 183 / 365

            f.write(
                ','.join(
                    '"' + str(val) + '"' for val in
                    [
                        supply.id, mpan_core, site.code, site.name,
                        hh_format(year_start), hh_format(year_finish),
                        breakdown] +
                    [
                        normal_days[type] for type in meter_types] +
                    [
                        max_normal_days[type] for type in meter_types] +
                    [
                        total_normal_days, total_max_normal_days,
                        "Actual" if is_normal else "Estimated"] +
                    [total_kwh[type] for type in meter_types] +
                    [filled_kwh[type] for type in ('hh', 'amr')] +
                    [sum(total_kwh.values()) + sum(filled_kwh.values()), '']) +
                '\n')

            # avoid a long running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #15
def content(start_date, finish_date, contract_id, user):
    caches = {}
    f = sess = supply_source = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "mop_virtual_bills.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        contract = Contract.get_mop_by_id(sess, contract_id)

        forecast_date = chellow.computer.forecast_date()
        header_titles = [
            "Import MPAN Core",
            "Export MPAN Core",
            "Start Date",
            "Finish Date",
        ]

        bill_titles = chellow.computer.contract_func(caches, contract,
                                                     "virtual_bill_titles")()
        writer.writerow(header_titles + bill_titles)
        vb_func = chellow.computer.contract_func(caches, contract,
                                                 "virtual_bill")

        for era in (sess.query(Era).filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.mop_contract == contract,
        ).order_by(Era.imp_mpan_core, Era.exp_mpan_core, Era.start_date)):
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            import_mpan_core = era.imp_mpan_core
            if import_mpan_core is None:
                import_mpan_core_str = ""
            else:
                is_import = True
                import_mpan_core_str = import_mpan_core

            export_mpan_core = era.exp_mpan_core
            if export_mpan_core is None:
                export_mpan_core_str = ""
            else:
                is_import = False
                export_mpan_core_str = export_mpan_core

            out = [
                import_mpan_core_str,
                export_mpan_core_str,
                hh_format(chunk_start),
                hh_format(chunk_finish),
            ]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, is_import,
                caches)
            vb_func(supply_source)
            bill = supply_source.mop_bill
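            # Write the titled values first, then append any remaining bill
            # keys as name/value pairs at the end of the row.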
            for title in bill_titles:
                if title in bill:
                    out.append(make_val(bill[title]))
                    del bill[title]
                else:
                    out.append("")
            for k in sorted(bill.keys()):
                out.append(k)
                out.append(str(bill[k]))
            writer.writerow(out)
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += ("with supply " + supply_source.mpan_core +
                    " starting at " + hh_format(supply_source.start_date) +
                    " ")
        msg += str(e)
        sys.stderr.write(msg)
        writer.writerow([msg])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #16
def content(year, month, months, supply_id, user):
    tmp_file = sess = None
    try:
        sess = Session()
        supplies = (sess.query(Supply).join(Era).distinct().options(
            joinedload(Supply.generator_type)))

        if supply_id is None:
            base_name = ("supplies_monthly_duration_for_all_supplies_for_" +
                         str(months) + "_to_" + str(year) + "_" + str(month) +
                         ".csv")
        else:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            base_name = ("supplies_monthly_duration_for_" + str(supply.id) +
                         "_" + str(months) + "_to_" + str(year) + "_" +
                         str(month) + ".csv")
        running_name, finished_name = chellow.dloads.make_names(
            base_name, user)

        tmp_file = open(running_name, "w")

        caches = {}

        start_date = Datetime(
            year, month, 1, tzinfo=pytz.utc) - relativedelta(months=months - 1)

        field_names = (
            "supply-name",
            "source-code",
            "generator-type",
            "month",
            "pc-code",
            "msn",
            "site-code",
            "site-name",
            "metering-type",
            "import-mpan-core",
            "metered-import-kwh",
            "metered-import-net-gbp",
            "metered-import-estimated-kwh",
            "billed-import-kwh",
            "billed-import-net-gbp",
            "export-mpan-core",
            "metered-export-kwh",
            "metered-export-estimated-kwh",
            "billed-export-kwh",
            "billed-export-net-gbp",
            "problem",
            "timestamp",
        )

        tmp_file.write("supply-id," + ",".join(field_names) + "\n")

        forecast_date = chellow.computer.forecast_date()

        for i in range(months):
            month_start = start_date + relativedelta(months=i)
            month_finish = month_start + relativedelta(months=1) - HH

            for supply in supplies.filter(
                    Era.start_date <= month_finish,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= month_start),
            ):

                generator_type = supply.generator_type
                if generator_type is None:
                    generator_type = ""
                else:
                    generator_type = generator_type.code

                source_code = supply.source.code
                eras = supply.find_eras(sess, month_start, month_finish)
                era = eras[-1]
                metering_type = era.meter_category

                site = (sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one())

                values = {
                    "supply-name": supply.name,
                    "source-code": source_code,
                    "generator-type": generator_type,
                    "month": hh_format(month_finish),
                    "pc-code": era.pc.code,
                    "msn": era.msn,
                    "site-code": site.code,
                    "site-name": site.name,
                    "metering-type": metering_type,
                    "problem": "",
                }

                tmp_file.write(str(supply.id) + ",")

                for is_import, pol_name in [(True, "import"),
                                            (False, "export")]:
                    if is_import:
                        mpan_core = era.imp_mpan_core
                    else:
                        mpan_core = era.exp_mpan_core

                    values[pol_name + "-mpan-core"] = mpan_core
                    kwh = 0
                    est_kwh = 0

                    if metering_type in ["hh", "amr"]:
                        # Sum of estimated (status 'E') half-hourly kWh in
                        # the month.
                        est_kwh = (sess.query(
                            cast(func.sum(HhDatum.value),
                                 Float)).join(Channel).join(Era).filter(
                                     HhDatum.status == "E",
                                     Era.supply_id == supply.id,
                                     Channel.channel_type == "ACTIVE",
                                     Channel.imp_related == is_import,
                                     HhDatum.start_date >= month_start,
                                     HhDatum.start_date <= month_finish,
                                 ).one()[0])
                        if est_kwh is None:
                            est_kwh = 0

                    if not (is_import and source_code in ("net", "gen-net")):
                        kwh_sum = (sess.query(
                            cast(func.sum(HhDatum.value),
                                 Float)).join(Channel).join(Era).filter(
                                     Era.supply_id == supply.id,
                                     Channel.channel_type == "ACTIVE",
                                     Channel.imp_related == is_import,
                                     HhDatum.start_date >= month_start,
                                     HhDatum.start_date <= month_finish,
                                 ).one()[0])
                        if kwh_sum is not None:
                            kwh += kwh_sum

                    values["metered-" + pol_name + "-estimated-kwh"] = est_kwh
                    values["metered-" + pol_name + "-kwh"] = kwh
                    values["metered-" + pol_name + "-net-gbp"] = 0
                    values["billed-" + pol_name + "-kwh"] = 0
                    values["billed-" + pol_name + "-net-gbp"] = 0
                    values["billed-" + pol_name + "-apportioned-kwh"] = 0
                    values["billed-" + pol_name + "-apportioned-net-gbp"] = 0
                    values["billed-" + pol_name + "-raw-kwh"] = 0
                    values["billed-" + pol_name + "-raw-net-gbp"] = 0

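                # Apportion each overlapping bill's kWh and net GBP to this
                # month by the fraction of the bill's duration that falls
                # within the month.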
                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= month_finish,
                        Bill.finish_date >= month_start,
                ):
                    bill_start = bill.start_date
                    bill_finish = bill.finish_date
                    bill_duration = (bill_finish -
                                     bill_start).total_seconds() + 30 * 60
                    overlap_duration = (min(bill_finish, month_finish) - max(
                        bill_start, month_start)).total_seconds() + 30 * 60
                    overlap_proportion = float(overlap_duration) / float(
                        bill_duration)
                    values[
                        "billed-import-net-gbp"] += overlap_proportion * float(
                            bill.net)
                    values["billed-import-kwh"] += overlap_proportion * float(
                        bill.kwh)

                for era in eras:
                    chunk_start = hh_max(era.start_date, month_start)
                    chunk_finish = hh_min(era.finish_date, month_finish)

                    import_mpan_core = era.imp_mpan_core
                    if import_mpan_core is None:
                        continue

                    supplier_contract = era.imp_supplier_contract

                    if source_code in ["net", "gen-net", "3rd-party"]:
                        supply_source = chellow.computer.SupplySource(
                            sess,
                            chunk_start,
                            chunk_finish,
                            forecast_date,
                            era,
                            True,
                            caches,
                        )

                        values["metered-import-kwh"] += sum(
                            datum["msp-kwh"]
                            for datum in supply_source.hh_data)

                        import_vb_function = supply_source.contract_func(
                            supplier_contract, "virtual_bill")
                        if import_vb_function is None:
                            values["problem"] += (
                                "Can't find the "
                                "virtual_bill  function in the supplier "
                                "contract. ")
                        else:
                            import_vb_function(supply_source)
                            values[
                                "metered-import-net-gbp"] += supply_source.supplier_bill[
                                    "net-gbp"]

                        supply_source.contract_func(
                            era.dc_contract, "virtual_bill")(supply_source)
                        values[
                            "metered-import-net-gbp"] += supply_source.dc_bill[
                                "net-gbp"]

                        mop_func = supply_source.contract_func(
                            era.mop_contract, "virtual_bill")
                        if mop_func is None:
                            values["problem"] += (" MOP virtual_bill "
                                                  "function can't be found.")
                        else:
                            mop_func(supply_source)
                            mop_bill = supply_source.mop_bill
                            values["metered-import-net-gbp"] += mop_bill[
                                "net-gbp"]
                            if len(mop_bill["problem"]) > 0:
                                values["problem"] += (
                                    " MOP virtual bill problem: " +
                                    mop_bill["problem"])

                values["timestamp"] = int(time.time() * 1000)
                tmp_file.write(",".join('"' + str(values[name]) + '"'
                                        for name in field_names) + "\n")
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        if tmp_file is not None:
            tmp_file.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if tmp_file is not None:
            tmp_file.close()
            os.rename(running_name, finished_name)
Example #17
def https_handler(sess, log_f, properties, contract, now=None):
    url_template_str = properties["url_template"]
    url_values = properties.get("url_values", {})
    download_days = properties["download_days"]
    if now is None:
        now = utc_datetime_now()
    window_finish = utc_datetime(now.year, now.month, now.day) - HH
    window_start = utc_datetime(now.year, now.month,
                                now.day) - Timedelta(days=download_days)
    log_f(f"Window start: {hh_format(window_start)}")
    log_f(f"Window finish: {hh_format(window_finish)}")
    env = jinja2.Environment(autoescape=True, undefined=jinja2.StrictUndefined)
    url_template = env.from_string(url_template_str)
    for era in (sess.query(Era).filter(
            Era.dc_contract == contract,
            Era.start_date <= window_finish,
            or_(Era.finish_date == null(), Era.finish_date >= window_start),
    ).distinct()):
        chunk_start = hh_max(era.start_date, window_start)
        chunk_finish = hh_min(era.finish_date, window_finish)
        for mpan_core in (era.imp_mpan_core, era.exp_mpan_core):
            if mpan_core is None:
                continue

            log_f(f"Looking at MPAN core {mpan_core}.")

            vals = {"chunk_start": chunk_start, "chunk_finish": chunk_finish}
            vals.update(url_values.get(mpan_core, {}))
            try:
                url = url_template.render(vals)
            except jinja2.exceptions.UndefinedError as e:
                raise BadRequest(
                    f"Problem rendering the URL template: {url_template_str}. "
                    f"The problem is: {e}. This can be fixed by editing the "
                    f"properties of this contract.")

            log_f(f"Retrieving data from {url}.")

            sess.rollback()  # Avoid long transactions
            res = requests.get(url, timeout=120)
            res.raise_for_status()
            result = res.json()
            if isinstance(result, dict):
                result_data = result["DataPoints"]
            elif isinstance(result, list):
                result_data = result
            else:
                raise BadRequest(
                    f"Expecting a JSON object at the top level, but instead got "
                    f"{result}")
            raw_data = []
            for jdatum in result_data:
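                # "Time" appears to be a .NET-style tick count (100 ns
                # intervals since 0001-01-01), hence the division by
                # 10,000,000 to get seconds.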
                raw_data.append(
                    dict(
                        mpan_core=mpan_core,
                        start_date=utc_datetime(1, 1, 1) +
                        Timedelta(seconds=jdatum["Time"] / 10000000),
                        channel_type="ACTIVE",
                        value=jdatum["Value"],
                        status="A",
                    ))
            HhDatum.insert(sess, raw_data, contract)
            sess.commit()
    log_f("Finished loading.")
    return False
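
A minimal sketch of how the url_template, url_values and download_days properties read by https_handler above might look, and how the URL is rendered with Jinja2. The host, MPAN core, dates and property values here are illustrative assumptions, not taken from a real contract; only the jinja2 calls mirror the handler above.

import jinja2
from datetime import datetime as Datetime

# Illustrative contract properties (assumed shape, mirroring what
# https_handler reads above).
properties = {
    "url_template": (
        "https://example.com/hh?mpan={{ mpan }}"
        "&from={{ chunk_start.strftime('%Y%m%d') }}"
        "&to={{ chunk_finish.strftime('%Y%m%d') }}"),
    "url_values": {"22 0000 0000 000": {"mpan": "2200000000000"}},
    "download_days": 8,
}

env = jinja2.Environment(autoescape=True, undefined=jinja2.StrictUndefined)
url_template = env.from_string(properties["url_template"])

# One MPAN core and one window chunk, as in the loop above.
vals = {"chunk_start": Datetime(2020, 1, 1),
        "chunk_finish": Datetime(2020, 1, 8)}
vals.update(properties["url_values"].get("22 0000 0000 000", {}))
print(url_template.render(vals))
# https://example.com/hh?mpan=2200000000000&from=20200101&to=20200108
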
Example #18
def content(running_name, finished_name, date, supply_id, mpan_cores):
    sess = f = None
    try:
        sess = Session()
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(
            (
                'Date', 'Physical Site Id', 'Physical Site Name',
                'Other Site Ids', 'Other Site Names', 'Supply Id', 'Source',
                'Generator Type', 'GSP Group', 'DNO Name', 'Voltage Level',
                'Metering Type', 'Mandatory HH', 'PC', 'MTC', 'CoP', 'SSC',
                'Number Of Registers', 'MOP Contract', 'MOP Account',
                'HHDC Contract', 'HHDC Account', 'Meter Serial Number',
                'Meter Installation Date', 'Latest Normal Meter Read Date',
                'Latest Normal Meter Read Type', 'Latest DC Bill Date',
                'Latest MOP Bill Date', 'Import ACTIVE?',
                'Import REACTIVE_IMPORT?', 'Import REACTIVE_EXPORT?',
                'Export ACTIVE?', 'Export REACTIVE_IMPORT?',
                'Export REACTIVE_EXPORT?', 'Import MPAN core',
                'Import Agreed Supply Capacity (kVA)', 'Import LLFC Code',
                'Import LLFC Description', 'Import Supplier Contract',
                'Import Supplier Account', 'Import Mandatory kW',
                'Latest Import Supplier Bill Date', 'Export MPAN core',
                'Export Agreed Supply Capacity (kVA)', 'Export LLFC Code',
                'Export LLFC Description', 'Export Supplier Contract',
                'Export Supplier Account', 'Export Mandatory kW',
                'Latest Export Supplier Bill Date'))

        NORMAL_READ_TYPES = ('N', 'C', 'N3')
        year_start = date + HH - relativedelta(years=1)

        eras = sess.query(Era, Supply, GeneratorType).join(Supply).outerjoin(
            GeneratorType).filter(
            Era.start_date <= date,
            or_(Era.finish_date == null(), Era.finish_date >= date)).order_by(
            Era.supply_id).options(joinedload(Era.site_eras))

        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)

            eras = eras.filter(Era.supply == supply)

        if mpan_cores is not None:
            eras = eras.filter(
                or_(
                    Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))

        for era, supply, generator_type in eras:
            site_codes = []
            site_names = []
            for site_era in era.site_eras:
                if site_era.is_physical:
                    physical_site = site_era.site
                else:
                    site = site_era.site
                    site_codes.append(site.code)
                    site_names.append(site.name)

            if era.imp_mpan_core is None:
                voltage_level_code = era.exp_llfc.voltage_level.code
            else:
                voltage_level_code = era.imp_llfc.voltage_level.code

            if generator_type is None:
                generator_type_str = ''
            else:
                generator_type_str = generator_type.code

            metering_type = era.make_meter_category()

            if metering_type == 'nhh':
                latest_prev_normal_read = sess.query(RegisterRead). \
                    join(Bill).join(RegisterRead.previous_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.previous_date <= date,
                        Bill.supply_id == supply.id).order_by(
                        RegisterRead.previous_date.desc()).first()

                latest_pres_normal_read = sess.query(RegisterRead) \
                    .join(Bill).join(RegisterRead.present_type).filter(
                        ReadType.code.in_(NORMAL_READ_TYPES),
                        RegisterRead.present_date <= date,
                        Bill.supply == supply).order_by(
                        RegisterRead.present_date.desc()).first()

                if latest_prev_normal_read is None and \
                        latest_pres_normal_read is None:
                    latest_normal_read_date = None
                    latest_normal_read_type = None
                elif latest_pres_normal_read is not None and \
                        latest_prev_normal_read is None:
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                elif latest_pres_normal_read is None and \
                        latest_prev_normal_read is not None:
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                elif latest_pres_normal_read.present_date > \
                        latest_prev_normal_read.previous_date:
                    latest_normal_read_date = \
                        latest_pres_normal_read.present_date
                    latest_normal_read_type = \
                        latest_pres_normal_read.present_type.code
                else:
                    latest_normal_read_date = \
                        latest_prev_normal_read.previous_date
                    latest_normal_read_type = \
                        latest_prev_normal_read.previous_type.code
                if latest_normal_read_date is not None:
                    latest_normal_read_date = \
                        hh_format(latest_normal_read_date)

            else:
                latest_normal_read_date = metering_type
                latest_normal_read_type = None

            mop_contract = era.mop_contract
            if mop_contract is None:
                mop_contract_name = ''
                mop_account = ''
                latest_mop_bill_date = 'No MOP'
            else:
                mop_contract_name = mop_contract.name
                mop_account = era.mop_account
                latest_mop_bill_date = sess.query(Bill.finish_date) \
                    .join(Batch).filter(
                        Bill.start_date <= date, Bill.supply == supply,
                        Batch.contract == mop_contract).order_by(
                        Bill.finish_date.desc()).first()

                if latest_mop_bill_date is not None:
                    latest_mop_bill_date = hh_format(latest_mop_bill_date[0])

            hhdc_contract = era.hhdc_contract
            if hhdc_contract is None:
                hhdc_contract_name = ''
                hhdc_account = ''
                latest_hhdc_bill_date = 'No HHDC'
            else:
                hhdc_contract_name = hhdc_contract.name
                hhdc_account = era.hhdc_account
                latest_hhdc_bill_date = sess.query(Bill.finish_date) \
                    .join(Batch).filter(
                        Bill.start_date <= date, Bill.supply == supply,
                        Batch.contract == hhdc_contract).order_by(
                        Bill.finish_date.desc()).first()

                if latest_hhdc_bill_date is not None:
                    latest_hhdc_bill_date = hh_format(latest_hhdc_bill_date[0])

            channel_values = []
            for imp_related in [True, False]:
                for channel_type in CHANNEL_TYPES:
                    if era.find_channel(
                            sess, imp_related, channel_type) is None:
                        channel_values.append('false')
                    else:
                        channel_values.append('true')

            imp_avg_months = None
            exp_avg_months = None
            for is_import in [True, False]:
                if metering_type == 'nhh':
                    continue

                params = {
                    'supply_id': supply.id, 'year_start': year_start,
                    'year_finish': date,
                    'is_import': is_import}
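                # Take the three highest monthly maximum demands over the
                # last year (doubling the half-hourly kWh to get kW) and
                # average them; an average above 100 kW makes half-hourly
                # metering mandatory.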
                month_mds = tuple(
                    md[0] * 2 for md in sess.execute("""

    select max(hh_datum.value) as md
    from hh_datum join channel on (hh_datum.channel_id = channel.id)
        join era on (channel.era_id = era.id)
    where era.supply_id = :supply_id and hh_datum.start_date >= :year_start
        and hh_datum.start_date <= :year_finish
        and channel.channel_type = 'ACTIVE'
        and channel.imp_related = :is_import
    group by extract(month from (hh_datum.start_date at time zone 'utc'))
    order by md desc
    limit 3

    """, params=params))

                avg_months = sum(month_mds)
                if len(month_mds) > 0:
                    avg_months /= len(month_mds)
                    if is_import:
                        imp_avg_months = avg_months
                    else:
                        exp_avg_months = avg_months

            if (imp_avg_months is not None and imp_avg_months > 100) or \
                    (exp_avg_months is not None and exp_avg_months > 100):
                mandatory_hh = 'yes'
            else:
                mandatory_hh = 'no'

            imp_latest_supplier_bill_date = None
            exp_latest_supplier_bill_date = None
            for is_import in [True, False]:
                for er in sess.query(Era).filter(
                            Era.supply == era.supply,
                            Era.start_date <= date).order_by(
                            Era.start_date.desc()):
                    if is_import:
                        if er.imp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.imp_supplier_contract
                    else:
                        if er.exp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.exp_supplier_contract

                    latest_bill_date = sess.query(Bill.finish_date) \
                        .join(Batch).filter(
                            Bill.finish_date >= er.start_date,
                            Bill.finish_date <= hh_min(er.finish_date, date),
                            Bill.supply == supply,
                            Batch.contract == supplier_contract).order_by(
                            Bill.finish_date.desc()).first()

                    if latest_bill_date is not None:
                        latest_bill_date = hh_format(latest_bill_date[0])

                        if is_import:
                            imp_latest_supplier_bill_date = latest_bill_date
                        else:
                            exp_latest_supplier_bill_date = latest_bill_date
                        break

            meter_installation_date = sess.query(func.min(Era.start_date)) \
                .filter(Era.supply == era.supply, Era.msn == era.msn).one()[0]

            if era.ssc is None:
                ssc_code = num_registers = None
            else:
                ssc_code = era.ssc.code
                num_registers = sess.query(MeasurementRequirement).filter(
                    MeasurementRequirement.ssc == era.ssc).count()

            writer.writerow(
                (
                    ('' if value is None else str(value))) for value in [
                    hh_format(date), physical_site.code, physical_site.name,
                    ', '.join(site_codes), ', '.join(site_names), supply.id,
                    supply.source.code, generator_type_str,
                    supply.gsp_group.code, supply.dno_contract.name,
                    voltage_level_code, metering_type, mandatory_hh,
                    era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                    num_registers, mop_contract_name, mop_account,
                    hhdc_contract_name, hhdc_account, era.msn,
                    hh_format(meter_installation_date),
                    latest_normal_read_date, latest_normal_read_type,
                    latest_hhdc_bill_date, latest_mop_bill_date] +
                channel_values + [
                    era.imp_mpan_core, era.imp_sc,
                    None if era.imp_llfc is None else era.imp_llfc.code,
                    None if era.imp_llfc is None else era.imp_llfc.description,
                    None if era.imp_supplier_contract is None else
                    era.imp_supplier_contract.name,
                    era.imp_supplier_account, imp_avg_months,
                    imp_latest_supplier_bill_date] + [
                    era.exp_mpan_core, era.exp_sc,
                    None if era.exp_llfc is None else era.exp_llfc.code,
                    None if era.exp_llfc is None else era.exp_llfc.description,
                    None if era.exp_supplier_contract is None else
                    era.exp_supplier_contract.name, era.exp_supplier_account,
                    exp_avg_months, exp_latest_supplier_bill_date])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #19
def content(contract_id, end_year, end_month, months, user):
    caches = {}
    sess = f = None
    try:
        sess = Session()
        contract = Contract.get_hhdc_by_id(sess, contract_id)

        finish_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) + \
            relativedelta(months=1) - HH

        start_date = Datetime(end_year, end_month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months - 1)

        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            'hhdc_virtual_bills.csv', user)

        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')

        bill_titles = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')()
        header_titles = [
            'Import MPAN Core', 'Export MPAN Core', 'Start Date',
            'Finish Date']

        vb_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill')

        writer.writerow(header_titles + bill_titles)

        for era in sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.hhdc_contract == contract).order_by(Era.supply_id):
            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ''
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core

            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = '' if exp_mpan_core is None else exp_mpan_core

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)

            vals = [
                imp_mpan_core_str, exp_mpan_core_str, hh_format(chunk_start),
                hh_format(chunk_finish)]

            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, is_import,
                caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill
            for title in bill_titles:
                vals.append(str(bill.get(title, '')))
                if title in bill:
                    del bill[title]

            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(str(bill[k]))
            writer.writerow(vals)
    except BadRequest as e:
        f.write("Problem " + e.description + traceback.format_exc() + '\n')
    except BaseException:
        msg = "Problem " + traceback.format_exc() + '\n'
        sys.stderr.write(msg)
        if f is not None:
            f.write(msg)
    finally:
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
Example #20
def content(batch_id, bill_id, contract_id, start_date, finish_date, user):
    caches = {}
    tmp_file = sess = bill = None
    forecast_date = to_utc(Datetime.max)
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')
        bills = sess.query(Bill).order_by(
            Bill.supply_id, Bill.reference).options(
            joinedload(Bill.supply),
            subqueryload(Bill.reads).joinedload(RegisterRead.present_type),
            subqueryload(Bill.reads).joinedload(RegisterRead.previous_type),
            joinedload(Bill.batch))
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = bills.filter(Bill.batch == batch)
            contract = batch.contract
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = bills.filter(Bill.id == bill.id)
            contract = bill.batch.contract
        elif contract_id is not None:
            contract = Contract.get_by_id(sess, contract_id)
            bills = bills.join(Batch).filter(
                Batch.contract == contract, Bill.start_date <= finish_date,
                Bill.finish_date >= start_date)

        market_role_code = contract.market_role.code
        vbf = chellow.computer.contract_func(caches, contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")

        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()

        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh', 'bill-net-gbp',
            'bill-vat-gbp', 'bill-start-date', 'bill-finish-date',
            'imp-mpan-core', 'exp-mpan-core', 'site-code', 'site-name',
            'covered-from', 'covered-to', 'covered-bills', 'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)

        writer.writerow(titles)

        bill_map = defaultdict(set, {})
        for bill in bills:
            bill_map[bill.supply.id].add(bill.id)

        for supply_id, bill_ids in bill_map.items():
            gaps = {}
            data_sources = {}

            while len(bill_ids) > 0:
                bill_id = list(sorted(bill_ids))[0]
                bill_ids.remove(bill_id)
                bill = sess.query(Bill).filter(Bill.id == bill_id).options(
                    joinedload(Bill.batch),
                    joinedload(Bill.bill_type),
                    joinedload(Bill.reads),
                    joinedload(Bill.supply),
                    joinedload(Bill.reads).joinedload(
                        RegisterRead.present_type),
                    joinedload(Bill.reads).joinedload(
                        RegisterRead.previous_type)).one()
                virtual_bill = {'problem': ''}
                supply = bill.supply

                read_dict = {}
                for read in bill.reads:
                    gen_start = read.present_date.replace(hour=0).replace(
                        minute=0)
                    gen_finish = gen_start + relativedelta(days=1) - HH
                    msn_match = False
                    read_msn = read.msn
                    for read_era in supply.find_eras(
                            sess, gen_start, gen_finish):
                        if read_msn == read_era.msn:
                            msn_match = True
                            break

                    if not msn_match:
                        virtual_bill['problem'] += "The MSN " + read_msn + \
                            " of the register read " + str(read.id) + \
                            " doesn't match the MSN of the era."

                    for dt, typ in [
                            (read.present_date, read.present_type),
                            (read.previous_date, read.previous_type)]:
                        key = str(dt) + "-" + read.msn
                        try:
                            if typ != read_dict[key]:
                                virtual_bill['problem'] += " Reads taken " + \
                                    "on " + str(dt) + \
                                    " have differing read types."
                        except KeyError:
                            read_dict[key] = typ

                bill_start = bill.start_date
                bill_finish = bill.finish_date

                covered_start = bill_start
                covered_finish = bill_finish
                covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}

                vb_elems = set()
                enlarged = True

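                # Grow the covered window until it stops changing: any bill
                # for the same market role that overlaps the window and
                # shares breakdown elements with it pulls the window out to
                # its own start and finish dates.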
                while enlarged:
                    enlarged = False
                    covered_elems = find_elements(bill)
                    covered_bills = OrderedDict(
                        (b.id, b) for b in sess.query(Bill).join(Batch).
                        join(Contract).join(MarketRole).filter(
                            Bill.supply == supply,
                            Bill.start_date <= covered_finish,
                            Bill.finish_date >= covered_start,
                            MarketRole.code == market_role_code).order_by(
                                Bill.start_date, Bill.issue_date))
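                    # Drop pairs of covered bills that exactly cancel each
                    # other out (a bill and its negation).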
                    while True:
                        to_del = None
                        for a, b in combinations(covered_bills.values(), 2):
                            if all(
                                    (
                                        a.start_date == b.start_date,
                                        a.finish_date == b.finish_date,
                                        a.kwh == -1 * b.kwh,
                                        a.net == -1 * b.net,
                                        a.vat == -1 * b.vat,
                                        a.gross == -1 * b.gross)):
                                to_del = (a.id, b.id)
                                break
                        if to_del is None:
                            break
                        else:
                            for k in to_del:
                                del covered_bills[k]

                    for k, covered_bill in tuple(covered_bills.items()):
                        elems = find_elements(covered_bill)
                        if elems.isdisjoint(covered_elems):
                            if k != bill.id:
                                del covered_bills[k]
                                continue
                        else:
                            covered_elems.update(elems)

                        if covered_bill.start_date < covered_start:
                            covered_start = covered_bill.start_date
                            enlarged = True
                            break

                        if covered_bill.finish_date > covered_finish:
                            covered_finish = covered_bill.finish_date
                            enlarged = True
                            break

                if len(covered_bills) == 0:
                    continue

                primary_covered_bill = None
                for covered_bill in covered_bills.values():
                    if covered_bill.id in bill_ids:
                        bill_ids.remove(covered_bill.id)
                    covered_bdown['net-gbp'] += float(covered_bill.net)
                    covered_bdown['vat-gbp'] += float(covered_bill.vat)
                    covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                    covered_rates = defaultdict(set)
                    for k, v in loads(covered_bill.breakdown).items():
                        if k in ('raw_lines', 'raw-lines'):
                            continue

                        if isinstance(v, list):
                            covered_rates[k].update(set(v))
                        else:
                            if isinstance(v, Decimal):
                                v = float(v)
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " in " + str(
                                        [
                                            b.id for b in
                                            covered_bills.values()
                                        ]) + " the value " + str(v) +
                                    " can't be added to the existing value " +
                                    str(covered_bdown[k]) + ". " + str(detail))

                            if k.endswith('-gbp'):
                                elem = k[:-4]
                                covered_elems.add(elem)
                                add_gap(
                                    caches, gaps, elem,
                                    covered_bill.start_date,
                                    covered_bill.finish_date, False, v)

                    for k, v in covered_rates.items():
                        covered_bdown[k] = v.pop() if len(v) == 1 else None

                    if primary_covered_bill is None or (
                            (
                                covered_bill.finish_date -
                                covered_bill.start_date) > (
                                primary_covered_bill.finish_date -
                                primary_covered_bill.start_date)):
                        primary_covered_bill = covered_bill

                metered_kwh = 0
                for era in sess.query(Era).filter(
                        Era.supply == supply, Era.start_date <= covered_finish,
                        or_(
                            Era.finish_date == null(),
                            Era.finish_date >= covered_start)
                        ).distinct().options(
                        joinedload(Era.channels),
                        joinedload(Era.cop),
                        joinedload(Era.dc_contract),
                        joinedload(Era.exp_llfc),
                        joinedload(Era.exp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.exp_supplier_contract),
                        joinedload(Era.imp_llfc),
                        joinedload(Era.imp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.imp_supplier_contract),
                        joinedload(Era.mop_contract),
                        joinedload(Era.mtc).joinedload(Mtc.meter_type),
                        joinedload(Era.pc),
                        joinedload(Era.supply).joinedload(Supply.dno),
                        joinedload(Era.supply).joinedload(Supply.gsp_group),
                        joinedload(Era.supply).joinedload(Supply.source)):

                    chunk_start = hh_max(covered_start, era.start_date)
                    chunk_finish = hh_min(covered_finish, era.finish_date)

                    if contract not in (
                            era.mop_contract, era.dc_contract,
                            era.imp_supplier_contract,
                            era.exp_supplier_contract):
                        virtual_bill['problem'] += ''.join(
                            (
                                "From ", hh_format(chunk_start), " to ",
                                hh_format(chunk_finish), " the contract of ",
                                "the era doesn't match the contract of the ",
                                "bill."))
                        continue

                    if contract.market_role.code == 'X':
                        polarity = contract != era.exp_supplier_contract
                    else:
                        polarity = era.imp_supplier_contract is not None
                    '''
                    pairs = []
                    last_finish = chunk_start - HH
                    for hd in chellow.computer.datum_range(
                            sess, caches, 0, chunk_start, chunk_finish):
                        if hd['utc-is-month-end'] or hd['ct-is-month-end']:
                            end_date = hd['start-date']
                            pairs.append((last_finish + HH, end_date))
                            last_finish = end_date
                    if hd['start-date'] > last_finish:
                        pairs.append((last_finish + HH, hd['start-date']))

                    for ss_start, ss_finish in pairs:
                    '''
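                    # Cache the SupplySource for each (chunk, era, polarity,
                    # primary bill) combination so the virtual bill function
                    # only has to be evaluated once per distinct chunk.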
                    try:
                        ds_key = (
                            chunk_start, chunk_finish, forecast_date, era.id,
                            polarity, primary_covered_bill.id)
                        data_source = data_sources[ds_key]
                    except KeyError:
                        data_source = data_sources[ds_key] = \
                            chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, polarity, caches, primary_covered_bill)
                        vbf(data_source)

                    if data_source.measurement_type == 'hh':
                        metered_kwh += sum(
                            h['msp-kwh'] for h in data_source.hh_data)
                    else:
                        ds = chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, polarity, caches)
                        metered_kwh += sum(
                            h['msp-kwh'] for h in ds.hh_data)

                    if market_role_code == 'X':
                        vb = data_source.supplier_bill
                    elif market_role_code == 'C':
                        vb = data_source.dc_bill
                    elif market_role_code == 'M':
                        vb = data_source.mop_bill
                    else:
                        raise BadRequest("Odd market role.")

                    for k, v in vb.items():
                        try:
                            if isinstance(v, set):
                                virtual_bill[k].update(v)
                            else:
                                virtual_bill[k] += v
                        except KeyError:
                            virtual_bill[k] = v
                        except TypeError as detail:
                            raise BadRequest(
                                "For key " + str(k) + " and value " +
                                str(v) + ". " + str(detail))

                        if all((k.endswith('-gbp'), k != 'net-gbp', v != 0)):
                            add_gap(
                                caches, gaps, k[:-4], chunk_start,
                                chunk_finish, True, v)

                    for k in virtual_bill.keys():
                        if k.endswith('-gbp'):
                            vb_elems.add(k[:-4])

                long_map = {}
                vb_keys = set(virtual_bill.keys())
                for elem in sorted(vb_elems, key=len, reverse=True):
                    els = long_map[elem] = set()
                    for k in tuple(vb_keys):
                        if k.startswith(elem + '-'):
                            els.add(k)
                            vb_keys.remove(k)

                for elem in vb_elems.difference(covered_elems):
                    for k in long_map[elem]:
                        del virtual_bill[k]

                try:
                    del virtual_bill['net-gbp']
                except KeyError:
                    pass

                virtual_bill['net-gbp'] = sum(
                    v for k, v in virtual_bill.items() if k.endswith('-gbp'))

                era = supply.find_era_at(sess, bill_finish)
                if era is None:
                    imp_mpan_core = exp_mpan_core = None
                    site_code = site_name = None
                    virtual_bill['problem'] += \
                        "This bill finishes before or after the supply. "
                else:
                    imp_mpan_core = era.imp_mpan_core
                    exp_mpan_core = era.exp_mpan_core

                    site = sess.query(Site).join(SiteEra).filter(
                        SiteEra.is_physical == true(),
                        SiteEra.era == era).one()
                    site_code = site.code
                    site_name = site.name

                # Find bill to use for header data
                if bill.id not in covered_bills:
                    for cbill in covered_bills.values():
                        if bill.batch == cbill.batch:
                            bill = cbill

                values = [
                    bill.batch.reference, bill.reference, bill.bill_type.code,
                    bill.kwh, bill.net, bill.vat, hh_format(bill_start),
                    hh_format(bill_finish), imp_mpan_core, exp_mpan_core,
                    site_code, site_name, hh_format(covered_start),
                    hh_format(covered_finish), ':'.join(
                        str(i).replace(',', '') for i in covered_bills.keys()),
                    metered_kwh]

                for title in virtual_bill_titles:
                    try:
                        cov_val = covered_bdown[title]
                        values.append(cov_val)
                        del covered_bdown[title]
                    except KeyError:
                        cov_val = None
                        values.append('')

                    try:
                        virt_val = csv_make_val(virtual_bill[title])
                        values.append(virt_val)
                        del virtual_bill[title]
                    except KeyError:
                        virt_val = 0
                        values.append('')

                    if title.endswith('-gbp'):
                        if isinstance(virt_val, (int, float, Decimal)):
                            if isinstance(cov_val, (int, float, Decimal)):
                                values.append(float(cov_val) - float(virt_val))
                            else:
                                values.append(0 - float(virt_val))
                        else:
                            values.append('')

                for title in sorted(virtual_bill.keys()):
                    virt_val = csv_make_val(virtual_bill[title])
                    values += ['virtual-' + title, virt_val]
                    if title in covered_bdown:
                        values += ['covered-' + title, covered_bdown[title]]
                    else:
                        values += ['', '']

                writer.writerow(values)

                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start):

                    for k, v in loads(bill.breakdown).items():
                        if k.endswith('-gbp'):
                            add_gap(
                                caches, gaps, k[:-4], bill.start_date,
                                bill.finish_date, False, v)

                # Avoid long-running transactions
                sess.rollback()

            clumps = []
            for element, elgap in sorted(gaps.items()):
                for start_date, hhgap in sorted(elgap.items()):
                    if hhgap['has_virtual'] and not hhgap['has_covered']:

                        if len(clumps) == 0 or not all(
                                (
                                    clumps[-1]['element'] == element,
                                    clumps[-1]['finish_date'] + HH ==
                                    start_date)):
                            clumps.append(
                                {
                                    'element': element,
                                    'start_date': start_date,
                                    'finish_date': start_date,
                                    'gbp': hhgap['gbp']})
                        else:
                            clumps[-1]['finish_date'] = start_date

            for i, clump in enumerate(clumps):
                vals = dict((title, '') for title in titles)
                vals['covered-problem'] = '_'.join(
                    (
                        'missing', clump['element'], 'supplyid',
                        str(supply.id), 'from',
                        hh_format(clump['start_date'])))
                vals['imp-mpan-core'] = imp_mpan_core
                vals['exp-mpan-core'] = exp_mpan_core
                vals['batch'] = 'missing_bill'
                vals['bill-start-date'] = hh_format(clump['start_date'])
                vals['bill-finish-date'] = hh_format(clump['finish_date'])
                vals['difference-net-gbp'] = clump['gbp']
                writer.writerow(vals[title] for title in titles)

            # Avoid long-running transactions
            sess.rollback()

    except BadRequest as e:
        if bill is None:
            prefix = "Problem: "
        else:
            prefix = "Problem with bill " + str(bill.id) + ':'
        tmp_file.write(prefix + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        tmp_file.close()
        os.rename(running_name, finished_name)
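The gap analysis at the end of the report above merges consecutive half-hours that carry a virtual charge but no covered bill into one "missing bill" row per element. A condensed, self-contained sketch of that merging step, assuming the same gaps structure as the code above (the clump_gaps wrapper itself is illustrative):

from datetime import timedelta

HH = timedelta(minutes=30)  # half-hourly step used throughout these reports


def clump_gaps(gaps):
    # gaps: {element: {hh_start: {'has_virtual': bool, 'has_covered': bool,
    #                             'gbp': float}}}
    clumps = []
    for element, elgap in sorted(gaps.items()):
        for start_date, hhgap in sorted(elgap.items()):
            if hhgap['has_virtual'] and not hhgap['has_covered']:
                prev = clumps[-1] if len(clumps) > 0 else None
                if prev is not None and prev['element'] == element and \
                        prev['finish_date'] + HH == start_date:
                    # Contiguous with the previous gap: extend it
                    prev['finish_date'] = start_date
                else:
                    clumps.append({
                        'element': element,
                        'start_date': start_date,
                        'finish_date': start_date,
                        'gbp': hhgap['gbp']})
    return clumps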
Exemple #21
0
    def https_handler(self, sess, properties, contract):
        url_template_str = properties['url_template']
        url_values = properties.get('url_values', {})
        download_days = properties['download_days']
        now = utc_datetime_now()
        window_finish = utc_datetime(now.year, now.month, now.day) - HH
        window_start = utc_datetime(now.year, now.month,
                                    now.day) - Timedelta(days=download_days)
        self.log("Window start: " + hh_format(window_start))
        self.log("Window finish: " + hh_format(window_finish))
        env = jinja2.Environment(autoescape=True,
                                 undefined=jinja2.StrictUndefined)
        url_template = env.from_string(url_template_str)
        for era in sess.query(Era).filter(
                Era.dc_contract == contract, Era.start_date <= window_finish,
                or_(Era.finish_date == null(),
                    Era.finish_date >= window_start)).distinct():
            chunk_start = hh_max(era.start_date, window_start)
            chunk_finish = hh_min(era.finish_date, window_finish)
            for mpan_core in (era.imp_mpan_core, era.exp_mpan_core):
                if mpan_core is None:
                    continue

                self.log("Looking at MPAN core {mpan_core}.".format(
                    mpan_core=mpan_core))

                vals = {
                    'chunk_start': chunk_start,
                    'chunk_finish': chunk_finish
                }
                vals.update(url_values.get(mpan_core, {}))
                try:
                    url = url_template.render(vals)
                except jinja2.exceptions.UndefinedError as e:
                    raise BadRequest(
                        "Problem rendering the URL template: " +
                        url_template_str + ". The problem is: " + str(e) +
                        ". This can be fixed by " +
                        "editing the properties of this contract.")

                self.log("Retrieving data from {url}.".format(url=url))
                res = requests.get(url)
                res.raise_for_status()
                result = res.json()
                if isinstance(result, dict):
                    result_data = result['DataPoints']
                elif isinstance(result, list):
                    result_data = result
                else:
                    raise BadRequest(
                        "Expecting a JSON object at the top level, but "
                        "instead got " + str(result))
                raw_data = []
                for jdatum in result_data:
                    raw_data.append(
                        dict(mpan_core=mpan_core,
                             start_date=utc_datetime(1, 1, 1) +
                             Timedelta(seconds=jdatum['Time'] / 10000000),
                             channel_type='ACTIVE',
                             value=jdatum['Value'],
                             status='A'))
                HhDatum.insert(sess, raw_data, contract)
                sess.commit()
        self.log("Finished loading.")
        return False
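The 'Time' field of each data point is treated as a count of 100-nanosecond ticks from 0001-01-01 (the .NET DateTime.Ticks convention): dividing by 10,000,000 gives seconds, which are added to a year-1 epoch. A standalone sketch of the same conversion using only the standard library, with a made-up tick value:

from datetime import datetime, timedelta, timezone


def ticks_to_utc(ticks):
    # 1 tick = 100 ns, counted from 0001-01-01 00:00:00 UTC
    epoch = datetime(1, 1, 1, tzinfo=timezone.utc)
    return epoch + timedelta(seconds=ticks / 10000000)


print(ticks_to_utc(637000000000000000))  # 2019-07-29 12:26:40+00:00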
Exemple #22
0
def content(start_date, finish_date, contract_id, user):
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'mop_virtual_bills.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        contract = Contract.get_mop_by_id(sess, contract_id)

        forecast_date = chellow.computer.forecast_date()
        header_titles = [
            'Import MPAN Core', 'Export MPAN Core', 'Start Date',
            'Finish Date']

        bill_titles = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')()
        writer.writerow(header_titles + bill_titles)
        vb_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill')

        for era in sess.query(Era).filter(
                or_(
                    Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date, Era.mop_contract == contract). \
                order_by(Era.imp_mpan_core, Era.exp_mpan_core, Era.start_date):
            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            import_mpan_core = era.imp_mpan_core
            if import_mpan_core is None:
                import_mpan_core_str = ''
            else:
                is_import = True
                import_mpan_core_str = import_mpan_core

            export_mpan_core = era.exp_mpan_core
            if export_mpan_core is None:
                export_mpan_core_str = ''
            else:
                is_import = False
                export_mpan_core_str = export_mpan_core

            out = [
                import_mpan_core_str, export_mpan_core_str,
                hh_format(chunk_start), hh_format(chunk_finish)]
            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, is_import,
                caches)
            vb_func(supply_source)
            bill = supply_source.mop_bill
            for title in bill_titles:
                if title in bill:
                    out.append(make_val(bill[title]))
                    del bill[title]
                else:
                    out.append('')
            for k in sorted(bill.keys()):
                out.append(k)
                out.append(str(bill[k]))
            writer.writerow(out)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
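This report, like the others here, writes the known virtual bill titles in order and then appends any leftover breakdown keys as name/value pairs so nothing is silently dropped. A minimal sketch of that row-building pattern (the bill_row helper is illustrative, not part of chellow):

def bill_row(bill, titles):
    # Known titles first, in order, blank if absent; then any remaining
    # keys appended as alternating name/value cells.
    bill = dict(bill)
    row = [bill.pop(title, '') for title in titles]
    for k in sorted(bill.keys()):
        row.extend([k, bill[k]])
    return row


# bill_row({'net-gbp': 10, 'problem': ''}, ['net-gbp', 'vat-gbp'])
# -> [10, '', 'problem', '']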
Exemple #23
0
    def __init__(self, sess, start_date, finish_date, forecast_date, g_era,
                 caches, g_bill):
        self.sess = sess
        self.caches = caches
        self.forecast_date = forecast_date
        self.start_date = start_date
        self.finish_date = finish_date
        times = get_times(sess, caches, start_date, finish_date, forecast_date)
        self.years_back = times['years-back']
        self.history_start = times['history-start']
        self.history_finish = times['history-finish']

        self.problem = ''
        self.bill = defaultdict(int, {'problem': ''})
        self.hh_data = []
        self.rate_sets = defaultdict(set)

        self.g_bill = g_bill
        if self.g_bill is not None:
            self.g_bill_start = g_bill.start_date
            self.g_bill_finish = g_bill.finish_date
            self.is_last_g_bill_gen = (
                self.start_date <= self.g_bill_finish <= self.finish_date)

        self.g_era = g_era
        self.g_supply = g_era.g_supply
        self.mprn = self.g_supply.mprn
        self.g_exit_zone_code = self.g_supply.g_exit_zone.code
        self.g_ldz_code = self.g_supply.g_exit_zone.g_ldz.code
        self.g_dn_code = self.g_supply.g_exit_zone.g_ldz.g_dn.code
        self.account = g_era.account
        self.g_contract = g_era.g_contract

        self.consumption_info = ''

        if self.years_back == 0:
            hist_g_eras = [self.g_era]
        else:
            hist_g_eras = sess.query(GEra).filter(
                GEra.g_supply == self.g_supply,
                GEra.start_date <= self.history_finish,
                or_(GEra.finish_date == null(),
                    GEra.finish_date >= self.history_start)).order_by(
                        GEra.start_date).all()
            if len(hist_g_eras) == 0:
                hist_g_eras = sess.query(GEra).filter(
                    GEra.g_supply == self.g_supply).order_by(
                        GEra.start_date).limit(1).all()

        g_cv_id = get_non_core_contract_id('g_cv')
        hist_map = {}

        for i, hist_g_era in enumerate(hist_g_eras):
            if i == 0 or self.history_start > hist_g_era.start_date:
                chunk_start = self.history_start
            else:
                chunk_start = hist_g_era.start_date

            chunk_finish = hh_min(hist_g_era.finish_date, self.history_finish)
            if self.g_bill is None:
                read_list = []
                read_keys = set()
                pairs = []

                prior_pres_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType).join(
                        GRegisterRead.pres_type).filter(
                            GReadType.code.in_(ACTUAL_READ_TYPES),
                            GBill.g_supply == self.g_supply,
                            GRegisterRead.pres_date < chunk_start,
                            BillType.code != 'W').order_by(
                                GRegisterRead.pres_date.desc()))
                prior_prev_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType).join(
                        GRegisterRead.prev_type).filter(
                            GReadType.code.in_(ACTUAL_READ_TYPES),
                            GBill.g_supply == self.g_supply,
                            GRegisterRead.prev_date < chunk_start,
                            BillType.code != 'W').order_by(
                                GRegisterRead.prev_date.desc()))
                next_pres_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType).join(
                        GRegisterRead.pres_type).filter(
                            GReadType.code.in_(ACTUAL_READ_TYPES),
                            GBill.g_supply == self.g_supply,
                            GRegisterRead.pres_date >= chunk_start,
                            BillType.code != 'W').order_by(
                                GRegisterRead.pres_date))
                next_prev_g_reads = iter(
                    sess.query(GRegisterRead).join(GBill).join(BillType).join(
                        GRegisterRead.prev_type).filter(
                            GReadType.code.in_(ACTUAL_READ_TYPES),
                            GBill.g_supply == self.g_supply,
                            GRegisterRead.prev_date >= chunk_start,
                            BillType.code != 'W').order_by(
                                GRegisterRead.prev_date))

                for is_forwards in (False, True):
                    if is_forwards:
                        pres_g_reads = next_pres_g_reads
                        prev_g_reads = next_prev_g_reads
                        read_list.reverse()
                    else:
                        pres_g_reads = prior_pres_g_reads
                        prev_g_reads = prior_prev_g_reads

                    prime_pres_g_read = None
                    prime_prev_g_read = None
                    while True:
                        while prime_pres_g_read is None:
                            try:
                                pres_g_read = next(pres_g_reads)
                            except StopIteration:
                                break

                            pres_date = pres_g_read.pres_date
                            pres_msn = pres_g_read.msn
                            read_key = '_'.join([str(pres_date), pres_msn])
                            if read_key in read_keys:
                                continue

                            pres_g_bill = sess.query(GBill).join(
                                BillType).filter(
                                    GBill.g_supply == self.g_supply,
                                    GBill.finish_date >=
                                    pres_g_read.g_bill.start_date,
                                    GBill.start_date <=
                                    pres_g_read.g_bill.finish_date,
                                    BillType.code != 'W').order_by(
                                        GBill.issue_date.desc(),
                                        BillType.code).first()

                            if pres_g_bill != pres_g_read.g_bill:
                                continue

                            value = sess.query(
                                cast(GRegisterRead.pres_value, Float)).filter(
                                    GRegisterRead.g_bill == pres_g_bill,
                                    GRegisterRead.pres_date == pres_date,
                                    GRegisterRead.msn == pres_msn).scalar()

                            prime_pres_g_read = {
                                'date': pres_date,
                                'value': value,
                                'msn': pres_msn
                            }
                            read_keys.add(read_key)

                        while prime_prev_g_read is None:

                            try:
                                prev_g_read = next(prev_g_reads)
                            except StopIteration:
                                break

                            prev_date = prev_g_read.prev_date
                            prev_msn = prev_g_read.msn
                            read_key = '_'.join([str(prev_date), prev_msn])
                            if read_key in read_keys:
                                continue

                            prev_g_bill = sess.query(GBill).join(
                                BillType).filter(
                                    GBill.g_supply == self.g_supply,
                                    GBill.finish_date >=
                                    prev_g_read.g_bill.start_date,
                                    GBill.start_date <=
                                    prev_g_read.g_bill.finish_date,
                                    BillType.code != 'W').order_by(
                                        GBill.issue_date.desc(),
                                        BillType.code).first()
                            if prev_g_bill != prev_g_read.g_bill:
                                continue

                            value = sess.query(
                                cast(GRegisterRead.prev_value, Float)).filter(
                                    GRegisterRead.g_bill == prev_g_bill,
                                    GRegisterRead.prev_date == prev_date,
                                    GRegisterRead.msn == prev_msn).scalar()

                            prime_prev_g_read = {
                                'date': prev_date,
                                'value': value,
                                'msn': prev_msn
                            }
                            read_keys.add(read_key)

                        if prime_pres_g_read is None and \
                                prime_prev_g_read is None:
                            break
                        elif prime_pres_g_read is None:
                            read_list.append(prime_prev_g_read)
                            prime_prev_g_read = None
                        elif prime_prev_g_read is None:
                            read_list.append(prime_pres_g_read)
                            prime_pres_g_read = None
                        else:
                            if is_forwards:
                                if prime_prev_g_read['date'] == \
                                        prime_pres_g_read['date'] or \
                                        prime_pres_g_read['date'] < \
                                        prime_prev_g_read['date']:
                                    read_list.append(prime_pres_g_read)
                                    prime_pres_g_read = None
                                else:
                                    read_list.append(prime_prev_g_read)
                                    prime_prev_g_read = None
                            else:
                                if prime_prev_g_read['date'] == \
                                        prime_pres_g_read['date'] or \
                                        prime_prev_g_read['date'] > \
                                        prime_pres_g_read['date']:
                                    read_list.append(prime_prev_g_read)
                                    prime_prev_g_read = None
                                else:
                                    read_list.append(prime_pres_g_read)
                                    prime_pres_g_read = None

                        if len(read_list) > 1:
                            if is_forwards:
                                aft_read = read_list[-2]
                                fore_read = read_list[-1]
                            else:
                                aft_read = read_list[-1]
                                fore_read = read_list[-2]

                            if aft_read['msn'] == fore_read['msn']:
                                num_hh = (fore_read['date'] - aft_read['date']
                                          ).total_seconds() / (30 * 60)

                                units = fore_read['value'] - aft_read['value']

                                if units < 0:
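                                    # Register wrapped past its maximum, so
                                    # add the register's full range back on.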
                                    digits = int(math.log10(
                                        aft_read['value'])) + 1
                                    units = 10**digits + units

                                pairs.append({
                                    'start-date': aft_read['date'],
                                    'units': units / num_hh
                                })

                                if not is_forwards or \
                                        read_list[-1]['date'] > chunk_finish:
                                    break

                self.consumption_info += 'read list - \n' + dumps(read_list) \
                    + "\n"
                hhs = _find_hhs(sess, caches, hist_g_era, pairs, chunk_start,
                                chunk_finish, g_cv_id, self.g_ldz_code)
                hist_map.update(hhs)
                self.consumption_info += 'pairs - \n' + dumps(pairs)

            else:
                g_bills = []
                for cand_bill in sess.query(GBill).join(GBatch) \
                        .join(BillType).filter(
                            GBill.g_supply == self.g_supply,
                            GBill.g_reads.any(),
                            GBatch.g_contract == self.g_contract,
                            GBill.start_date <= chunk_finish,
                            GBill.finish_date >= chunk_start,
                            BillType.code != 'W').order_by(
                            GBill.issue_date.desc(), GBill.start_date):
                    can_insert = True
                    for g_bill in g_bills:
                        if not cand_bill.start_date > g_bill.finish_date \
                                and not cand_bill.finish_date < \
                                g_bill.start_date:
                            can_insert = False
                            break
                    if can_insert:
                        g_bills.append(cand_bill)

                for g_bill in g_bills:
                    units_consumed = 0
                    for prev_value, pres_value in sess.query(
                            cast(GRegisterRead.prev_value, Float),
                            cast(
                                GRegisterRead.pres_value,
                                Float)).filter(GRegisterRead.g_bill == g_bill):
                        units_diff = pres_value - prev_value
                        if units_diff < 0:
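                            # A negative difference may mean the register
                            # rolled over; use the wrapped-around consumption
                            # when that gives the smaller correction.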
                            total_units = 10**len(str(int(prev_value)))
                            c_units = total_units - prev_value + pres_value
                            if c_units < abs(units_diff):
                                units_diff = c_units

                        units_consumed += units_diff

                    bill_s = (g_bill.finish_date - g_bill.start_date +
                              timedelta(minutes=30)).total_seconds()
                    hh_units_consumed = units_consumed / (bill_s / (60 * 30))

                    cf = float(hist_g_era.correction_factor)
                    g_unit = hist_g_era.g_unit
                    unit_code, unit_factor = g_unit.code, float(g_unit.factor)
                    for hh_date in hh_range(caches, g_bill.start_date,
                                            g_bill.finish_date):
                        cv, avg_cv = find_cv(sess, caches, g_cv_id, hh_date,
                                             self.g_ldz_code)
                        hist_map[hh_date] = {
                            'unit_code': unit_code,
                            'unit_factor': unit_factor,
                            'units_consumed': hh_units_consumed,
                            'correction_factor': cf,
                            'calorific_value': cv,
                            'avg_cv': avg_cv
                        }

        for d in datum_range(sess, self.caches, self.years_back, start_date,
                             finish_date):
            h = d.copy()
            hist_start = h['hist_start']
            h.update(hist_map.get(hist_start, {}))
            h['kwh'] = h['units_consumed'] * h['unit_factor'] * \
                h['correction_factor'] * h['calorific_value'] / 3.6
            h['kwh_avg'] = h['units_consumed'] * h['unit_factor'] * \
                h['correction_factor'] * h['avg_cv'] / 3.6
            h['ug_rate'] = float(
                get_file_rates(
                    self.caches, 'g_ug',
                    h['start_date'])['ug_gbp_per_kwh'][self.g_exit_zone_code])
            self.hh_data.append(h)
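The kWh figures in this data source follow the usual gas conversion: metered units times the unit factor (to cubic metres), times the correction factor, times the calorific value in MJ/m3, divided by 3.6 to turn MJ into kWh. A tiny worked sketch with typical illustrative values:

def gas_kwh(units_consumed, unit_factor, correction_factor, calorific_value):
    # units -> m3 -> MJ -> kWh (3.6 MJ per kWh)
    return (
        units_consumed * unit_factor * correction_factor *
        calorific_value / 3.6)


# 100 units on a hundreds-of-cubic-feet meter (factor 2.83), standard
# correction factor 1.02264 and a calorific value of 39.2 MJ/m3:
print(gas_kwh(100, 2.83, 1.02264, 39.2))  # roughly 3151 kWh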
Exemple #24
0
def content(supply_id, file_name, start_date, finish_date, user):
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "supply_virtual_bills_" + str(supply_id) + ".csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")

        supply = Supply.get_by_id(sess, supply_id)

        forecast_date = chellow.computer.forecast_date()

        prev_titles = None

        for era in (sess.query(Era).filter(
                Era.supply == supply,
                Era.start_date < finish_date,
                or_(Era.finish_date == null(), Era.finish_date > start_date),
        ).order_by(Era.start_date)):

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            site = (sess.query(Site).join(SiteEra).filter(
                SiteEra.era == era, SiteEra.is_physical == true()).one())

            ds = chellow.computer.SupplySource(
                sess,
                chunk_start,
                chunk_finish,
                forecast_date,
                era,
                era.imp_supplier_contract is not None,
                caches,
            )

            titles = [
                "Imp MPAN Core",
                "Exp MPAN Core",
                "Site Code",
                "Site Name",
                "Account",
                "From",
                "To",
                "",
            ]

            output_line = [
                era.imp_mpan_core,
                era.exp_mpan_core,
                site.code,
                site.name,
                ds.supplier_account,
                hh_format(ds.start_date),
                hh_format(ds.finish_date),
                "",
            ]

            mop_titles = ds.contract_func(era.mop_contract,
                                          "virtual_bill_titles")()
            titles.extend(["mop-" + t for t in mop_titles])

            ds.contract_func(era.mop_contract, "virtual_bill")(ds)
            bill = ds.mop_bill
            for title in mop_titles:
                if title in bill:
                    output_line.append(bill[title])
                    del bill[title]
                else:
                    output_line.append("")

            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])

            output_line.append("")
            dc_titles = ds.contract_func(era.dc_contract,
                                         "virtual_bill_titles")()
            titles.append("")
            titles.extend(["dc-" + t for t in dc_titles])

            ds.contract_func(era.dc_contract, "virtual_bill")(ds)
            bill = ds.dc_bill
            for title in dc_titles:
                output_line.append(bill.get(title, ""))
                if title in bill:
                    del bill[title]
            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])

            tpr_query = (sess.query(Tpr).join(MeasurementRequirement).join(
                Ssc).join(Era).filter(
                    Era.start_date <= chunk_finish,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= chunk_start),
                ).order_by(Tpr.code).distinct())

            if era.imp_supplier_contract is not None:
                output_line.append("")
                supplier_titles = ds.contract_func(era.imp_supplier_contract,
                                                   "virtual_bill_titles")()
                for tpr in tpr_query.filter(
                        Era.imp_supplier_contract != null()):
                    for suffix in ("-kwh", "-rate", "-gbp"):
                        supplier_titles.append(tpr.code + suffix)
                titles.append("")
                titles.extend(["imp-supplier-" + t for t in supplier_titles])

                ds.contract_func(era.imp_supplier_contract, "virtual_bill")(ds)
                bill = ds.supplier_bill

                for title in supplier_titles:
                    if title in bill:
                        output_line.append(bill[title])
                        del bill[title]
                    else:
                        output_line.append("")

                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])

            if era.exp_supplier_contract is not None:
                ds = chellow.computer.SupplySource(sess, chunk_start,
                                                   chunk_finish, forecast_date,
                                                   era, False, caches)

                output_line.append("")
                supplier_titles = ds.contract_func(era.exp_supplier_contract,
                                                   "virtual_bill_titles")()
                for tpr in tpr_query.filter(
                        Era.exp_supplier_contract != null()):
                    for suffix in ("-kwh", "-rate", "-gbp"):
                        supplier_titles.append(tpr.code + suffix)
                titles.append("")
                titles.extend(["exp-supplier-" + t for t in supplier_titles])

                ds.contract_func(era.exp_supplier_contract, "virtual_bill")(ds)
                bill = ds.supplier_bill
                for title in supplier_titles:
                    output_line.append(bill.get(title, ""))
                    if title in bill:
                        del bill[title]

                for k in sorted(bill.keys()):
                    output_line.extend([k, bill[k]])

            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            for i, val in enumerate(output_line):
                output_line[i] = csv_make_val(val)
            writer.writerow(output_line)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
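A pattern repeated in every report here is clamping each era to the requested window with hh_max and hh_min, where an open-ended era has a finish date of None. The real helpers live in chellow's utils module; the sketch below only illustrates the convention they appear to follow, treating None as an unbounded date:

def hh_max(a, b):
    # None means "unbounded", so it dominates a maximum
    if a is None or b is None:
        return None
    return max(a, b)


def hh_min(a, b):
    # ...and is ignored by a minimum
    if a is None:
        return b
    if b is None:
        return a
    return min(a, b)


# chunk_start = hh_max(era.start_date, start_date)
# chunk_finish = hh_min(era.finish_date, finish_date)  # finish may be None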
Exemple #25
0
def content(base_name, site_id, g_supply_id, user, compression, start_date,
            months):
    now = utc_datetime_now()
    report_context = {}
    sess = None

    try:
        sess = Session()
        base_name.append(
            hh_format(start_date).replace(' ',
                                          '_').replace(':',
                                                       '').replace('-', ''))

        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        forecast_from = chellow.computer.forecast_date()

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if g_supply_id is not None:
            g_supply = GSupply.get_by_id(sess, g_supply_id)
            base_name.append('g_supply')
            base_name.append(str(g_supply.id))
            sites = sites.join(SiteGEra).join(GEra).filter(
                GEra.g_supply == g_supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)

        rf = open(running_name, "wb")
        site_rows = []
        g_era_rows = []

        era_header_titles = [
            'creation_date', 'mprn', 'supply_name', 'exit_zone', 'msn', 'unit',
            'contract', 'site_id', 'site_name', 'associated_site_ids', 'month'
        ]
        site_header_titles = [
            'creation_date', 'site_id', 'site_name', 'associated_site_ids',
            'month'
        ]
        summary_titles = ['kwh', 'gbp', 'billed_kwh', 'billed_gbp']

        vb_titles = []
        conts = sess.query(GContract).join(GEra).join(GSupply).filter(
            GEra.start_date <= finish_date,
            or_(GEra.finish_date == null(),
                GEra.finish_date >= start_date)).distinct().order_by(
                    GContract.id)
        if g_supply_id is not None:
            conts = conts.filter(GEra.g_supply_id == g_supply_id)
        for cont in conts:
            title_func = chellow.computer.contract_func(
                report_context, cont, 'virtual_bill_titles')
            if title_func is None:
                raise Exception("For the contract " + cont.name +
                                " there doesn't seem " +
                                "to be a 'virtual_bill_titles' function.")
            for title in title_func():
                if title not in vb_titles:
                    vb_titles.append(title)

        g_era_rows.append(era_header_titles + summary_titles + vb_titles)
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_kwh = site_gbp = site_billed_kwh = site_billed_gbp = 0
                for g_era in sess.query(GEra).join(SiteGEra).filter(
                        SiteGEra.site == site, SiteGEra.is_physical == true(),
                        GEra.start_date <= month_finish,
                        or_(GEra.finish_date == null(),
                            GEra.finish_date >= month_start)).options(
                                joinedload(GEra.g_contract),
                                joinedload(GEra.g_supply),
                                joinedload(GEra.g_supply).joinedload(
                                    GSupply.g_exit_zone)).order_by(GEra.id):

                    g_supply = g_era.g_supply

                    if g_supply_id is not None and g_supply.id != g_supply_id:
                        continue

                    ss_start = hh_max(g_era.start_date, month_start)
                    ss_finish = hh_min(g_era.finish_date, month_finish)

                    ss = GDataSource(sess, ss_start, ss_finish, forecast_from,
                                     g_era, report_context, None)

                    contract = g_era.g_contract
                    vb_function = contract_func(report_context, contract,
                                                'virtual_bill')
                    if vb_function is None:
                        raise BadRequest(
                            "The contract " + contract.name +
                            " doesn't have the virtual_bill() function.")
                    vb_function(ss)
                    bill = ss.bill

                    try:
                        gbp = bill['net_gbp']
                    except KeyError:
                        gbp = 0
                        bill['problem'] += 'For the supply ' + ss.mprn + \
                            ' the virtual bill ' + str(bill) + \
                            ' from the contract ' + contract.name + \
                            ' does not contain the net_gbp key.'
                    try:
                        kwh = bill['kwh']
                    except KeyError:
                        kwh = 0
                        bill['problem'] += "For the supply " + ss.mprn + \
                            " the virtual bill " + str(bill) + \
                            " from the contract " + contract.name + \
                            " does not contain the 'kwh' key."

                    billed_kwh = billed_gbp = 0

                    g_era_associates = {
                        s.site.code
                        for s in g_era.site_g_eras if not s.is_physical
                    }

                    for g_bill in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= ss_finish,
                            GBill.finish_date >= ss_start):
                        bill_start = g_bill.start_date
                        bill_finish = g_bill.finish_date
                        bill_duration = (
                            bill_finish - bill_start).total_seconds() + \
                            (30 * 60)
                        overlap_duration = (min(bill_finish, ss_finish) - max(
                            bill_start, ss_start)).total_seconds() + (30 * 60)
                        overlap_proportion = overlap_duration / bill_duration
                        billed_kwh += overlap_proportion * float(g_bill.kwh)
                        billed_gbp += overlap_proportion * float(g_bill.net)

                    associated_site_ids = ','.join(sorted(g_era_associates))
                    g_era_rows.append([
                        now, g_supply.mprn, g_supply.name, g_supply.g_exit_zone
                        .code, g_era.msn, g_era.g_unit.code, contract.name,
                        site.code, site.name, associated_site_ids,
                        month_finish, kwh, gbp, billed_kwh, billed_gbp
                    ] + [make_val(bill.get(t)) for t in vb_titles])

                    site_kwh += kwh
                    site_gbp += gbp
                    site_billed_kwh += billed_kwh
                    site_billed_gbp += billed_gbp

                linked_sites = ', '.join(s.code
                                         for s in site.find_linked_sites(
                                             sess, month_start, month_finish))

                site_rows.append([
                    now, site.code, site.name, linked_sites, month_finish,
                    site_kwh, site_gbp, site_billed_kwh, site_billed_gbp
                ])
                sess.rollback()
            write_spreadsheet(rf, compression, site_rows, g_era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
            os.rename(r_name, f_name)
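The billed kWh and GBP in this gas report are apportioned to each month by time overlap; both durations get an extra half hour because start and finish dates are inclusive half-hour boundaries. A standalone sketch of the proportion calculation with made-up dates:

from datetime import datetime, timedelta

HH = timedelta(minutes=30)


def overlap_proportion(bill_start, bill_finish, window_start, window_finish):
    # Dates are inclusive half-hour boundaries, hence the extra 30 minutes
    bill_duration = (bill_finish - bill_start + HH).total_seconds()
    overlap = (
        min(bill_finish, window_finish) - max(bill_start, window_start) +
        HH).total_seconds()
    return overlap / bill_duration


# A bill covering all of January, apportioned to the first fifteen days:
p = overlap_proportion(
    datetime(2020, 1, 1), datetime(2020, 1, 31, 23, 30),
    datetime(2020, 1, 1), datetime(2020, 1, 15, 23, 30))
print(p)  # roughly 0.48, so about 48% of the bill's kWh and GBP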
Exemple #26
0
def content(
        start_year, start_month, start_day, finish_year, finish_month,
        finish_day, is_import, supply_id, user):
    caches = {}
        sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'daily_supplier_virtual_bill.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')
        start_date = Datetime(
            start_year, start_month, start_day, tzinfo=pytz.utc)
        finish_date = Datetime(
            finish_year, finish_month, finish_day, tzinfo=pytz.utc) + \
            relativedelta(days=1) - HH

        supply = Supply.get_by_id(sess, supply_id)
        forecast_date = chellow.computer.forecast_date()
        day_start = start_date
        header_titles = [
            'MPAN Core', 'Site Code', 'Site Name', 'Account', 'From', 'To',
            'Is Forecast?'
        ]

        bill_titles = []
        # Find titles
        for era in sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= finish_date, or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)):

            if is_import:
                cont = era.imp_supplier_contract
            else:
                cont = era.exp_supplier_contract

            for title in chellow.computer.contract_func(
                    caches, cont, 'virtual_bill_titles')():
                if title not in bill_titles:
                    bill_titles.append(title)

            ssc = era.ssc
            if ssc is not None:
                for mr in ssc.measurement_requirements:
                    for suffix in ('-kwh', '-rate', '-gbp'):
                        title = mr.tpr.code + suffix
                        if title not in bill_titles:
                            bill_titles.append(title)

        writer.writerow(header_titles + bill_titles)

        while not day_start > finish_date:
            day_finish = day_start + relativedelta(days=1) - HH

            for era in supply.find_eras(sess, day_start, day_finish):
                chunk_start = hh_max(era.start_date, day_start)
                chunk_finish = hh_min(era.finish_date, day_finish)

                ss = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era,
                    is_import, caches)

                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()
                row = [
                    ss.mpan_core, site.code, site.name, ss.supplier_account,
                    hh_format(ss.start_date), hh_format(ss.finish_date),
                    ss.years_back > 0]

                chellow.computer.contract_func(
                    caches, ss.supplier_contract, 'virtual_bill')(ss)
                bill = ss.supplier_bill
                for title in bill_titles:
                    if title in bill:
                        row.append(csv_make_val(bill[title]))
                        del bill[title]
                    else:
                        row.append('')

                for k in sorted(bill.keys()):
                    row.append(k)
                    row.append(csv_make_val(bill[k]))
                writer.writerow(row)

            day_start += relativedelta(days=1)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
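The daily loop above walks the requested range one day at a time, with each window finishing at 23:30 so adjacent days meet without overlapping. A small sketch of that iteration, using timedelta in place of relativedelta (equivalent for whole days):

from datetime import datetime, timedelta

HH = timedelta(minutes=30)


def day_windows(start_date, finish_date):
    # Yield inclusive (day_start, day_finish) pairs on half-hour boundaries
    day_start = start_date
    while day_start <= finish_date:
        yield day_start, day_start + timedelta(days=1) - HH
        day_start += timedelta(days=1)


# list(day_windows(datetime(2020, 1, 1), datetime(2020, 1, 2)))
# -> [(Jan 1 00:00, Jan 1 23:30), (Jan 2 00:00, Jan 2 23:30)]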
Exemple #27
0
def _process_hh(ds, rate_period, est_kw, hh):
    month_start, month_finish = next(
        c_months_u(start_year=hh["ct-year"], start_month=hh["ct-month"]))

    month_start_ct = to_ct(month_start)
    if month_start_ct.month > 3:
        year = month_start_ct.year
    else:
        year = month_start_ct.year - 1
    financial_year_start = to_utc(ct_datetime(year, 4, 1))
    last_financial_year_start = to_utc(ct_datetime(year - 1, 4, 1))
    financial_year_finish = to_utc(ct_datetime(year + 1, 3, 31, 23, 30))

    est_triad_kws = []
    earliest_triad = None
    for dt in get_file_rates(ds.caches, "triad_dates",
                             last_financial_year_start)["triad_dates"]:
        triad_hh = None
        earliest_triad = hh_min(earliest_triad, dt)
        try:
            d = next(ds.get_data_sources(dt, dt, financial_year_start))
            chellow.duos.duos_vb(d)
            triad_hh = d.hh_data[0]

            while dt < financial_year_start:
                dt += relativedelta(years=1)

            for d in ds.get_data_sources(dt, dt, financial_year_start):
                chellow.duos.duos_vb(d)
                datum = d.hh_data[0]
                triad_hh["laf"] = datum["laf"]
                triad_hh["gsp-kw"] = datum["laf"] * triad_hh["msp-kw"]
        except StopIteration:
            triad_hh = {
                "hist-start": dt,
                "msp-kw": 0,
                "start-date": dt,
                "status": "before start of MPAN",
                "laf": 1,
                "gsp-kw": 0,
            }
        est_triad_kws.append(triad_hh)

    if ds.site is None:
        era = ds.supply.find_era_at(ds.sess, earliest_triad)
        if (era is None
                or era.get_channel(ds.sess, ds.is_import, "ACTIVE") is None
                and est_kw is None):
            est_kw = 0.85 * max(datum["msp-kwh"] for datum in ds.hh_data) * 2
        if est_kw is not None:
            for est_datum in est_triad_kws:
                est_datum["msp-kw"] = est_kw
                est_datum["gsp-kw"] = est_datum["msp-kw"] * est_datum["laf"]

    gsp_kw = 0
    for i, triad_hh in enumerate(est_triad_kws):
        triad_prefix = "triad-estimate-" + str(i + 1)
        hh[triad_prefix + "-date"] = triad_hh["hist-start"]
        hh[triad_prefix + "-msp-kw"] = triad_hh["msp-kw"]
        hh[triad_prefix + "-status"] = triad_hh["status"]
        hh[triad_prefix + "-laf"] = triad_hh["laf"]
        hh[triad_prefix + "-gsp-kw"] = triad_hh["gsp-kw"]
        gsp_kw += triad_hh["gsp-kw"]

    hh["triad-estimate-gsp-kw"] = gsp_kw / 3
    polarity = "import" if ds.llfc.is_import else "export"
    gsp_group_code = ds.gsp_group_code
    rate = float(
        get_file_rates(
            ds.caches, "triad_rates",
            month_start)["triad_gbp_per_gsp_kw"][polarity][gsp_group_code])

    hh["triad-estimate-rate"] = rate

    est_triad_gbp = hh["triad-estimate-rate"] * hh["triad-estimate-gsp-kw"]

    if rate_period == "monthly":
        total_intervals = 12

        est_intervals = 1
        hh["triad-estimate-months"] = est_intervals
    else:
        dt = financial_year_start
        total_intervals = 0
        while dt <= financial_year_finish:
            total_intervals += 1
            dt += relativedelta(days=1)

        est_intervals = 0
        for d in ds.get_data_sources(month_start, month_finish):
            for h in d.hh_data:
                if h["ct-decimal-hour"] == 0:
                    est_intervals += 1

        hh["triad-estimate-days"] = est_intervals

    hh["triad-estimate-gbp"] = est_triad_gbp / total_intervals * est_intervals

    if hh["ct-month"] == 3:
        triad_kws = []
        for t_date in get_file_rates(ds.caches, "triad_dates",
                                     month_start)["triad_dates"]:
            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (ds.supplier_contract is None
                        or d.supplier_contract == ds.supplier_contract):
                    chellow.duos.duos_vb(d)
                    thh = d.hh_data[0]
                else:
                    thh = {
                        "hist-start": t_date,
                        "msp-kw": 0,
                        "start-date": t_date,
                        "status": "before contract",
                        "laf": "before contract",
                        "gsp-kw": 0,
                    }
            except StopIteration:
                thh = {
                    "hist-start": t_date,
                    "msp-kw": 0,
                    "start-date": t_date,
                    "status": "before start of supply",
                    "laf": "before start of supply",
                    "gsp-kw": 0,
                }

            while t_date < financial_year_start:
                t_date += relativedelta(years=1)

            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (ds.supplier_contract is None
                        or d.supplier_contract == ds.supplier_contract):
                    chellow.duos.duos_vb(d)
                    thh["laf"] = d.hh_data[0]["laf"]
                    thh["gsp-kw"] = thh["laf"] * thh["msp-kw"]
            except StopIteration:
                pass

            triad_kws.append(thh)
        gsp_kw = 0

        for i, triad_hh in enumerate(triad_kws):
            pref = "triad-actual-" + str(i + 1)
            hh[pref + "-date"] = triad_hh["start-date"]
            hh[pref + "-msp-kw"] = triad_hh["msp-kw"]
            hh[pref + "-status"] = triad_hh["status"]
            hh[pref + "-laf"] = triad_hh["laf"]
            hh[pref + "-gsp-kw"] = triad_hh["gsp-kw"]
            gsp_kw += triad_hh["gsp-kw"]

        hh["triad-actual-gsp-kw"] = gsp_kw / 3
        polarity = "import" if ds.llfc.is_import else "export"
        gsp_group_code = ds.gsp_group_code
        tot_rate = 0
        for start_date, finish_date, script in get_file_scripts("triad_rates"):
            if start_date <= financial_year_finish and not hh_before(
                    finish_date, financial_year_start):
                start_month = to_ct(start_date).month
                if start_month < 4:
                    start_month += 12

                if finish_date is None:
                    finish_month = 3
                else:
                    finish_month = to_ct(finish_date).month

                if finish_month < 4:
                    finish_month += 12

                rt = get_file_rates(
                    ds.caches, "triad_rates", start_date
                )["triad_gbp_per_gsp_kw"][polarity][gsp_group_code]
                tot_rate += (finish_month - start_month + 1) * float(rt)

        rate = tot_rate / 12
        hh["triad-actual-rate"] = rate

        hh["triad-actual-gbp"] = hh["triad-actual-rate"] * hh[
            "triad-actual-gsp-kw"]

        era = ds.supply.find_era_at(ds.sess, month_finish)
        est_intervals = 0

        interval = (relativedelta(
            months=1) if rate_period == "monthly" else relativedelta(days=1))

        dt = month_finish
        while era is not None and dt > financial_year_start:
            est_intervals += 1
            dt -= interval
            if hh_after(dt, era.finish_date):
                era = ds.supply.find_era_at(ds.sess, dt)

        if rate_period == "monthly":
            hh["triad-all-estimates-months"] = est_intervals
        else:
            hh["triad-all-estimates-days"] = est_intervals
        hh["triad-all-estimates-gbp"] = (est_triad_gbp / total_intervals *
                                         est_intervals * -1)
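
The example above estimates the triad charge by taking the mean of the three
estimated GSP peaks, multiplying by the published triad rate, and spreading the
result across the billing period (one twelfth per month, or day by day for
daily rate periods); when the March month is reached the actual triads are
evaluated and the accumulated estimates are reversed. A minimal standalone
sketch of the apportionment step, with illustrative names and figures that are
not part of Chellow:

def apportion_triad_estimate(rate_gbp_per_gsp_kw, est_gsp_kws, est_intervals,
                             total_intervals):
    # The estimated GSP kW is the mean of the three estimated triad peaks.
    est_gsp_kw = sum(est_gsp_kws) / len(est_gsp_kws)
    est_triad_gbp = rate_gbp_per_gsp_kw * est_gsp_kw
    # Monthly billing charges 1/12 per month; daily billing charges
    # days-in-month / days-in-financial-year.
    return est_triad_gbp / total_intervals * est_intervals

# e.g. one month of a twelve-month year at 30 GBP/kW with estimated peaks of
# 100, 95 and 105 kW gives a 250 GBP instalment.
instalment = apportion_triad_estimate(30.0, [100, 95, 105], 1, 12)
assert round(instalment, 2) == 250.0
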
Example #28
def content(contract_id, end_year, end_month, months, user):
    caches = {}
    sess = f = supply_source = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)

        finish_date = utc_datetime(end_year, end_month, 1) + MONTH - HH
        start_date = utc_datetime(end_year, end_month,
                                  1) - relativedelta(months=months - 1)

        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            'dc_virtual_bills.csv', user)

        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')

        bill_titles = chellow.computer.contract_func(caches, contract,
                                                     'virtual_bill_titles')()
        header_titles = [
            'Import MPAN Core', 'Export MPAN Core', 'Start Date', 'Finish Date'
        ]

        vb_func = chellow.computer.contract_func(caches, contract,
                                                 'virtual_bill')

        writer.writerow(header_titles + bill_titles)

        for era in sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract).options(joinedload(
                    Era.channels)).order_by(Era.supply_id):

            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ''
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core

            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = '' if exp_mpan_core is None else exp_mpan_core

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)

            vals = [
                imp_mpan_core_str, exp_mpan_core_str,
                hh_format(chunk_start),
                hh_format(chunk_finish)
            ]

            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, is_import,
                caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill

            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]

            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))

            writer.writerow(vals)

            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = 'Problem '
        if supply_source is not None:
            msg += "with supply " + supply_source.mpan_core + \
                " starting at " + hh_format(supply_source.start_date) + " "
        msg += str(e)
        writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + '\n'
        f.write(msg)
    finally:
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
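
Example #28 writes one CSV row per era: the fixed columns first, then the
contract's virtual-bill titles in order, and finally any keys the virtual bill
returned that weren't in the advertised titles, appended as name/value pairs so
nothing is silently dropped. A minimal sketch of that row-building pattern
(build_row is an illustrative helper, not Chellow API):

def build_row(fixed_vals, bill_titles, bill):
    bill = dict(bill)  # work on a copy so the caller's bill isn't mutated
    vals = list(fixed_vals)
    for title in bill_titles:
        vals.append(bill.pop(title, None))  # advertised columns, in order
    for k in sorted(bill):  # leftover keys become trailing name/value pairs
        vals.extend([k, bill[k]])
    return vals

row = build_row(
    ['22 0000 0000 000', '', '2021-04-01 00:00', '2021-04-30 23:30'],
    ['net-gbp', 'problem'],
    {'net-gbp': 12.5, 'problem': '', 'unexpected-key': 3},
)
# -> the four fixed columns, 12.5, '', then 'unexpected-key', 3
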
Example #29
def content(running_name, finished_name, date, supply_id, mpan_cores):
    sess = f = None
    try:
        sess = Session()
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "Date",
            "Import MPAN Core",
            "Export MPAN Core",
            "Physical Site Id",
            "Physical Site Name",
            "Other Site Ids",
            "Other Site Names",
            "Supply Id",
            "Source",
            "Generator Type",
            "GSP Group",
            "DNO Name",
            "Voltage Level",
            "Is Substations",
            "Metering Type",
            "Mandatory HH",
            "PC",
            "MTC",
            "CoP",
            "SSC Code",
            "SSC Description",
            "Energisation Status",
            "Number Of Registers",
            "MOP Contract",
            "Mop Account",
            "DC Contract",
            "DC Account",
            "Meter Serial Number",
            "Meter Installation Date",
            "Latest Normal Meter Read Date",
            "Latest Normal Meter Read Type",
            "Latest DC Bill Date",
            "Latest MOP Bill Date",
            "Supply Start Date",
            "Supply Finish Date",
            "Properties",
            "Import ACTIVE?",
            "Import REACTIVE_IMPORT?",
            "Import REACTIVE_EXPORT?",
            "Export ACTIVE?",
            "Export REACTIVE_IMPORT?",
            "Export REACTIVE_EXPORT?",
            "Import Agreed Supply Capacity (kVA)",
            "Import LLFC Code",
            "Import LLFC Description",
            "Import Supplier Contract",
            "Import Supplier Account",
            "Import Mandatory kW",
            "Latest Import Supplier Bill Date",
            "Export Agreed Supply Capacity (kVA)",
            "Export LLFC Code",
            "Export LLFC Description",
            "Export Supplier Contract",
            "Export Supplier Account",
            "Export Mandatory kW",
            "Latest Export Supplier Bill Date",
        )
        writer.writerow(titles)

        NORMAL_READ_TYPES = ("N", "C", "N3")
        year_start = date + HH - relativedelta(years=1)

        era_ids = (sess.query(Era.id).filter(
            Era.start_date <= date,
            or_(Era.finish_date == null(), Era.finish_date >= date),
        ).order_by(Era.supply_id))

        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)

            era_ids = era_ids.filter(Era.supply == supply)

        if mpan_cores is not None:
            era_ids = era_ids.filter(
                or_(Era.imp_mpan_core.in_(mpan_cores),
                    Era.exp_mpan_core.in_(mpan_cores)))

        for (era_id, ) in era_ids:

            era, supply, generator_type = (sess.query(
                Era, Supply, GeneratorType).join(
                    Supply, Era.supply_id == Supply.id).outerjoin(
                        GeneratorType,
                        Supply.generator_type_id == GeneratorType.id).filter(
                            Era.id == era_id).options(
                                joinedload(Era.channels),
                                joinedload(Era.cop),
                                joinedload(Era.dc_contract),
                                joinedload(Era.exp_llfc),
                                joinedload(Era.exp_supplier_contract),
                                joinedload(Era.imp_llfc),
                                joinedload(Era.imp_supplier_contract),
                                joinedload(Era.mop_contract),
                                joinedload(Era.mtc),
                                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                                joinedload(Era.pc),
                                joinedload(Era.site_eras).joinedload(
                                    SiteEra.site),
                                joinedload(Era.ssc),
                                joinedload(Era.energisation_status),
                                joinedload(Era.supply).joinedload(
                                    Supply.source),
                                joinedload(Era.supply).joinedload(
                                    Supply.gsp_group),
                                joinedload(Era.supply).joinedload(Supply.dno),
                            ).one())

            site_codes = []
            site_names = []
            for site_era in era.site_eras:
                if site_era.is_physical:
                    physical_site = site_era.site
                else:
                    site = site_era.site
                    site_codes.append(site.code)
                    site_names.append(site.name)

            sup_eras = (sess.query(Era).filter(Era.supply == supply).order_by(
                Era.start_date).all())
            supply_start_date = sup_eras[0].start_date
            supply_finish_date = sup_eras[-1].finish_date

            if era.imp_mpan_core is None:
                voltage_level_code = era.exp_llfc.voltage_level.code
                is_substation = era.exp_llfc.is_substation
            else:
                voltage_level_code = era.imp_llfc.voltage_level.code
                is_substation = era.imp_llfc.is_substation

            if generator_type is None:
                generator_type_str = ""
            else:
                generator_type_str = generator_type.code

            metering_type = era.meter_category

            if metering_type in ("nhh", "amr"):
                latest_prev_normal_read = (
                    sess.query(RegisterRead).join(Bill).join(
                        RegisterRead.previous_type).filter(
                            ReadType.code.in_(NORMAL_READ_TYPES),
                            RegisterRead.previous_date <= date,
                            Bill.supply_id == supply.id,
                        ).order_by(RegisterRead.previous_date.desc()).options(
                            joinedload(RegisterRead.previous_type)).first())

                latest_pres_normal_read = (
                    sess.query(RegisterRead).join(Bill).join(
                        RegisterRead.present_type).filter(
                            ReadType.code.in_(NORMAL_READ_TYPES),
                            RegisterRead.present_date <= date,
                            Bill.supply == supply,
                        ).order_by(RegisterRead.present_date.desc()).options(
                            joinedload(RegisterRead.present_type)).first())

                if latest_prev_normal_read is None and latest_pres_normal_read is None:
                    latest_normal_read_date = None
                    latest_normal_read_type = None
                elif (latest_pres_normal_read is not None
                      and latest_prev_normal_read is None):
                    latest_normal_read_date = latest_pres_normal_read.present_date
                    latest_normal_read_type = latest_pres_normal_read.present_type.code
                elif (latest_pres_normal_read is None
                      and latest_prev_normal_read is not None):
                    latest_normal_read_date = latest_prev_normal_read.previous_date
                    latest_normal_read_type = latest_prev_normal_read.previous_type.code
                elif (latest_pres_normal_read.present_date >
                      latest_prev_normal_read.previous_date):
                    latest_normal_read_date = latest_pres_normal_read.present_date
                    latest_normal_read_type = latest_pres_normal_read.present_type.code
                else:
                    latest_normal_read_date = latest_prev_normal_read.previous_date
                    latest_normal_read_type = latest_prev_normal_read.previous_type.code
                if latest_normal_read_date is not None:
                    latest_normal_read_date = hh_format(
                        latest_normal_read_date)

            else:
                latest_normal_read_date = metering_type
                latest_normal_read_type = None

            mop_contract = era.mop_contract
            mop_contract_name = mop_contract.name
            mop_account = era.mop_account
            latest_mop_bill_date = (sess.query(
                Bill.finish_date).join(Batch).filter(
                    Bill.start_date <= date,
                    Bill.supply == supply,
                    Batch.contract == mop_contract,
                ).order_by(Bill.finish_date.desc()).first())

            if latest_mop_bill_date is not None:
                latest_mop_bill_date = hh_format(latest_mop_bill_date[0])

            dc_contract = era.dc_contract
            dc_contract_name = dc_contract.name
            dc_account = era.dc_account
            latest_dc_bill_date = (sess.query(
                Bill.finish_date).join(Batch).filter(
                    Bill.start_date <= date,
                    Bill.supply == supply,
                    Batch.contract == dc_contract,
                ).order_by(Bill.finish_date.desc()).first())

            if latest_dc_bill_date is not None:
                latest_dc_bill_date = hh_format(latest_dc_bill_date[0])

            channel_values = []
            for imp_related in [True, False]:
                for channel_type in CHANNEL_TYPES:
                    if era.find_channel(sess, imp_related,
                                        channel_type) is None:
                        channel_values.append("false")
                    else:
                        channel_values.append("true")

            imp_avg_months = None
            exp_avg_months = None
            for is_import in [True, False]:
                if metering_type == "nhh":
                    continue

                params = {
                    "supply_id": supply.id,
                    "year_start": year_start,
                    "year_finish": date,
                    "is_import": is_import,
                }
                month_mds = tuple(md[0] * 2 for md in sess.execute(
                    """

    select max(hh_datum.value) as md
    from hh_datum join channel on (hh_datum.channel_id = channel.id)
        join era on (channel.era_id = era.id)
    where era.supply_id = :supply_id and hh_datum.start_date >= :year_start
        and hh_datum.start_date <= :year_finish
        and channel.channel_type = 'ACTIVE'
        and channel.imp_related = :is_import
    group by extract(month from (hh_datum.start_date at time zone 'utc'))
    order by md desc
    limit 3

    """,
                    params=params,
                ))

                avg_months = sum(month_mds)
                if len(month_mds) > 0:
                    avg_months /= len(month_mds)
                    if is_import:
                        imp_avg_months = avg_months
                    else:
                        exp_avg_months = avg_months

            if (imp_avg_months is not None
                    and imp_avg_months > 100) or (exp_avg_months is not None
                                                  and exp_avg_months > 100):
                mandatory_hh = "yes"
            else:
                mandatory_hh = "no"

            imp_latest_supplier_bill_date = None
            exp_latest_supplier_bill_date = None
            for is_import in (True, False):
                for er in (sess.query(Era).filter(
                        Era.supply == era.supply,
                        Era.start_date <= date).order_by(
                            Era.start_date.desc())):
                    if is_import:
                        if er.imp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.imp_supplier_contract
                    else:
                        if er.exp_mpan_core is None:
                            break
                        else:
                            supplier_contract = er.exp_supplier_contract

                    latest_bill_date = (sess.query(
                        Bill.finish_date).join(Batch).filter(
                            Bill.finish_date >= er.start_date,
                            Bill.finish_date <= hh_min(er.finish_date, date),
                            Bill.supply == supply,
                            Batch.contract == supplier_contract,
                        ).order_by(Bill.finish_date.desc()).first())

                    if latest_bill_date is not None:
                        latest_bill_date = hh_format(latest_bill_date[0])

                        if is_import:
                            imp_latest_supplier_bill_date = latest_bill_date
                        else:
                            exp_latest_supplier_bill_date = latest_bill_date
                        break

            meter_installation_date = (sess.query(func.min(
                Era.start_date)).filter(Era.supply == era.supply,
                                        Era.msn == era.msn).one()[0])

            ssc = era.ssc
            if ssc is None:
                ssc_code = ssc_description = num_registers = None
            else:
                ssc_code, ssc_description = ssc.code, ssc.description
                num_registers = (sess.query(MeasurementRequirement).filter(
                    MeasurementRequirement.ssc == ssc).count())

            vals = ([
                date,
                era.imp_mpan_core,
                era.exp_mpan_core,
                physical_site.code,
                physical_site.name,
                ", ".join(site_codes),
                ", ".join(site_names),
                supply.id,
                supply.source.code,
                generator_type_str,
                supply.gsp_group.code,
                supply.dno.dno_code,
                voltage_level_code,
                is_substation,
                metering_type,
                mandatory_hh,
                era.pc.code,
                era.mtc.code,
                era.cop.code,
                ssc_code,
                ssc_description,
                era.energisation_status.code,
                num_registers,
                mop_contract_name,
                mop_account,
                dc_contract_name,
                dc_account,
                era.msn,
                meter_installation_date,
                latest_normal_read_date,
                latest_normal_read_type,
                latest_dc_bill_date,
                latest_mop_bill_date,
                supply_start_date,
                supply_finish_date,
                era.properties,
            ] + channel_values + [
                era.imp_sc,
                None if era.imp_llfc is None else era.imp_llfc.code,
                None if era.imp_llfc is None else era.imp_llfc.description,
                None if era.imp_supplier_contract is None else
                era.imp_supplier_contract.name,
                era.imp_supplier_account,
                imp_avg_months,
                imp_latest_supplier_bill_date,
            ] + [
                era.exp_sc,
                None if era.exp_llfc is None else era.exp_llfc.code,
                None if era.exp_llfc is None else era.exp_llfc.description,
                None if era.exp_supplier_contract is None else
                era.exp_supplier_contract.name,
                era.exp_supplier_account,
                exp_avg_months,
                exp_latest_supplier_bill_date,
            ])
            writer.writerow([csv_make_val(v) for v in vals])

            # Avoid a long-running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
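
The 'Mandatory HH' column above is derived from the last year of half-hourly
data: the SQL picks the three largest monthly maximum ACTIVE half-hour values,
doubles them from kWh to kW, and the supply is flagged as mandatory
half-hourly if either the import or the export average exceeds 100 kW. A
standalone sketch of that rule (the helper name and inputs are illustrative
only):

def is_mandatory_hh(monthly_max_kwh, threshold_kw=100):
    # Convert each monthly maximum half-hour (kWh) to kW and keep the top 3.
    top_three_kw = sorted((v * 2 for v in monthly_max_kwh), reverse=True)[:3]
    if not top_three_kw:
        return False
    return sum(top_three_kw) / len(top_three_kw) > threshold_kw

assert is_mandatory_hh([60, 55, 48, 40])      # 120, 110, 96 kW average > 100
assert not is_mandatory_hh([45, 44, 43, 42])  # 90, 88, 86 kW average <= 100
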
Example #30
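This example builds the scenario runner's spreadsheet output. It opens by
getting or creating a set of nested caches on report_context with
try/except KeyError; that idiom is equivalent to dict.setdefault, as in this
small sketch (illustrative only, not Chellow code):

report_context = {}
comp = report_context.setdefault('computer', {})  # rate engine caches
rate_cache = comp.setdefault('rates', {})  # per-contract local rates
ind_cont = report_context.setdefault('contract_names', {})  # industry rates
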
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user,
            compression):
    now = utc_datetime_now()
    report_context = {}

    try:
        comp = report_context['computer']
    except KeyError:
        comp = report_context['computer'] = {}

    try:
        rate_cache = comp['rates']
    except KeyError:
        rate_cache = comp['rates'] = {}

    try:
        ind_cont = report_context['contract_names']
    except KeyError:
        ind_cont = report_context['contract_names'] = {}

    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        start_date = scenario_props['scenario_start']
        if start_date is None:
            start_date = utc_datetime(now.year, now.month, 1)
        else:
            start_date = to_utc(start_date)

        base_name.append(
            hh_format(start_date).replace(' ',
                                          '_').replace(':',
                                                       '').replace('-', ''))

        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'forecast_from' in scenario_props:
            forecast_from = scenario_props['forecast_from']
        else:
            forecast_from = None

        if forecast_from is None:
            forecast_from = chellow.computer.forecast_date()
        else:
            forecast_from = to_utc(forecast_from)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)

        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []

        for rate_script in get_map_list(scenario_props, 'local_rates'):
            contract_id = rate_script['contract_id']
            try:
                cont_cache = rate_cache[contract_id]
            except KeyError:
                cont_cache = rate_cache[contract_id] = {}

            try:
                rate_script_start = rate_script['start_date']
            except KeyError:
                raise BadRequest(
                    "Problem in the scenario properties. Can't find the " +
                    "'start_date' key of the contract " + str(contract_id) +
                    " in the 'local_rates' map.")

            for dt in hh_range(report_context, rate_script_start,
                               rate_script['finish_date']):
                cont_cache[dt] = PropDict('scenario properties',
                                          rate_script['script'])

        for rate_script in get_map_list(scenario_props, 'industry_rates'):
            contract_name = rate_script['contract_name']
            try:
                cont_cache = ind_cont[contract_name]
            except KeyError:
                cont_cache = ind_cont[contract_name] = {}

            rfinish = rate_script['finish_date']
            if rfinish is None:
                raise BadRequest("For the industry rate " + contract_name +
                                 " the "
                                 "finish_date can't be null.")
            for dt in hh_range(report_context, rate_script['start_date'],
                               rfinish):
                cont_cache[dt] = PropDict('scenario properties',
                                          rate_script['script'])

        era_maps = scenario_props.get('era_maps', {})

        scenario_hh = scenario_props.get('hh_data', {})

        era_header_titles = [
            'creation-date', 'imp-mpan-core', 'imp-supplier-contract',
            'exp-mpan-core', 'exp-supplier-contract', 'metering-type',
            'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month'
        ]
        site_header_titles = [
            'creation-date', 'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types'
        ]
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp'
        ]

        title_dict = {}
        for cont_type, con_attr in (('mop', Era.mop_contract),
                                    ('dc', Era.dc_contract),
                                    ('imp-supplier',
                                     Era.imp_supplier_contract),
                                    ('exp-supplier',
                                     Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr).join(Era.supply). \
                join(Source).filter(
                    Era.start_date <= finish_date, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles')
                if title_func is None:
                    raise Exception("For the contract " + cont.name +
                                    " there doesn't seem to be a "
                                    "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). \
            join(Era).filter(
                Era.start_date <= finish_date, or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)
            ).order_by(Tpr.code).distinct()
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['imp-supplier'].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['exp-supplier'].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] + [None] +
            ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        deltas = {}
        for site in sites:
            try:
                site_scenario_hh = scenario_hh[site.code]
            except KeyError:
                site_scenario_hh = scenario_hh[site.code] = {}

            site_deltas = deltas[site.code] = {'hhs': {}}
            delts = site_deltas['supply_deltas'] = {}
            for is_import in (True, False):
                delts[is_import] = {}
                for src in ('gen', 'net', 'gen-net', '3rd-party',
                            '3rd-party-reverse', 'sub'):
                    delts[is_import][src] = {'site': {}}

            earliest_delta = to_utc(Datetime.max)
            latest_delta = to_utc(Datetime.min)

            found_hh = False
            for typ in ('used', 'generated', 'parasitic', 'gen_net'):
                hh_str = site_scenario_hh.get(typ, '')
                hh_data = site_scenario_hh[typ] = {}
                for row in csv.reader(StringIO(hh_str)):
                    cells = [cell.strip() for cell in row]
                    if len(''.join(cells)) == 0:
                        continue

                    if len(cells) != 2:
                        raise BadRequest(
                            "Can't interpret the row " + str(cells) +
                            "; it should be of the form 'timestamp, kWh'.")

                    date_str, kw_str = cells
                    ts = parse_hh_start(date_str)
                    earliest_delta = min(ts, earliest_delta)
                    latest_delta = max(ts, latest_delta)
                    hh_data[ts] = float(kw_str)
                    found_hh = True

            if not found_hh:
                continue

            scenario_used = site_scenario_hh['used']
            scenario_generated = site_scenario_hh['generated']
            scenario_parasitic = site_scenario_hh['parasitic']
            scenario_gen_net = site_scenario_hh['gen_net']

            month_start = utc_datetime(earliest_delta.year,
                                       earliest_delta.month)
            while month_start <= latest_delta:
                month_finish = month_start + relativedelta(months=1) - HH
                chunk_start = hh_max(month_start, earliest_delta)
                chunk_finish = hh_min(month_finish, latest_delta)
                site_ds = chellow.computer.SiteSource(sess, site, chunk_start,
                                                      chunk_finish,
                                                      forecast_from,
                                                      report_context)
                hh_map = dict((h['start-date'], h) for h in site_ds.hh_data)

                for era in sess.query(Era).join(SiteEra).join(Pc).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.imp_mpan_core != null(), Pc.code != '00',
                        Era.start_date <= chunk_finish,
                        or_(Era.finish_date == null(),
                            Era.finish_date >= chunk_start),
                        ~Era.channels.any()):

                    if supply_id is not None and era.supply_id != supply_id:
                        continue

                    ss_start = hh_max(era.start_date, chunk_start)
                    ss_finish = hh_min(era.finish_date, chunk_finish)

                    ss = SupplySource(sess, ss_start, ss_finish, forecast_from,
                                      era, True, report_context)

                    for hh in ss.hh_data:
                        sdatum = hh_map[hh['start-date']]
                        sdatum['import-net-kwh'] += hh['msp-kwh']
                        sdatum['used-kwh'] += hh['msp-kwh']

                for era in sess.query(Era).join(SiteEra).join(Pc).join(
                        Supply).join(Source).filter(
                            SiteEra.site == site,
                            SiteEra.is_physical == true(),
                            Era.imp_mpan_core != null(),
                            Era.start_date <= chunk_finish,
                            or_(Era.finish_date == null(),
                                Era.finish_date >= chunk_start),
                            Source.code == 'gen-net'):

                    if supply_id is not None and era.supply_id != supply_id:
                        continue

                    ss_start = hh_max(era.start_date, chunk_start)
                    ss_finish = hh_min(era.finish_date, chunk_finish)

                    ss = SupplySource(sess, ss_start, ss_finish, forecast_from,
                                      era, False, report_context)

                    for hh in ss.hh_data:
                        sdatum = hh_map[hh['start-date']]
                        try:
                            sdatum['gen-net-kwh'] += hh['msp-kwh']
                        except KeyError:
                            sdatum['gen-net-kwh'] = hh['msp-kwh']

                for hh in site_ds.hh_data:
                    hh_start = hh['start-date']
                    if hh_start in scenario_used:
                        used_delt = scenario_used[hh_start] - hh['used-kwh']
                        imp_net_delt = 0
                        exp_net_delt = 0

                        if used_delt < 0:
                            diff = hh['import-net-kwh'] + used_delt
                            if diff < 0:
                                imp_net_delt -= hh['import-net-kwh']
                                exp_net_delt -= diff
                            else:
                                imp_net_delt += used_delt
                        else:
                            diff = hh['export-net-kwh'] - used_delt
                            if diff < 0:
                                exp_net_delt -= hh['export-net-kwh']
                                imp_net_delt -= diff
                            else:
                                exp_net_delt -= used_delt

                        try:
                            delts[False]['net']['site'][hh_start] += \
                                exp_net_delt
                        except KeyError:
                            delts[False]['net']['site'][hh_start] = \
                                exp_net_delt

                        try:
                            delts[True]['net']['site'][hh_start] += \
                                imp_net_delt
                        except KeyError:
                            delts[True]['net']['site'][hh_start] = imp_net_delt

                        hh['import-net-kwh'] += imp_net_delt
                        hh['export-net-kwh'] += exp_net_delt
                        hh['used-kwh'] += used_delt
                        hh['msp-kwh'] -= exp_net_delt

                    if hh_start in scenario_generated:
                        imp_gen_delt = scenario_generated[hh_start] - \
                            hh['import-gen-kwh']
                        imp_net_delt = 0
                        exp_net_delt = 0

                        if imp_gen_delt < 0:
                            diff = hh['export-net-kwh'] + imp_gen_delt
                            if diff < 0:
                                exp_net_delt -= hh['export-net-kwh']
                                imp_net_delt -= diff
                            else:
                                exp_net_delt += imp_gen_delt
                        else:
                            diff = hh['import-net-kwh'] - imp_gen_delt
                            if diff < 0:
                                imp_net_delt -= hh['import-net-kwh']
                                exp_net_delt -= diff
                            else:
                                imp_net_delt -= imp_gen_delt

                        try:
                            delts[True]['gen']['site'][hh_start] += \
                                imp_gen_delt
                        except KeyError:
                            delts[True]['gen']['site'][hh_start] = imp_gen_delt

                        try:
                            delts[False]['net']['site'][hh_start] += \
                                exp_net_delt
                        except KeyError:
                            delts[False]['net']['site'][hh_start] = \
                                exp_net_delt

                        try:
                            delts[True]['net']['site'][hh_start] += \
                                imp_net_delt
                        except KeyError:
                            delts[True]['net']['site'][hh_start] = imp_net_delt

                        hh['import-net-kwh'] += imp_net_delt
                        hh['export-net-kwh'] += exp_net_delt
                        hh['import-gen-kwh'] += imp_gen_delt
                        hh['msp-kwh'] -= imp_net_delt

                    if hh_start in scenario_parasitic:
                        exp_gen_delt = scenario_parasitic[hh_start] - \
                            hh['export-gen-kwh']
                        imp_net_delt = 0
                        exp_net_delt = 0

                        if exp_gen_delt < 0:
                            diff = hh['import-net-kwh'] + exp_gen_delt
                            if diff < 0:
                                imp_net_delt -= hh['import-net-kwh']
                                exp_net_delt -= diff
                            else:
                                imp_net_delt += exp_gen_delt
                        else:
                            diff = hh['export-net-kwh'] - exp_gen_delt
                            if diff < 0:
                                exp_net_delt -= hh['export-net-kwh']
                                imp_net_delt -= diff
                            else:
                                exp_net_delt -= exp_gen_delt

                        try:
                            delts[False]['gen']['site'][hh_start] += \
                                exp_gen_delt
                        except KeyError:
                            delts[False]['gen']['site'][hh_start] = \
                                exp_gen_delt

                        try:
                            delts[False]['net']['site'][hh_start] += \
                                exp_net_delt
                        except KeyError:
                            delts[False]['net']['site'][hh_start] = \
                                exp_net_delt

                        try:
                            delts[True]['net']['site'][hh_start] += \
                                imp_net_delt
                        except KeyError:
                            delts[True]['net']['site'][hh_start] = imp_net_delt

                        hh['import-net-kwh'] += imp_net_delt
                        hh['export-net-kwh'] += exp_net_delt
                        hh['export-gen-kwh'] += exp_gen_delt
                        hh['msp-kwh'] -= imp_net_delt

                    if hh_start in scenario_gen_net:
                        gen_net_delt = scenario_gen_net[hh_start] - \
                            hh['gen-net-kwh']

                        try:
                            delts[False]['gen-net']['site'][hh_start] += \
                                gen_net_delt
                        except KeyError:
                            delts[False]['gen-net']['site'][hh_start] = \
                                gen_net_delt

                        hh['import-gen-kwh'] += gen_net_delt
                        hh['export-net-kwh'] += gen_net_delt

                    site_deltas['hhs'][hh_start] = hh
                month_start += relativedelta(months=1)

        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                calcs = []
                for era in sess.query(Era).join(SiteEra).join(Pc).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.start_date <= month_finish,
                        or_(Era.finish_date == null(),
                            Era.finish_date >= month_start)).options(
                                joinedload(Era.ssc),
                                joinedload(Era.dc_contract),
                                joinedload(Era.mop_contract),
                                joinedload(Era.imp_supplier_contract),
                                joinedload(Era.exp_supplier_contract),
                                joinedload(Era.channels),
                                joinedload(Era.imp_llfc).joinedload(
                                    Llfc.voltage_level),
                                joinedload(Era.exp_llfc).joinedload(
                                    Llfc.voltage_level), joinedload(Era.cop),
                                joinedload(Era.supply).joinedload(Supply.dno),
                                joinedload(Era.supply).joinedload(
                                    Supply.gsp_group),
                                joinedload(Era.supply).joinedload(
                                    Supply.source),
                                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                                joinedload(Era.pc),
                                joinedload(Era.site_eras)).order_by(Pc.code):

                    supply = era.supply
                    if supply.generator_type is not None:
                        site_gen_types.add(supply.generator_type.code)

                    if supply_id is not None and supply.id != supply_id:
                        continue

                    ss_start = hh_max(era.start_date, month_start)
                    ss_finish = hh_min(era.finish_date, month_finish)

                    if era.imp_mpan_core is None:
                        imp_ss = None
                    else:
                        sup_deltas = site_deltas['supply_deltas'][True][
                            supply.source.code]

                        imp_ss = SupplySource(sess,
                                              ss_start,
                                              ss_finish,
                                              forecast_from,
                                              era,
                                              True,
                                              report_context,
                                              era_maps=era_maps,
                                              deltas=sup_deltas)

                    if era.exp_mpan_core is None:
                        exp_ss = None
                        measurement_type = imp_ss.measurement_type
                    else:
                        sup_deltas = site_deltas['supply_deltas'][False][
                            supply.source.code]

                        exp_ss = SupplySource(sess,
                                              ss_start,
                                              ss_finish,
                                              forecast_from,
                                              era,
                                              False,
                                              report_context,
                                              era_maps=era_maps,
                                              deltas=sup_deltas)
                        measurement_type = exp_ss.measurement_type

                    order = meter_order[measurement_type]
                    calcs.append((order, era.imp_mpan_core, era.exp_mpan_core,
                                  imp_ss, exp_ss))

                # Check if gen deltas haven't been consumed
                extra_sss = set()
                for is_imp in (True, False):
                    sup_deltas = site_deltas['supply_deltas'][is_imp]['gen']
                    if len(
                            list(t for t in sup_deltas['site']
                                 if month_start <= t <= month_finish)) > 0:
                        extra_sss.add(is_imp)

                displaced_era = chellow.computer.displaced_era(
                    sess,
                    report_context,
                    site,
                    month_start,
                    month_finish,
                    forecast_from,
                    has_scenario_generation=len(extra_sss) > 0)

                if len(extra_sss) > 0:
                    if True in extra_sss:
                        sup_deltas = site_deltas['supply_deltas'][True]['gen']
                        imp_ss_name = site.code + "_extra_gen_TRUE"
                        imp_ss = ScenarioSource(
                            sess, month_start, month_finish, True,
                            report_context, sup_deltas,
                            displaced_era.imp_supplier_contract, imp_ss_name)
                    else:
                        imp_ss_name = imp_ss = None
                    if False in extra_sss:
                        exp_ss_name = site.code + "_extra_gen_FALSE"
                        sup_deltas = site_deltas['supply_deltas'][False]['gen']
                        exp_ss = ScenarioSource(
                            sess, month_start, month_finish, False,
                            report_context, sup_deltas,
                            displaced_era.imp_supplier_contract, exp_ss_name)
                    else:
                        exp_ss_name = exp_ss = None

                    calcs.append((0, imp_ss_name, exp_ss_name, imp_ss, exp_ss))

                # Check if exp net deltas haven't been consumed
                sup_deltas = site_deltas['supply_deltas'][False]['net']
                if len(
                        list(t for t in sup_deltas['site']
                             if month_start <= t <= month_finish)) > 0:
                    ss_name = site.code + "_extra_net_export"
                    ss = SupplySource(sess,
                                      month_start,
                                      month_finish,
                                      forecast_from,
                                      displaced_era,
                                      False,
                                      report_context,
                                      era_maps=era_maps,
                                      deltas=sup_deltas)

                    calcs.append((0, None, ss_name, None, ss))

                site_ds = chellow.computer.SiteSource(sess,
                                                      site,
                                                      month_start,
                                                      month_finish,
                                                      forecast_from,
                                                      report_context,
                                                      displaced_era,
                                                      deltas=site_deltas)

                if displaced_era is not None and supply_id is None:
                    month_data = {}
                    for sname in ('import-net', 'export-net', 'import-gen',
                                  'export-gen', 'import-3rd-party',
                                  'export-3rd-party', 'msp', 'used',
                                  'used-3rd-party', 'billed-import-net'):
                        for xname in ('kwh', 'gbp'):
                            month_data[sname + '-' + xname] = 0

                    month_data['used-kwh'] = month_data['displaced-kwh'] = sum(
                        hh['msp-kwh'] for hh in site_ds.hh_data)

                    disp_supplier_contract = \
                        displaced_era.imp_supplier_contract
                    disp_vb_function = chellow.computer.contract_func(
                        report_context, disp_supplier_contract,
                        'displaced_virtual_bill')
                    if disp_vb_function is None:
                        raise BadRequest(
                            "The supplier contract " +
                            disp_supplier_contract.name +
                            " doesn't have the displaced_virtual_bill() "
                            "function.")
                    disp_vb_function(site_ds)
                    disp_supplier_bill = site_ds.supplier_bill

                    try:
                        gbp = disp_supplier_bill['net-gbp']
                    except KeyError:
                        gbp = 0
                        disp_supplier_bill['problem'] += 'For the supply ' + \
                            site_ds.mpan_core + ' the virtual bill ' + \
                            str(disp_supplier_bill) + ' from the contract ' + \
                            disp_supplier_contract.name + \
                            ' does not contain the net-gbp key.'

                    month_data['used-gbp'] = month_data['displaced-gbp'] = gbp

                    out = [
                        now, None, disp_supplier_contract.name, None, None,
                        displaced_era.meter_category, 'displaced', None, None,
                        None, None, site.code, site.name, '', month_finish
                    ] + [month_data[t] for t in summary_titles
                         ] + [None] + [None] * len(title_dict['mop']) + [
                             None
                         ] + [None] * len(title_dict['dc']) + [
                             None
                         ] + make_bill_row(title_dict['imp-supplier'],
                                           disp_supplier_bill)

                    era_rows.append(out)
                    for k, v in month_data.items():
                        site_month_data[k] += v

                for i, (order, imp_mpan_core, exp_mpan_core, imp_ss,
                        exp_ss) in enumerate(sorted(calcs, key=str)):
                    if imp_ss is None:
                        source_code = exp_ss.source_code
                        supply = exp_ss.supply
                    else:
                        source_code = imp_ss.source_code
                        supply = imp_ss.supply

                    site_sources.add(source_code)
                    month_data = {}
                    for name in ('import-net', 'export-net', 'import-gen',
                                 'export-gen', 'import-3rd-party',
                                 'export-3rd-party', 'displaced', 'used',
                                 'used-3rd-party', 'billed-import-net'):
                        for sname in ('kwh', 'gbp'):
                            month_data[name + '-' + sname] = 0

                    if imp_ss is not None:
                        imp_supplier_contract = imp_ss.supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = contract_func(
                                report_context, imp_supplier_contract,
                                'virtual_bill')
                            if import_vb_function is None:
                                raise BadRequest(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)

                        kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data)
                        imp_supplier_bill = imp_ss.supplier_bill

                        try:
                            gbp = imp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            imp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                imp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(imp_supplier_bill) + \
                                ' from the contract ' + \
                                imp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                            month_data['import-net-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['export-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == 'gen':
                            month_data['import-gen-kwh'] += kwh

                    if exp_ss is not None:
                        exp_supplier_contract = exp_ss.supplier_contract
                        if exp_supplier_contract is not None:
                            export_vb_function = contract_func(
                                report_context, exp_supplier_contract,
                                'virtual_bill')
                            export_vb_function(exp_ss)

                        kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)
                        exp_supplier_bill = exp_ss.supplier_bill
                        try:
                            gbp = exp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            exp_supplier_bill['problem'] += \
                                'For the supply ' + exp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(exp_supplier_bill) + \
                                ' from the contract ' + \
                                exp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['export-net-gbp'] += gbp
                            month_data['export-net-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['import-gen-kwh'] += kwh

                        elif source_code == '3rd-party':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == 'gen':
                            month_data['export-gen-kwh'] += kwh

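                    # The DC and MOP virtual bills are evaluated against
                    # whichever supply source exists, preferring the import
                    # side.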
                    sss = exp_ss if imp_ss is None else imp_ss
                    dc_contract = sss.dc_contract
                    if dc_contract is not None:
                        sss.contract_func(dc_contract, 'virtual_bill')(sss)
                    dc_bill = sss.dc_bill
                    gbp = dc_bill['net-gbp']

                    mop_contract = sss.mop_contract
                    if mop_contract is not None:
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                    mop_bill = sss.mop_bill
                    gbp += mop_bill['net-gbp']

                    if source_code in ('3rd-party', '3rd-party-reverse'):
                        month_data['import-3rd-party-gbp'] += gbp
                        month_data['used-3rd-party-gbp'] += gbp
                    else:
                        month_data['import-net-gbp'] += gbp
                    month_data['used-gbp'] += gbp

                    generator_type = sss.generator_type_code
                    if source_code in ('gen', 'gen-net'):
                        site_gen_types.add(generator_type)

                    era_category = sss.measurement_type
                    if CATEGORY_ORDER[site_category] < \
                            CATEGORY_ORDER[era_category]:
                        site_category = era_category

                    era_associates = set()
                    if mop_contract is not None:
                        era_associates.update({
                            s.site.code
                            for s in era.site_eras if not s.is_physical
                        })

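                        # Apportion each overlapping bill's kWh and net GBP
                        # by the fraction of the bill's duration that falls
                        # within this chunk; the extra half hour makes the
                        # finish timestamp inclusive.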
                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = (
                                bill_finish - bill_start).total_seconds() + \
                                (30 * 60)
                            overlap_duration = (
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)
                            ).total_seconds() + (30 * 60)
                            overlap_proportion = overlap_duration / \
                                bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)

                    if imp_ss is None:
                        imp_supplier_contract_name = None
                        pc_code = exp_ss.pc_code
                    else:
                        if imp_supplier_contract is None:
                            imp_supplier_contract_name = ''
                        else:
                            imp_supplier_contract_name = \
                                imp_supplier_contract.name
                        pc_code = imp_ss.pc_code

                    if exp_ss is None:
                        exp_supplier_contract_name = None
                    else:
                        if exp_supplier_contract is None:
                            exp_supplier_contract_name = ''
                        else:
                            exp_supplier_contract_name = \
                                exp_supplier_contract.name

                    out = [
                        now, imp_mpan_core, imp_supplier_contract_name,
                        exp_mpan_core, exp_supplier_contract_name,
                        era_category, source_code, generator_type,
                        sss.supply_name, sss.msn, pc_code, site.code,
                        site.name, ','.join(sorted(list(era_associates))),
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + \
                        make_bill_row(title_dict['mop'], mop_bill) + [None] + \
                        make_bill_row(title_dict['dc'], dc_bill)
                    if imp_ss is None:
                        out += [None] * (len(title_dict['imp-supplier']) + 1)
                    else:
                        out += [None] + make_bill_row(
                            title_dict['imp-supplier'], imp_supplier_bill)
                    if exp_ss is not None:
                        out += [None] + make_bill_row(
                            title_dict['exp-supplier'], exp_supplier_bill)

                    for k, v in month_data.items():
                        site_month_data[k] += v
                    era_rows.append(out)

                site_rows.append([
                    now, site.code, site.name, ', '.join(
                        s.code for s in site.find_linked_sites(
                            sess, month_start, month_finish)), month_finish,
                    site_category, ', '.join(sorted(list(site_sources))),
                    ', '.join(sorted(list(site_gen_types)))
                ] + [site_month_data[k] for k in summary_titles])
                sess.rollback()
            write_spreadsheet(rf, compression, site_rows, era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
Example #31
def _make_site_deltas(sess, report_context, site, scenario_hh, forecast_from,
                      supply_id):
    site_scenario_hh = scenario_hh.get(site.code, {})

    site_deltas = {"hhs": {}}
    delts = site_deltas["supply_deltas"] = {}
    for is_import in (True, False):
        delts[is_import] = {}
        for src in ("gen", "net", "gen-net", "3rd-party", "3rd-party-reverse",
                    "sub"):
            delts[is_import][src] = {"site": {}}

    earliest_delta = to_utc(Datetime.max)
    latest_delta = to_utc(Datetime.min)

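    # Each scenario type is supplied as CSV text of 'timestamp, kWh' rows;
    # parse it into a dict keyed by half-hour start date.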
    found_hh = False
    for typ in ("used", "generated", "parasitic", "gen_net"):
        hh_str = site_scenario_hh.get(typ, "")
        hh_data = site_scenario_hh[typ] = {}
        for row in csv.reader(StringIO(hh_str)):
            cells = [cell.strip() for cell in row]
            if len("".join(cells)) == 0:
                continue

            if len(cells) != 2:
                raise BadRequest("Can't interpret the row " + str(cells) +
                                 ": it should be of the form 'timestamp, kWh'")

            date_str, kwh_str = cells
            ts = parse_hh_start(date_str)
            earliest_delta = min(ts, earliest_delta)
            latest_delta = max(ts, latest_delta)
            try:
                hh_data[ts] = float(kwh_str)
            except ValueError as e:
                raise BadRequest("When looking at " + typ +
                                 " hh data, can't parse the "
                                 "kWh at " + date_str + ": " + str(e))
            found_hh = True

    if not found_hh:
        return site_deltas

    scenario_used = site_scenario_hh["used"]
    scenario_generated = site_scenario_hh["generated"]
    scenario_parasitic = site_scenario_hh["parasitic"]
    scenario_gen_net = site_scenario_hh["gen_net"]

    earliest_delta_ct = to_ct(earliest_delta)
    for month_start, month_finish in c_months_u(earliest_delta_ct.year,
                                                earliest_delta_ct.month,
                                                months=None):
        if month_start > latest_delta:
            break
        chunk_start = hh_max(month_start, earliest_delta)
        chunk_finish = hh_min(month_finish, latest_delta)

        site_ds = chellow.computer.SiteSource(sess, site, chunk_start,
                                              chunk_finish, forecast_from,
                                              report_context)
        hh_map = dict((h["start-date"], h) for h in site_ds.hh_data)

        for era in (sess.query(Era).join(SiteEra).join(Pc).filter(
                SiteEra.site == site,
                SiteEra.is_physical == true(),
                Era.imp_mpan_core != null(),
                Pc.code != "00",
                Era.start_date <= chunk_finish,
                or_(Era.finish_date == null(), Era.finish_date >= chunk_start),
        )):

            if supply_id is not None and era.supply_id != supply_id:
                continue

            ss_start = hh_max(era.start_date, chunk_start)
            ss_finish = hh_min(era.finish_date, chunk_finish)

            ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era,
                              True, report_context)

            for hh in ss.hh_data:
                sdatum = hh_map[hh["start-date"]]
                sdatum["import-net-kwh"] += hh["msp-kwh"]
                sdatum["used-kwh"] += hh["msp-kwh"]

        for era in (sess.query(Era).join(SiteEra).join(Pc).join(Supply).join(
                Source).filter(
                    SiteEra.site == site,
                    SiteEra.is_physical == true(),
                    Era.imp_mpan_core != null(),
                    Era.start_date <= chunk_finish,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= chunk_start),
                    Source.code == "gen-net",
                )):

            if supply_id is not None and era.supply_id != supply_id:
                continue

            ss_start = hh_max(era.start_date, chunk_start)
            ss_finish = hh_min(era.finish_date, chunk_finish)

            ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era,
                              False, report_context)

            for hh in ss.hh_data:
                sdatum = hh_map[hh["start-date"]]
                try:
                    sdatum["gen-net-kwh"] += hh["msp-kwh"]
                except KeyError:
                    sdatum["gen-net-kwh"] = hh["msp-kwh"]

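        # Reconcile each half-hour against the scenario figures: the
        # difference between the scenario value and the recorded value
        # becomes a delta, split between import-net and export-net so the
        # site's energy balance still holds.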
        for hh_start, hh in hh_map.items():
            if hh_start in scenario_used:
                used_delt = scenario_used[hh_start] - hh["used-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if used_delt < 0:
                    diff = hh["import-net-kwh"] + used_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt += used_delt
                else:
                    diff = hh["export-net-kwh"] - used_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt -= used_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["used-kwh"] += used_delt
                hh["msp-kwh"] -= exp_net_delt

            if hh_start in scenario_generated:
                imp_gen_delt = scenario_generated[hh_start] - hh[
                    "import-gen-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if imp_gen_delt < 0:
                    diff = hh["export-net-kwh"] + imp_gen_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt += imp_gen_delt
                else:
                    diff = hh["import-net-kwh"] - imp_gen_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt -= imp_gen_delt

                try:
                    delts[True]["gen"]["site"][hh_start] += imp_gen_delt
                except KeyError:
                    delts[True]["gen"]["site"][hh_start] = imp_gen_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["import-gen-kwh"] += imp_gen_delt
                hh["msp-kwh"] -= imp_net_delt

            if hh_start in scenario_parasitic:
                exp_gen_delt = scenario_parasitic[hh_start] - hh[
                    "export-gen-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if exp_gen_delt < 0:
                    diff = hh["import-net-kwh"] + exp_gen_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt += exp_gen_delt
                else:
                    diff = hh["export-net-kwh"] - exp_gen_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt -= exp_gen_delt

                try:
                    delts[False]["gen"]["site"][hh_start] += imp_gen_delt
                except KeyError:
                    delts[False]["gen"]["site"][hh_start] = exp_gen_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["export-gen-kwh"] += exp_gen_delt
                hh["msp-kwh"] -= imp_net_delt

            if hh_start in scenario_gen_net:
                gen_net_delt = scenario_gen_net[hh_start] - hh["gen-net-kwh"]

                try:
                    delts[False]["gen-net"]["site"][hh_start] += gen_net_delt
                except KeyError:
                    delts[False]["gen-net"]["site"][hh_start] = gen_net_delt

                hh["import-gen-kwh"] += gen_net_delt
                hh["export-net-kwh"] += gen_net_delt

            site_deltas["hhs"][hh_start] = hh

    sup_deltas = site_deltas["supply_deltas"][False]["net"]["site"]
    if all(v == 0 for v in sup_deltas.values()):
        sup_deltas.clear()

    return site_deltas
Example #32
def content(year, month, months, supply_id, user):
    tmp_file = sess = None
    try:
        sess = Session()
        supplies = sess.query(Supply).join(Era).distinct().options(
            joinedload(Supply.generator_type))

        if supply_id is None:
            base_name = "supplies_monthly_duration_for_all_supplies_for_" + \
                str(months) + "_to_" + str(year) + "_" + str(month) + ".csv"
        else:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)
            base_name = "supplies_monthly_duration_for_" + str(supply.id) + \
                "_" + str(months) + "_to_" + str(year) + "_" + str(month) + \
                ".csv"
        running_name, finished_name = chellow.dloads.make_names(
            base_name, user)

        tmp_file = open(running_name, "w")

        caches = {}

        start_date = Datetime(year, month, 1, tzinfo=pytz.utc) - \
            relativedelta(months=months-1)

        field_names = ('supply-name', 'source-code', 'generator-type', 'month',
                       'pc-code', 'msn', 'site-code', 'site-name',
                       'metering-type', 'import-mpan-core',
                       'metered-import-kwh', 'metered-import-net-gbp',
                       'metered-import-estimated-kwh', 'billed-import-kwh',
                       'billed-import-net-gbp', 'export-mpan-core',
                       'metered-export-kwh', 'metered-export-estimated-kwh',
                       'billed-export-kwh', 'billed-export-net-gbp', 'problem',
                       'timestamp')

        tmp_file.write('supply-id,' + ','.join(field_names) + '\n')

        forecast_date = chellow.computer.forecast_date()

        for i in range(months):
            month_start = start_date + relativedelta(months=i)
            month_finish = month_start + relativedelta(months=1) - HH

            for supply in supplies.filter(
                    Era.start_date <= month_finish,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= month_start)):

                generator_type = supply.generator_type
                if generator_type is None:
                    generator_type = ''
                else:
                    generator_type = generator_type.code

                source_code = supply.source.code
                eras = supply.find_eras(sess, month_start, month_finish)
                era = eras[-1]
                metering_type = era.meter_category

                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.era == era, SiteEra.is_physical == true()).one()

                values = {
                    'supply-name': supply.name,
                    'source-code': source_code,
                    'generator-type': generator_type,
                    'month': hh_format(month_finish),
                    'pc-code': era.pc.code,
                    'msn': era.msn,
                    'site-code': site.code,
                    'site-name': site.name,
                    'metering-type': metering_type,
                    'problem': ''
                }

                tmp_file.write(str(supply.id) + ',')

                for is_import, pol_name in [(True, 'import'),
                                            (False, 'export')]:
                    if is_import:
                        mpan_core = era.imp_mpan_core
                    else:
                        mpan_core = era.exp_mpan_core

                    values[pol_name + '-mpan-core'] = mpan_core
                    kwh = 0
                    est_kwh = 0

                    if metering_type in ['hh', 'amr']:
                        est_kwh = sess.query(HhDatum.value).join(Channel) \
                            .join(Era).filter(
                                HhDatum.status == 'E',
                                Era.supply_id == supply.id,
                                Channel.channel_type == 'ACTIVE',
                                Channel.imp_related == is_import,
                                HhDatum.start_date >= month_start,
                                HhDatum.start_date <= month_finish).first()
                        if est_kwh is None:
                            est_kwh = 0
                        else:
                            est_kwh = est_kwh[0]

                    if not (is_import and source_code in ('net', 'gen-net')):
                        kwh_sum = sess.query(
                            cast(func.sum(HhDatum.value),
                                 Float)).join(Channel).join(Era).filter(
                                     Era.supply_id == supply.id,
                                     Channel.channel_type == 'ACTIVE',
                                     Channel.imp_related == is_import,
                                     HhDatum.start_date >= month_start,
                                     HhDatum.start_date <=
                                     month_finish).one()[0]
                        if kwh_sum is not None:
                            kwh += kwh_sum

                    values['metered-' + pol_name + '-estimated-kwh'] = est_kwh
                    values['metered-' + pol_name + '-kwh'] = kwh
                    values['metered-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-kwh'] = 0
                    values['billed-' + pol_name + '-net-gbp'] = 0
                    values['billed-' + pol_name + '-apportioned-kwh'] = 0
                    values['billed-' + pol_name + '-apportioned-net-gbp'] = 0
                    values['billed-' + pol_name + '-raw-kwh'] = 0
                    values['billed-' + pol_name + '-raw-net-gbp'] = 0

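                # Apportion billed kWh and net GBP to this month by the
                # fraction of each overlapping bill that falls within it.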
                for bill in sess.query(Bill).filter(
                        Bill.supply == supply, Bill.start_date <= month_finish,
                        Bill.finish_date >= month_start):
                    bill_start = bill.start_date
                    bill_finish = bill.finish_date
                    bill_duration = (bill_finish -
                                     bill_start).total_seconds() + 30 * 60
                    overlap_duration = (min(bill_finish, month_finish) - max(
                        bill_start, month_start)).total_seconds() + 30 * 60
                    overlap_proportion = float(overlap_duration) / float(
                        bill_duration)
                    values['billed-import-net-gbp'] += \
                        overlap_proportion * float(bill.net)
                    values['billed-import-kwh'] += \
                        overlap_proportion * float(bill.kwh)

                for era in eras:
                    chunk_start = hh_max(era.start_date, month_start)
                    chunk_finish = hh_min(era.finish_date, month_finish)

                    import_mpan_core = era.imp_mpan_core
                    if import_mpan_core is None:
                        continue

                    supplier_contract = era.imp_supplier_contract

                    if source_code in ['net', 'gen-net', '3rd-party']:
                        supply_source = chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, True, caches)

                        values['metered-import-kwh'] += sum(
                            datum['msp-kwh']
                            for datum in supply_source.hh_data)

                        import_vb_function = supply_source.contract_func(
                            supplier_contract, 'virtual_bill')
                        if import_vb_function is None:
                            values['problem'] += "Can't find the " \
                                "virtual_bill function in the supplier " \
                                "contract. "
                        else:
                            import_vb_function(supply_source)
                            values['metered-import-net-gbp'] += \
                                supply_source.supplier_bill['net-gbp']

                        supply_source.contract_func(
                            era.dc_contract, 'virtual_bill')(supply_source)
                        values['metered-import-net-gbp'] += \
                            supply_source.dc_bill['net-gbp']

                        mop_func = supply_source.contract_func(
                            era.mop_contract, 'virtual_bill')
                        if mop_func is None:
                            values['problem'] += " MOP virtual_bill " \
                                "function can't be found."
                        else:
                            mop_func(supply_source)
                            mop_bill = supply_source.mop_bill
                            values['metered-import-net-gbp'] += \
                                mop_bill['net-gbp']
                            if len(mop_bill['problem']) > 0:
                                values['problem'] += \
                                    " MOP virtual bill problem: " + \
                                    mop_bill['problem']

                values['timestamp'] = int(time.time() * 1000)
                tmp_file.write(','.join('"' + str(values[name]) + '"'
                                        for name in field_names) + '\n')
    except BaseException:
        tmp_file.write(traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
        if tmp_file is not None:
            tmp_file.close()
            os.rename(running_name, finished_name)
Example #33
def content(year, supply_id, user):
    f = sess = None
    try:
        sess = Session()
        fname = ['crc', str(year), str(year + 1)]
        if supply_id is None:
            fname.append('all_supplies')
        else:
            fname.append('supply_' + str(supply_id))
        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(fname) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')

        ACTUAL_READ_TYPES = ['N', 'N3', 'C', 'X', 'CP']
        w.writerow(
            (
                'Chellow Supply Id', 'Report Start', 'Report Finish',
                'MPAN Core', 'Site Id', 'Site Name', 'From', 'To',
                'NHH Breakdown', 'Actual HH Normal Days',
                'Actual AMR Normal Days', 'Actual NHH Normal Days',
                'Actual Unmetered Normal Days', 'Max HH Normal Days',
                'Max AMR Normal Days', 'Max NHH Normal Days',
                'Max Unmetered Normal Days', 'Total Actual Normal Days',
                'Total Max Normal Days', 'Data Type', 'HH kWh', 'AMR kWh',
                'NHH kWh', 'Unmetered kwh', 'HH Filled kWh', 'AMR Filled kWh',
                'Total kWh', 'Note'))

        year_start = Datetime(year, 4, 1, tzinfo=pytz.utc)
        year_finish = year_start + relativedelta(years=1) - HH

        supplies = sess.query(Supply).join(Era).join(Source).filter(
            Source.code.in_(('net', 'gen-net')), Era.imp_mpan_core != null(),
            Era.start_date <= year_finish, or_(
                Era.finish_date == null(),
                Era.finish_date >= year_start)).distinct().order_by(Supply.id)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Supply.id == supply.id)

        meter_types = ('hh', 'amr', 'nhh', 'unmetered')

        for supply in supplies:
            total_kwh = dict([(mtype, 0) for mtype in meter_types])
            filled_kwh = dict([(mtype, 0) for mtype in ('hh', 'amr')])
            normal_days = dict([(mtype, 0) for mtype in meter_types])
            max_normal_days = dict([(mtype, 0) for mtype in meter_types])

            breakdown = ''
            eras = sess.query(Era).filter(
                Era.supply == supply, Era.start_date <= year_finish, or_(
                    Era.finish_date == null(),
                    Era.finish_date >= year_start)).order_by(
                Era.start_date).all()
            supply_from = hh_max(eras[0].start_date, year_start)
            supply_to = hh_min(eras[-1].finish_date, year_finish)

            for era in eras:

                meter_type = era.meter_category

                period_start = hh_max(era.start_date, year_start)
                period_finish = hh_min(era.finish_date, year_finish)

                max_normal_days[meter_type] += (
                    (period_finish - period_start).total_seconds() +
                    60 * 30) / (60 * 60 * 24)

                mpan_core = era.imp_mpan_core
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(),
                    SiteEra.era_id == era.id).one()

                if meter_type == 'nhh':

                    read_list = []
                    read_keys = {}
                    pairs = []

                    prior_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date.desc()))
                    prior_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date < period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date.desc()))
                    next_pres_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType)
                        .join(RegisterRead.present_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.present_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.present_date))
                    next_prev_reads = iter(
                        sess.query(RegisterRead).join(Bill).join(BillType).
                        join(RegisterRead.previous_type).filter(
                            RegisterRead.units == 0,
                            ReadType.code.in_(ACTUAL_READ_TYPES),
                            Bill.supply == supply,
                            RegisterRead.previous_date >= period_start,
                            BillType.code != 'W').order_by(
                            RegisterRead.previous_date))

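                    # Walk the register reads backwards then forwards from
                    # the period start, keeping only reads belonging to the
                    # latest (prime) bill, and pair consecutive reads on the
                    # same meter to derive a kWh per half-hour rate for each
                    # TPR.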
                    for is_forwards in [False, True]:
                        if is_forwards:
                            pres_reads = next_pres_reads
                            prev_reads = next_prev_reads
                            read_list.reverse()
                        else:
                            pres_reads = prior_pres_reads
                            prev_reads = prior_prev_reads

                        prime_pres_read = None
                        prime_prev_read = None
                        while True:
                            while prime_pres_read is None:
                                try:
                                    pres_read = next(pres_reads)
                                except StopIteration:
                                    break

                                pres_date = pres_read.present_date
                                pres_msn = pres_read.msn
                                read_key = '_'.join([str(pres_date), pres_msn])
                                if read_key in read_keys:
                                    continue

                                pres_bill = sess.query(Bill).join(BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply == supply,
                                        Bill.finish_date >=
                                        pres_read.bill.start_date,
                                        Bill.start_date <=
                                        pres_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if pres_bill != pres_read.bill:
                                    continue

                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.present_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead).
                                    filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill == pres_bill,
                                        RegisterRead.present_date == pres_date,
                                        RegisterRead.msn == pres_msn))

                                prime_pres_read = {
                                    'date': pres_date, 'reads': reads,
                                    'msn': pres_msn}
                                read_keys[read_key] = None
                            while prime_prev_read is None:
                                try:
                                    prev_read = next(prev_reads)
                                except StopIteration:
                                    break

                                prev_date = prev_read.previous_date
                                prev_msn = prev_read.msn
                                read_key = '_'.join([str(prev_date), prev_msn])
                                if read_key in read_keys:
                                    continue
                                prev_bill = sess.query(Bill).join(BillType). \
                                    filter(
                                        Bill.reads.any(),
                                        Bill.supply_id == supply.id,
                                        Bill.finish_date >=
                                        prev_read.bill.start_date,
                                        Bill.start_date <=
                                        prev_read.bill.finish_date,
                                        BillType.code != 'W').order_by(
                                        Bill.issue_date.desc(),
                                        BillType.code).first()
                                if prev_bill != prev_read.bill:
                                    continue

                                reads = dict(
                                    (
                                        read.tpr.code,
                                        float(read.previous_value) *
                                        float(read.coefficient))
                                    for read in sess.query(RegisterRead).
                                    filter(
                                        RegisterRead.units == 0,
                                        RegisterRead.bill_id == prev_bill.id,
                                        RegisterRead.previous_date ==
                                        prev_date,
                                        RegisterRead.msn == prev_msn))

                                prime_prev_read = {
                                    'date': prev_date, 'reads': reads,
                                    'msn': prev_msn}
                                read_keys[read_key] = None

                            if prime_pres_read is None and \
                                    prime_prev_read is None:
                                break
                            elif prime_pres_read is None:
                                read_list.append(prime_prev_read)
                                prime_prev_read = None
                            elif prime_prev_read is None:
                                read_list.append(prime_pres_read)
                                prime_pres_read = None
                            else:
                                if is_forwards:
                                    if prime_pres_read['date'] <= \
                                            prime_prev_read['date']:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None
                                    else:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                else:
                                    if prime_prev_read['date'] >= \
                                            prime_pres_read['date']:
                                        read_list.append(prime_prev_read)
                                        prime_prev_read = None
                                    else:
                                        read_list.append(prime_pres_read)
                                        prime_pres_read = None

                            if len(read_list) > 1:
                                if is_forwards:
                                    aft_read = read_list[-2]
                                    fore_read = read_list[-1]
                                else:
                                    aft_read = read_list[-1]
                                    fore_read = read_list[-2]

                                if aft_read['msn'] == fore_read['msn'] and \
                                        set(aft_read['reads'].keys()) == \
                                        set(fore_read['reads'].keys()):
                                    pair_start_date = aft_read['date'] + HH
                                    pair_finish_date = fore_read['date']

                                    num_hh = (
                                        (
                                            pair_finish_date + HH -
                                            pair_start_date).total_seconds()
                                        ) / (30 * 60)

                                    tprs = {}
                                    for tpr_code, initial_val in \
                                            aft_read['reads'].items():
                                        end_val = fore_read['reads'][tpr_code]

                                        kwh = end_val - initial_val

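                                        # A negative advance means the
                                        # register has clocked over, so add
                                        # 10 ** digits to recover the true
                                        # consumption.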
                                        if kwh < 0:
                                            digits = int(
                                                math.log10(initial_val)) + 1
                                            kwh = 10 ** digits + kwh

                                        tprs[tpr_code] = kwh / num_hh

                                    pairs.append(
                                        {
                                            'start-date': pair_start_date,
                                            'finish-date': pair_finish_date,
                                            'tprs': tprs})

                                    if len(pairs) > 0 and (
                                            not is_forwards or (
                                                is_forwards and
                                                read_list[-1]['date'] >
                                                period_finish)):
                                        break

                    breakdown += 'read list - \n' + str(read_list) + "\n"
                    if len(pairs) == 0:
                        pairs.append(
                            {
                                'start-date': period_start,
                                'finish-date': period_finish,
                                'tprs': {'00001': 0}})
                    else:
                        for pair in pairs:
                            pair_start = pair['start-date']
                            pair_finish = pair['finish-date']
                            if pair_start >= year_start and \
                                    pair_finish <= year_finish:
                                block_start = hh_max(pair_start, period_start)
                                block_finish = hh_min(
                                    pair_finish, period_finish)

                                if block_start <= block_finish:
                                    normal_days[meter_type] += (
                                        (
                                            block_finish - block_start
                                        ).total_seconds() +
                                        60 * 30) / (60 * 60 * 24)

                    # smooth
                    for i in range(1, len(pairs)):
                        pairs[i - 1]['finish-date'] = pairs[i]['start-date'] \
                            - HH

                    # stretch
                    if pairs[0]['start-date'] > period_start:
                        pairs[0]['start-date'] = period_start

                    if pairs[-1]['finish-date'] < period_finish:
                        pairs[-1]['finish-date'] = period_finish

                    # chop
                    pairs = [
                        pair for pair in pairs
                        if not pair['start-date'] > period_finish and
                        not pair['finish-date'] < period_start]

                    # squash
                    if pairs[0]['start-date'] < period_start:
                        pairs[0]['start-date'] = period_start

                    if pairs[-1]['finish-date'] > period_finish:
                        pairs[-1]['finish-date'] = period_finish

                    for pair in pairs:
                        pair_hhs = (
                            (
                                pair['finish-date'] - pair['start-date']
                            ).total_seconds() + 30 * 60) / (60 * 30)
                        pair['pair_hhs'] = pair_hhs
                        for tpr_code, pair_kwh in pair['tprs'].items():
                            total_kwh[meter_type] += pair_kwh * pair_hhs

                    breakdown += 'pairs - \n' + str(pairs)

                elif meter_type in ('hh', 'amr'):
                    period_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish).order_by(
                                HhDatum.id))
                    year_kwhs = list(
                        float(v[0]) for v in sess.query(HhDatum.value).
                        join(Channel).join(Era).filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Era.supply == supply,
                            HhDatum.start_date >= year_start,
                            HhDatum.start_date <= year_finish).order_by(
                                HhDatum.id))

                    period_sum_kwhs = sum(period_kwhs)
                    year_sum_kwhs = sum(year_kwhs)
                    period_len_kwhs = len(period_kwhs)
                    year_len_kwhs = len(year_kwhs)
                    total_kwh[meter_type] += period_sum_kwhs
                    period_hhs = (
                        period_finish + HH - period_start
                        ).total_seconds() / (60 * 30)
                    if year_len_kwhs > 0:
                        filled_kwh[meter_type] += year_sum_kwhs / \
                            year_len_kwhs * (period_hhs - period_len_kwhs)
                    normal_days[meter_type] += sess.query(
                        func.count(HhDatum.value)).join(Channel). \
                        filter(
                            Channel.imp_related == true(),
                            Channel.channel_type == 'ACTIVE',
                            Channel.era == era,
                            HhDatum.start_date >= period_start,
                            HhDatum.start_date <= period_finish,
                            HhDatum.status == 'A').one()[0] / 48
                elif meter_type == 'unmetered':
                    year_seconds = (
                        year_finish - year_start).total_seconds() + 60 * 30
                    period_seconds = (
                        period_finish - period_start).total_seconds() + 60 * 30

                    total_kwh[meter_type] += era.imp_sc * period_seconds / \
                        year_seconds

                    normal_days[meter_type] += period_seconds / (60 * 60 * 24)

            # Treat the supply as 'Actual' if normal reads cover at least
            # 183 / 365 of the maximum possible days (183 days in a full
            # year).
            total_normal_days = sum(normal_days.values())
            total_max_normal_days = sum(max_normal_days.values())
            is_normal = total_normal_days / total_max_normal_days >= 183 / 365

            w.writerow(
                [
                    supply.id, hh_format(year_start), hh_format(year_finish),
                    mpan_core, site.code, site.name, hh_format(supply_from),
                    hh_format(supply_to), breakdown] +
                [
                    normal_days[t] for t in meter_types] + [
                    max_normal_days[t] for t in meter_types] + [
                    total_normal_days, total_max_normal_days,
                    "Actual" if is_normal else "Estimated"] +
                [total_kwh[t] for t in meter_types] +
                [filled_kwh[t] for t in ('hh', 'amr')] +
                [sum(total_kwh.values()) + sum(filled_kwh.values()), ''])

            # avoid a long running transaction
            sess.rollback()
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        f.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #34
def content(supply_id, start_date, finish_date, user):
    forecast_date = datetime.datetime.max.replace(tzinfo=pytz.utc)
    caches = {}
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, "w")
        f.write(
            ','.join(
                (
                    "Supply Id", "Supply Name", "Source", "Generator Type",
                    "Site Ids", "Site Names", "From", "To", "PC", "MTC", "CoP",
                    "SSC", "Normal Reads", "Type", "Import LLFC",
                    "Import MPAN Core", "Import Supply Capacity",
                    "Import Supplier", "Import Total MSP kWh",
                    "Import Non-actual MSP kWh", "Import Total GSP kWh",
                    "Import MD / kW", "Import MD Date", "Import MD / kVA",
                    "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                    "Export Supply Capacity", "Export Supplier",
                    "Export Total MSP kWh", "Export Non-actual MSP kWh",
                    "Export GSP kWh", "Export MD / kW", "Export MD Date",
                    "Export MD / kVA", "Export Bad HHs")))

        supplies = sess.query(Supply).join(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(Supply.id).distinct()

        if supply_id is not None:
            supplies = supplies.filter(
                Supply.id == Supply.get_by_id(sess, supply_id).id)

        for supply in supplies:
            site_codes = ''
            site_names = ''
            eras = supply.find_eras(sess, start_date, finish_date)

            era = eras[-1]
            for site_era in era.site_eras:
                site = site_era.site
                site_codes = site_codes + site.code + ', '
                site_names = site_names + site.name + ', '
            site_codes = site_codes[:-2]
            site_names = site_names[:-2]

            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code

            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code

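            # Count 'normal' reads: actual reads whose bill is the prime
            # (latest issued) bill covering that period, de-duplicated by
            # read date and meter serial number.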
            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                    Bill.supply == supply, BillType.code != 'W',
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date,
                    ReadType.code.in_(NORMAL_READ_TYPES)),

                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                    Bill.supply == supply, BillType.code != 'W',
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date,
                    ReadType.code.in_(NORMAL_READ_TYPES))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(
                        str(rdate) + "_" + read.msn)

            supply_type = era.make_meter_category()

            chunk_start = hh_max(eras[0].start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            num_hh = int(
                (chunk_finish - (chunk_start - HH)).total_seconds() /
                (30 * 60))

            f.write(
                '\n' + ','.join(
                    ('"' + str(value) + '"') for value in [
                        supply.id, supply.name, supply.source.code,
                        generator_type, site_codes, site_names,
                        hh_format(start_date), hh_format(finish_date),
                        era.pc.code, era.mtc.code, era.cop.code, ssc_code,
                        len(prime_reads), supply_type]) + ',')
            f.write(
                mpan_bit(
                    sess, supply, True, num_hh, eras, chunk_start,
                    chunk_finish, forecast_date, caches) + "," +
                mpan_bit(
                    sess, supply, False, num_hh, eras, chunk_start,
                    chunk_finish, forecast_date, caches))
    except BaseException:
        f.write(traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #35
def _process_g_bill_ids(
    sess,
    report_context,
    g_bill_ids,
    forecast_date,
    bill_titles,
    vbf,
    titles,
    csv_writer,
):

    g_bill_id = list(sorted(g_bill_ids))[0]
    g_bill_ids.remove(g_bill_id)
    g_bill = sess.query(GBill).filter(GBill.id == g_bill_id).one()
    problem = ""
    g_supply = g_bill.g_supply
    read_dict = defaultdict(set)
    for g_read in g_bill.g_reads:
        if not all(g_read.msn == era.msn for era in g_supply.find_g_eras(
                sess, g_read.prev_date, g_read.pres_date)):
            problem += (
                f"The MSN {g_read.msn} of the register read {g_read.id} doesn't match "
                f"the MSN of all the relevant eras.")

        for dt, typ in [
            (g_read.pres_date, g_read.pres_type),
            (g_read.prev_date, g_read.prev_type),
        ]:
            typ_set = read_dict[str(dt) + "-" + g_read.msn]
            typ_set.add(typ)
            if len(typ_set) > 1:
                problem += f" Reads taken on {dt} have differing read types."

    vals = {
        "covered_vat_gbp": Decimal("0.00"),
        "covered_net_gbp": Decimal("0.00"),
        "covered_gross_gbp": Decimal("0.00"),
        "covered_kwh": Decimal(0),
        "covered_start": g_bill.start_date,
        "covered_finish": g_bill.finish_date,
        "covered_bill_ids": [],
    }

    covered_primary_bill = None
    enlarged = True

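    # Repeatedly widen the covered window until it includes every bill that
    # overlaps it, so a chain of overlapping bills is treated as a single
    # covered period.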
    while enlarged:
        enlarged = False
        covered_bills = OrderedDict(
            (b.id, b) for b in sess.query(GBill).filter(
                GBill.g_supply == g_supply,
                GBill.start_date <= vals["covered_finish"],
                GBill.finish_date >= vals["covered_start"],
            ).order_by(GBill.issue_date.desc(), GBill.start_date))

        num_covered = None
        while num_covered != len(covered_bills):
            num_covered = len(covered_bills)
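            # Drop pairs of bills that exactly cancel each other out (a bill
            # and its reversal) so they don't distort the covered totals.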
            for a, b in combinations(tuple(covered_bills.values()), 2):
                if all((
                        a.start_date == b.start_date,
                        a.finish_date == b.finish_date,
                        a.kwh == -1 * b.kwh,
                        a.net == -1 * b.net,
                        a.vat == -1 * b.vat,
                        a.gross == -1 * b.gross,
                )):
                    for gb_id in a.id, b.id:
                        del covered_bills[gb_id]
                        if gb_id in g_bill_ids:
                            g_bill_ids.remove(gb_id)
                    break

        for covered_bill in covered_bills.values():
            if covered_primary_bill is None and len(covered_bill.g_reads) > 0:
                covered_primary_bill = covered_bill
            if covered_bill.start_date < vals["covered_start"]:
                vals["covered_start"] = covered_bill.start_date
                enlarged = True
                break
            if covered_bill.finish_date > vals["covered_finish"]:
                vals["covered_finish"] = covered_bill.finish_date
                enlarged = True
                break

    if len(covered_bills) == 0:
        return

    for covered_bill in covered_bills.values():
        if covered_bill.id in g_bill_ids:
            g_bill_ids.remove(covered_bill.id)
        vals["covered_bill_ids"].append(covered_bill.id)
        bdown = covered_bill.make_breakdown()
        vals["covered_kwh"] += covered_bill.kwh
        vals["covered_net_gbp"] += covered_bill.net
        vals["covered_vat_gbp"] += covered_bill.vat
        vals["covered_gross_gbp"] += covered_bill.gross
        for title in bill_titles:
            k = "covered_" + title
            v = bdown.get(title)

            if v is not None:
                if isinstance(v, list):
                    if k not in vals:
                        vals[k] = set()
                    vals[k].update(set(v))
                else:
                    try:
                        vals[k] += v
                    except KeyError:
                        vals[k] = v
                    except TypeError:
                        raise BadRequest(
                            f"Problem with bill {g_bill.id} and key {k} and value {v} "
                            f"for existing {vals[k]}")

            if title in (
                    "correction_factor",
                    "calorific_value",
                    "unit_code",
                    "unit_factor",
            ):
                if k not in vals:
                    vals[k] = set()
                for g_read in covered_bill.g_reads:
                    if title in ("unit_code", "unit_factor"):
                        g_unit = g_read.g_unit
                        if title == "unit_code":
                            v = g_unit.code
                        else:
                            v = g_unit.factor
                    else:
                        v = getattr(g_read, title)
                    vals[k].add(v)

    for g_era in (sess.query(GEra).filter(
            GEra.g_supply == g_supply,
            GEra.start_date <= vals["covered_finish"],
            or_(GEra.finish_date == null(),
                GEra.finish_date >= vals["covered_start"]),
    ).distinct()):
        site = (sess.query(Site).join(SiteGEra).filter(
            SiteGEra.is_physical == true(), SiteGEra.g_era == g_era).one())

        chunk_start = hh_max(vals["covered_start"], g_era.start_date)
        chunk_finish = hh_min(vals["covered_finish"], g_era.finish_date)

        data_source = chellow.g_engine.GDataSource(
            sess,
            chunk_start,
            chunk_finish,
            forecast_date,
            g_era,
            report_context,
            covered_primary_bill,
        )

        vbf(data_source)

        for k, v in data_source.bill.items():
            vk = "virtual_" + k
            try:
                if isinstance(v, set):
                    vals[vk].update(v)
                else:
                    vals[vk] += v
            except KeyError:
                vals[vk] = v
            except TypeError as detail:
                raise BadRequest(f"For key {vk} and value {v}. {detail}")

    if g_bill.id not in covered_bills.keys():
        g_bill = covered_bills[sorted(covered_bills.keys())[0]]

    vals["batch"] = g_bill.g_batch.reference
    vals["bill_reference"] = g_bill.reference
    vals["bill_type"] = g_bill.bill_type.code
    vals["bill_start_date"] = g_bill.start_date
    vals["bill_finish_date"] = g_bill.finish_date
    vals["mprn"] = g_supply.mprn
    vals["supply_name"] = g_supply.name
    vals["site_code"] = site.code
    vals["site_name"] = site.name

    for k, v in vals.items():
        if k == "covered_bill_ids":
            vals[k] = " | ".join(str(b) for b in v)
        else:
            vals[k] = csv_make_val(v)

    for i, title in enumerate(titles):
        if title.startswith("difference_"):
            try:
                covered_val = float(vals[titles[i - 2]])
                virtual_val = float(vals[titles[i - 1]])
                vals[title] = covered_val - virtual_val
            except KeyError:
                vals[title] = None

    csv_writer.writerow([(vals.get(k) if vals.get(k) is not None else "")
                         for k in titles])
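The `while num_covered != len(covered_bills)` loop above strips out pairs of bills that exactly cancel one another (same period, negated kWh, net, VAT and gross). A minimal standalone sketch of that pairing step, using a hypothetical FakeBill tuple rather than the real GBill model:

# Sketch only (not the real GBill model): remove pairs of bills that exactly
# cancel each other out, repeating until no more pairs can be found.
from collections import OrderedDict, namedtuple
from decimal import Decimal
from itertools import combinations

FakeBill = namedtuple("FakeBill", "id start_date finish_date kwh net vat gross")


def remove_cancelling_pairs(covered_bills):
    num_covered = None
    while num_covered != len(covered_bills):
        num_covered = len(covered_bills)
        for a, b in combinations(tuple(covered_bills.values()), 2):
            if all((
                    a.start_date == b.start_date,
                    a.finish_date == b.finish_date,
                    a.kwh == -1 * b.kwh,
                    a.net == -1 * b.net,
                    a.vat == -1 * b.vat,
                    a.gross == -1 * b.gross,
            )):
                for b_id in (a.id, b.id):
                    del covered_bills[b_id]
                break
    return covered_bills


bills = OrderedDict(
    (b.id, b) for b in [
        FakeBill(1, "2020-01-01", "2020-01-31", Decimal("100"),
                 Decimal("50"), Decimal("10"), Decimal("60")),
        FakeBill(2, "2020-01-01", "2020-01-31", Decimal("-100"),
                 Decimal("-50"), Decimal("-10"), Decimal("-60")),
        FakeBill(3, "2020-02-01", "2020-02-29", Decimal("80"),
                 Decimal("40"), Decimal("8"), Decimal("48")),
    ])
print(list(remove_cancelling_pairs(bills)))  # -> [3]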
Example #36
def mpan_bit(
        sess, supply, is_import, num_hh, eras, chunk_start, chunk_finish,
        forecast_date, caches):
    mpan_core_str = ''
    llfc_code = ''
    sc_str = ''
    supplier_contract_name = ''
    gsp_kwh = ''
    for era in eras:
        mpan_core = era.imp_mpan_core if is_import else era.exp_mpan_core
        if mpan_core is None:
            continue
        mpan_core_str = mpan_core
        if is_import:
            supplier_contract_name = era.imp_supplier_contract.name
            llfc = era.imp_llfc
            sc = era.imp_sc
        else:
            supplier_contract_name = era.exp_supplier_contract.name
            llfc = era.exp_llfc
            sc = era.exp_sc
        llfc_code = llfc.code
        sc_str = str(sc)
        if llfc.is_import and era.pc.code == '00' and \
                supply.source.code not in ('gen',) and \
                supply.dno_contract.name != '99':
            if gsp_kwh == '':
                gsp_kwh = 0

            block_start = hh_max(era.start_date, chunk_start)
            block_finish = hh_min(era.finish_date, chunk_finish)

            supply_source = chellow.computer.SupplySource(
                sess, block_start, block_finish, forecast_date, era, is_import,
                caches)

            chellow.duos.duos_vb(supply_source)

            gsp_kwh += sum(datum['gsp-kwh'] for datum in supply_source.hh_data)

    md = 0
    sum_kwh = 0
    non_actual = 0
    date_at_md = None
    kvarh_at_md = None
    num_na = 0

    for datum in sess.query(HhDatum).join(Channel).join(Era).filter(
            Era.supply == supply, Channel.imp_related == is_import,
            Channel.channel_type == 'ACTIVE',
            HhDatum.start_date >= chunk_start,
            HhDatum.start_date <= chunk_finish).order_by(HhDatum.id):
        hh_value = float(datum.value)
        hh_status = datum.status
        if hh_value > md:
            md = hh_value
            date_at_md = datum.start_date
            kvarh_at_md = sess.query(
                cast(func.max(HhDatum.value), Float)).join(
                Channel).join(Era).filter(
                Era.supply == supply,
                Channel.imp_related == is_import,
                Channel.channel_type != 'ACTIVE',
                HhDatum.start_date == date_at_md).one()[0]

        sum_kwh += hh_value
        if hh_status != 'A':
            non_actual += hh_value
            num_na += 1

    kw_at_md = md * 2
    if kvarh_at_md is None:
        kva_at_md = 'None'
    else:
        kva_at_md = (kw_at_md ** 2 + (kvarh_at_md * 2) ** 2) ** 0.5

    num_bad = num_hh - sess.query(HhDatum).join(Channel).join(Era).filter(
        Era.supply == supply, Channel.imp_related == is_import,
        Channel.channel_type == 'ACTIVE', HhDatum.start_date >= chunk_start,
        HhDatum.start_date <= chunk_finish).count() + num_na

    date_at_md_str = '' if date_at_md is None else hh_format(date_at_md)

    return ','.join(str(val) for val in [
        llfc_code, mpan_core_str, sc_str, supplier_contract_name, sum_kwh,
        non_actual, gsp_kwh, kw_at_md, date_at_md_str, kva_at_md, num_bad])
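The maximum-demand figures returned by mpan_bit come from simple half-hour arithmetic: the peak half-hourly kWh value doubles to average kW, the coincident kvarh doubles to kvar, and kVA follows by Pythagoras. A self-contained sketch of that calculation, with made-up readings:

# Sketch of the max-demand arithmetic used above: half-hourly energy values
# are converted to average power over the half hour by doubling, then
# apparent power follows from active and reactive power by Pythagoras.
def kva_at_max_demand(md_kwh, kvarh_at_md):
    kw = md_kwh * 2              # kWh over 30 minutes -> average kW
    if kvarh_at_md is None:
        return kw, None
    kvar = kvarh_at_md * 2       # kvarh over 30 minutes -> average kvar
    kva = (kw ** 2 + kvar ** 2) ** 0.5
    return kw, kva


# Example: 60 kWh and 22.5 kvarh in the peak half hour.
kw, kva = kva_at_max_demand(60, 22.5)
print(kw, round(kva, 1))  # 120 128.2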
Example #37
def content(g_supply_id, file_name, start_date, finish_date, user):
    caches = {}
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'g_supply_virtual_bill_' + str(g_supply_id) + '.csv', user)
        f = open(running_name, mode='w', newline='')
        writer = csv.writer(f, lineterminator='\n')

        g_supply = GSupply.get_by_id(sess, g_supply_id)

        forecast_dt = forecast_date()

        prev_titles = None

        for g_era in sess.query(GEra).filter(
                GEra.g_supply == g_supply, GEra.start_date < finish_date,
                or_(GEra.finish_date == null(),
                    GEra.finish_date > start_date)).order_by(GEra.start_date):

            chunk_start = hh_max(g_era.start_date, start_date)
            chunk_finish = hh_min(g_era.finish_date, finish_date)
            site = sess.query(Site).join(SiteGEra).filter(
                SiteGEra.g_era == g_era, SiteGEra.is_physical == true()).one()

            ds = GDataSource(sess, chunk_start, chunk_finish, forecast_dt,
                             g_era, caches, None)

            titles = [
                'MPRN', 'Site Code', 'Site Name', 'Account', 'From', 'To', ''
            ]

            output_line = [
                g_supply.mprn, site.code, site.name, ds.account,
                hh_format(ds.start_date),
                hh_format(ds.finish_date), ''
            ]

            contract_titles = g_contract_func(caches, g_era.g_contract,
                                              'virtual_bill_titles')()
            titles.extend(contract_titles)

            g_contract_func(caches, g_era.g_contract, 'virtual_bill')(ds)
            bill = ds.bill

            for title in contract_titles:
                if title in bill:
                    output_line.append(csv_make_val(bill[title]))
                    del bill[title]
                else:
                    output_line.append('')

            for k in sorted(bill.keys()):
                output_line.extend([k, bill[k]])

            if titles != prev_titles:
                prev_titles = titles
                writer.writerow([str(v) for v in titles])
            writer.writerow(output_line)
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        writer.writerow([traceback.format_exc()])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
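Like the other reports here, this one writes to a "running" file and renames it to its "finished" name only after the writer has been closed, so a half-written download is never served. A minimal sketch of that pattern using plain tempfile paths rather than chellow.dloads.make_names:

# Sketch of the running-file / finished-file pattern used by these reports.
# The paths here are illustrative; the real code gets them from
# chellow.dloads.make_names().
import csv
import os
import tempfile
import traceback


def write_report(rows, directory):
    running_name = os.path.join(directory, "RUNNING_report.csv")
    finished_name = os.path.join(directory, "FINISHED_report.csv")
    f = None
    try:
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        for row in rows:
            writer.writerow(row)
    except BaseException:
        if f is not None:
            f.write(traceback.format_exc())
    finally:
        if f is not None:
            f.close()
            # Only a fully written file ever gets the finished name.
            os.rename(running_name, finished_name)
    return finished_name


with tempfile.TemporaryDirectory() as d:
    print(os.path.basename(write_report([["a", 1], ["b", 2]], d)))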
Example #38
def hh(data_source, rate_period='monthly', est_kw=None):
    for hh in (h for h in data_source.hh_data if h['ct-is-month-end']):
        hh_start = hh['start-date']

        month_start = utc_datetime(hh_start.year, hh_start.month)
        month_finish = month_start + relativedelta(months=1) - HH

        financial_year_start = month_start
        while financial_year_start.month != 4:
            financial_year_start -= relativedelta(months=1)

        last_financial_year_start = financial_year_start - relativedelta(
            years=1)
        financial_year_finish = financial_year_start + relativedelta(
            years=1) - HH

        est_triad_kws = []
        earliest_triad = None
        for dt in get_file_rates(
                data_source.caches, 'triad_dates',
                last_financial_year_start)['triad_dates']:
            triad_hh = None
            earliest_triad = hh_min(earliest_triad, dt)
            try:
                ds = next(
                    data_source.get_data_sources(dt, dt, financial_year_start))
                chellow.duos.duos_vb(ds)
                triad_hh = ds.hh_data[0]

                while dt < financial_year_start:
                    dt += relativedelta(years=1)

                for ds in data_source.get_data_sources(
                        dt, dt, financial_year_start):
                    chellow.duos.duos_vb(ds)
                    datum = ds.hh_data[0]
                    triad_hh['laf'] = datum['laf']
                    triad_hh['gsp-kw'] = datum['laf'] * triad_hh['msp-kw']
            except StopIteration:
                triad_hh = {
                    'hist-start': dt, 'msp-kw': 0, 'start-date': dt,
                    'status': 'before start of MPAN',
                    'laf': 1, 'gsp-kw': 0}
            est_triad_kws.append(triad_hh)

        if data_source.site is None:
            era = data_source.supply.find_era_at(
                data_source.sess, earliest_triad)
            if era is None or era.get_channel(
                    data_source.sess, data_source.is_import, 'ACTIVE') is None:
                if est_kw is not None:
                    est_triad_kw = est_kw
                else:
                    est_triad_kw = 0.85 * max(
                        datum['msp-kwh'] for datum in data_source.hh_data) * 2
                for est_datum in est_triad_kws:
                    est_datum['msp-kw'] = est_triad_kw
                    est_datum['gsp-kw'] = est_datum['msp-kw'] * \
                        est_datum['laf']

        gsp_kw = 0
        for i, triad_hh in enumerate(est_triad_kws):
            triad_prefix = 'triad-estimate-' + str(i + 1)
            hh[triad_prefix + '-date'] = triad_hh['hist-start']
            hh[triad_prefix + '-msp-kw'] = triad_hh['msp-kw']
            hh[triad_prefix + '-status'] = triad_hh['status']
            hh[triad_prefix + '-laf'] = triad_hh['laf']
            hh[triad_prefix + '-gsp-kw'] = triad_hh['gsp-kw']
            gsp_kw += triad_hh['gsp-kw']

        hh['triad-estimate-gsp-kw'] = gsp_kw / 3
        polarity = 'import' if data_source.llfc.is_import else 'export'
        gsp_group_code = data_source.gsp_group_code
        rate = float(
            get_file_rates(
                data_source.caches, 'triad_rates',
                month_start)['triad_gbp_per_gsp_kw'][polarity][gsp_group_code])

        hh['triad-estimate-rate'] = rate

        est_triad_gbp = hh['triad-estimate-rate'] * hh['triad-estimate-gsp-kw']

        if rate_period == 'monthly':
            total_intervals = 12

            est_intervals = 1
            hh['triad-estimate-months'] = est_intervals
        else:
            dt = financial_year_start
            total_intervals = 0
            while dt <= financial_year_finish:
                total_intervals += 1
                dt += relativedelta(days=1)

            est_intervals = 0
            for ds in data_source.get_data_sources(month_start, month_finish):
                for h in ds.hh_data:
                    if h['utc-decimal-hour'] == 0:
                        est_intervals += 1

            hh['triad-estimate-days'] = est_intervals

        hh['triad-estimate-gbp'] = est_triad_gbp / total_intervals * \
            est_intervals

        if month_start.month == 3:
            triad_kws = []
            for t_date in get_file_rates(
                    data_source.caches, 'triad_dates',
                    month_start)['triad_dates']:

                try:
                    ds = next(data_source.get_data_sources(t_date, t_date))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh = ds.hh_data[0]
                    else:
                        thh = {
                            'hist-start': t_date, 'msp-kw': 0,
                            'start-date': t_date, 'status': 'before contract',
                            'laf': 'before contract', 'gsp-kw': 0}
                except StopIteration:
                    thh = {
                        'hist-start': t_date, 'msp-kw': 0,
                        'start-date': t_date,
                        'status': 'before start of supply',
                        'laf': 'before start of supply', 'gsp-kw': 0}

                while t_date < financial_year_start:
                    t_date += relativedelta(years=1)

                try:
                    ds = next(data_source.get_data_sources(t_date, t_date))
                    if data_source.supplier_contract is None or \
                            ds.supplier_contract == \
                            data_source.supplier_contract:
                        chellow.duos.duos_vb(ds)
                        thh['laf'] = ds.hh_data[0]['laf']
                        thh['gsp-kw'] = thh['laf'] * thh['msp-kw']
                except StopIteration:
                    pass

                triad_kws.append(thh)

            gsp_kw = 0

            for i, triad_hh in enumerate(triad_kws):
                pref = 'triad-actual-' + str(i + 1)
                hh[pref + '-date'] = triad_hh['start-date']
                hh[pref + '-msp-kw'] = triad_hh['msp-kw']
                hh[pref + '-status'] = triad_hh['status']
                hh[pref + '-laf'] = triad_hh['laf']
                hh[pref + '-gsp-kw'] = triad_hh['gsp-kw']
                gsp_kw += triad_hh['gsp-kw']

            hh['triad-actual-gsp-kw'] = gsp_kw / 3
            polarity = 'import' if data_source.llfc.is_import else 'export'
            gsp_group_code = data_source.gsp_group_code
            tot_rate = 0
            for start_date, finish_date, script in get_file_scripts(
                    'triad_rates'):
                if start_date <= financial_year_finish and not hh_before(
                        finish_date, financial_year_start):
                    start_month = start_date.month
                    if start_month < 4:
                        start_month += 12

                    if finish_date is None:
                        finish_month = financial_year_finish.month
                    else:
                        finish_month = finish_date.month

                    if finish_month < 4:
                        finish_month += 12

                    rt = get_file_rates(
                        data_source.caches, 'triad_rates',
                        start_date
                        )['triad_gbp_per_gsp_kw'][polarity][gsp_group_code]
                    tot_rate += (finish_month - start_month + 1) * float(rt)

            rate = tot_rate / 12
            hh['triad-actual-rate'] = rate

            hh['triad-actual-gbp'] = hh['triad-actual-rate'] * \
                hh['triad-actual-gsp-kw']

            era = data_source.supply.find_era_at(
                data_source.sess, month_finish)
            est_intervals = 0

            interval = relativedelta(months=1) if \
                rate_period == 'monthly' else relativedelta(days=1)

            dt = month_finish
            while era is not None and dt > financial_year_start:
                est_intervals += 1
                dt -= interval
                if hh_after(dt, era.finish_date):
                    era = data_source.supply.find_era_at(data_source.sess, dt)

            if rate_period == 'monthly':
                hh['triad-all-estimates-months'] = est_intervals
            else:
                hh['triad-all-estimates-days'] = est_intervals
            hh['triad-all-estimates-gbp'] = est_triad_gbp / \
                total_intervals * est_intervals * -1
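The triad logic above finds the start of the financial year by stepping month_start back a month at a time until it reaches April, and then spreads the estimated annual charge over the intervals of that year. A small sketch of those two steps, independent of the chellow data sources:

# Sketch of the financial-year logic above: walk back to 1 April, then
# apportion an estimated annual triad charge over a single month.
from datetime import datetime, timezone
from dateutil.relativedelta import relativedelta


def financial_year_start(month_start):
    # Step back a month at a time until the month is April.
    while month_start.month != 4:
        month_start -= relativedelta(months=1)
    return month_start


def monthly_triad_estimate(rate_gbp_per_gsp_kw, estimate_gsp_kw):
    # One month's share of the estimated annual charge (12 intervals).
    return rate_gbp_per_gsp_kw * estimate_gsp_kw / 12


start = financial_year_start(datetime(2020, 1, 1, tzinfo=timezone.utc))
print(start.date())                                   # 2019-04-01
print(round(monthly_triad_estimate(50.0, 120.0), 2))  # 500.0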
Example #39
def content(start_date, finish_date, g_contract_id, user):
    report_context = {}
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "gas_virtual_bills.csv", user
        )
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")

        g_contract = GContract.get_by_id(sess, g_contract_id)
        forecast_dt = forecast_date()

        month_start = utc_datetime(start_date.year, start_date.month, 1)
        month_finish = month_start + relativedelta(months=1) - HH

        bill_titles = contract_func(report_context, g_contract, "virtual_bill_titles")()
        writer.writerow(
            ["MPRN", "Site Code", "Site Name", "Account", "From", "To"] + bill_titles
        )

        while month_start <= finish_date:
            period_start = hh_max(start_date, month_start)
            period_finish = hh_min(finish_date, month_finish)

            for g_era in (
                sess.query(GEra)
                .distinct()
                .filter(
                    GEra.g_contract == g_contract,
                    GEra.start_date <= period_finish,
                    or_(GEra.finish_date == null(), GEra.finish_date >= period_start),
                )
            ):

                chunk_start = hh_max(g_era.start_date, period_start)
                chunk_finish = hh_min(g_era.finish_date, period_finish)

                data_source = GDataSource(
                    sess,
                    chunk_start,
                    chunk_finish,
                    forecast_dt,
                    g_era,
                    report_context,
                    None,
                )

                site = (
                    sess.query(Site)
                    .join(SiteGEra)
                    .filter(SiteGEra.g_era == g_era, SiteGEra.is_physical == true())
                    .one()
                )

                vals = [
                    data_source.mprn,
                    site.code,
                    site.name,
                    data_source.account,
                    hh_format(data_source.start_date),
                    hh_format(data_source.finish_date),
                ]

                contract_func(report_context, g_contract, "virtual_bill")(data_source)
                bill = data_source.bill
                for title in bill_titles:
                    if title in bill:
                        val = make_val(bill[title])
                        del bill[title]
                    else:
                        val = ""
                    vals.append(val)

                for k in sorted(bill.keys()):
                    vals.append(k)
                    vals.append(str(bill[k]))
                writer.writerow(vals)

            month_start += relativedelta(months=1)
            month_finish = month_start + relativedelta(months=1) - HH
    except BadRequest as e:
        writer.writerow(["Problem: " + e.description])
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
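The outer loop above advances a month at a time and clamps each era to the current month with hh_max/hh_min. A standalone sketch of that monthly windowing, with simplified stand-ins for the half-hour helpers (the real chellow versions differ in detail):

# Sketch of the monthly windowing used above. The hh_max/hh_min stand-ins
# treat None as "no bound", so the other argument wins.
from datetime import datetime, timedelta, timezone
from dateutil.relativedelta import relativedelta

HH = timedelta(minutes=30)


def hh_max(a, b):
    return b if a is None else a if b is None else max(a, b)


def hh_min(a, b):
    return b if a is None else a if b is None else min(a, b)


def month_windows(start_date, finish_date):
    month_start = datetime(start_date.year, start_date.month, 1, tzinfo=timezone.utc)
    while month_start <= finish_date:
        month_finish = month_start + relativedelta(months=1) - HH
        yield hh_max(start_date, month_start), hh_min(finish_date, month_finish)
        month_start += relativedelta(months=1)


start = datetime(2021, 1, 20, tzinfo=timezone.utc)
finish = datetime(2021, 3, 10, tzinfo=timezone.utc)
for chunk_start, chunk_finish in month_windows(start, finish):
    print(chunk_start.date(), chunk_finish.date())
# 2021-01-20 2021-01-31
# 2021-02-01 2021-02-28
# 2021-03-01 2021-03-10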
Example #40
def content(batch_id, bill_id, user):
    caches = {}
    tmp_file = sess = bill = None
    forecast_date = Datetime.max.replace(tzinfo=pytz.utc)
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')
        bills = sess.query(Bill).options(
            joinedload(Bill.supply),
            subqueryload(Bill.reads).joinedload(RegisterRead.present_type),
            subqueryload(Bill.reads).joinedload(RegisterRead.previous_type))
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = bills.filter(Bill.batch == batch).order_by(Bill.reference)
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = bills.filter(Bill.id == bill.id)
            batch = bill.batch

        contract = batch.contract
        market_role_code = contract.market_role.code

        vbf = chellow.computer.contract_func(caches, contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")

        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()

        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh', 'bill-net-gbp',
            'bill-vat-gbp', 'bill-start-date', 'bill-finish-date',
            'bill-mpan-core', 'site-code', 'site-name', 'covered-from',
            'covered-to', 'covered-bills', 'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)

        writer.writerow(titles)
        for bill in bills:
            problem = ''
            supply = bill.supply

            read_dict = {}
            for read in bill.reads:
                gen_start = read.present_date.replace(hour=0, minute=0)
                gen_finish = gen_start + relativedelta(days=1) - HH
                msn_match = False
                read_msn = read.msn
                for read_era in supply.find_eras(sess, gen_start, gen_finish):
                    if read_msn == read_era.msn:
                        msn_match = True
                        break

                if not msn_match:
                    problem += "The MSN " + read_msn + \
                        " of the register read " + str(read.id) + \
                        " doesn't match the MSN of the era."

                for dt, read_type in [
                        (read.present_date, read.present_type),
                        (read.previous_date, read.previous_type)]:
                    key = str(dt) + "-" + read.msn
                    try:
                        if read_type != read_dict[key]:
                            problem += " Reads taken on " + str(dt) + \
                                " have differing read types."
                    except KeyError:
                        read_dict[key] = read_type

            bill_start = bill.start_date
            bill_finish = bill.finish_date

            era = supply.find_era_at(sess, bill_finish)
            if era is None:
                raise BadRequest(
                    "Extraordinary! There isn't an era for the bill " +
                    str(bill.id) + ".")

            values = [
                batch.reference, bill.reference, bill.bill_type.code,
                bill.kwh, bill.net, bill.vat, hh_format(bill_start),
                hh_format(bill_finish), era.imp_mpan_core]

            covered_start = bill_start
            covered_finish = bill_finish
            covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}
            enlarged = True

            while enlarged:
                enlarged = False
                covered_bills = OrderedDict(
                    (b.id, b) for b in sess.query(Bill).join(Batch).
                    join(Contract).join(MarketRole).
                    filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start,
                        MarketRole.code == market_role_code).order_by(
                            Bill.start_date, Bill.issue_date))
                while True:
                    to_del = None
                    for a, b in combinations(covered_bills.values(), 2):
                        if all(
                                (
                                    a.start_date == b.start_date,
                                    a.finish_date == b.finish_date,
                                    a.kwh == -1 * b.kwh, a.net == -1 * b.net,
                                    a.vat == -1 * b.vat,
                                    a.gross == -1 * b.gross)):
                            to_del = (a.id, b.id)
                            break
                    if to_del is None:
                        break
                    else:
                        for k in to_del:
                            del covered_bills[k]
                for covered_bill in covered_bills.values():
                    if covered_bill.start_date < covered_start:
                        covered_start = covered_bill.start_date
                        enlarged = True
                        break
                    if covered_bill.finish_date > covered_finish:
                        covered_finish = covered_bill.finish_date
                        enlarged = True
                        break

            if bill.id not in covered_bills:
                continue

            primary_covered_bill = None
            for covered_bill in covered_bills.values():
                covered_bdown['net-gbp'] += float(covered_bill.net)
                covered_bdown['vat-gbp'] += float(covered_bill.vat)
                covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                if len(covered_bill.breakdown) > 0:
                    covered_rates = defaultdict(set)
                    for k, v in eval(covered_bill.breakdown, {}).items():

                        if k.endswith('rate'):
                            covered_rates[k].add(v)
                        elif k != 'raw-lines':
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " the value " +
                                    str(v) +
                                    " can't be added to the existing value " +
                                    str(covered_bdown[k]) + ". " + str(detail))
                    for k, v in covered_rates.items():
                        covered_bdown[k] = v.pop() if len(v) == 1 else None
                if primary_covered_bill is None or (
                        (
                            covered_bill.finish_date -
                            covered_bill.start_date) > (
                            primary_covered_bill.finish_date -
                            primary_covered_bill.start_date)):
                    primary_covered_bill = covered_bill

            virtual_bill = {}
            metered_kwh = 0
            for era in sess.query(Era).filter(
                    Era.supply == supply, Era.imp_mpan_core != null(),
                    Era.start_date <= covered_finish,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= covered_start),
                    or_(
                        Era.mop_contract == contract,
                        Era.hhdc_contract == contract,
                        Era.imp_supplier_contract == contract,
                        Era.exp_supplier_contract == contract)).distinct():
                site = sess.query(Site).join(SiteEra).filter(
                    SiteEra.is_physical == true(), SiteEra.era == era).one()

                chunk_start = hh_max(covered_start, era.start_date)
                chunk_finish = hh_min(covered_finish, era.finish_date)

                data_source = chellow.computer.SupplySource(
                    sess, chunk_start, chunk_finish, forecast_date, era, True,
                    caches, primary_covered_bill)

                if data_source.measurement_type == 'hh':
                    metered_kwh += sum(
                        h['msp-kwh'] for h in data_source.hh_data)
                else:
                    ds = chellow.computer.SupplySource(
                        sess, chunk_start, chunk_finish, forecast_date, era,
                        True, caches)
                    metered_kwh += sum(h['msp-kwh'] for h in ds.hh_data)

                vbf(data_source)

                if market_role_code == 'X':
                    vb = data_source.supplier_bill
                elif market_role_code == 'C':
                    vb = data_source.dc_bill
                elif market_role_code == 'M':
                    vb = data_source.mop_bill
                else:
                    raise BadRequest("Odd market role.")

                for k, v in vb.items():
                    try:
                        if isinstance(v, set):
                            virtual_bill[k].update(v)
                        else:
                            virtual_bill[k] += v
                    except KeyError:
                        virtual_bill[k] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(k) + " and value " + str(v) +
                            ". " + str(detail))

            values += [
                site.code, site.name, hh_format(covered_start),
                hh_format(covered_finish),
                ':'.join(
                    str(i).replace(',', '') for i in covered_bills.keys()),
                metered_kwh]
            for title in virtual_bill_titles:
                try:
                    cov_val = covered_bdown[title]
                    values.append(cov_val)
                    del covered_bdown[title]
                except KeyError:
                    cov_val = None
                    values.append('')

                try:
                    virt_val = csv_make_val(virtual_bill[title])
                    values.append(virt_val)
                    del virtual_bill[title]
                except KeyError:
                    virt_val = None
                    values.append('')

                if title.endswith('-gbp'):
                    if isinstance(virt_val, (int, float)):
                        if isinstance(cov_val, (int, float)):
                            values.append(cov_val - virt_val)
                        else:
                            values.append(0 - virt_val)
                    else:
                        values.append('')

            for title in sorted(virtual_bill.keys()):
                virt_val = csv_make_val(virtual_bill[title])
                values += ['virtual-' + title, virt_val]
                if title in covered_bdown:
                    values += ['covered-' + title, covered_bdown[title]]
                else:
                    values += ['', '']

            writer.writerow(values)
    except BadRequest as e:
        if bill is None:
            prefix = "Problem: "
        else:
            prefix = "Problem with bill " + str(bill.id) + ": "
        tmp_file.write(prefix + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        if tmp_file is not None:
            tmp_file.close()
            os.rename(running_name, finished_name)
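The virtual_bill accumulation above merges values that may be numbers (summed) or sets (unioned), creating keys on first sight. A short sketch of that merge with illustrative bill fragments:

# Sketch of the virtual-bill merge used above: numeric values are summed,
# set values are unioned, and unknown keys are created on first use.
def merge_bill(virtual_bill, fragment):
    for k, v in fragment.items():
        try:
            if isinstance(v, set):
                virtual_bill[k].update(v)
            else:
                virtual_bill[k] += v
        except KeyError:
            virtual_bill[k] = set(v) if isinstance(v, set) else v
    return virtual_bill


vb = {}
merge_bill(vb, {"net-gbp": 10.5, "rates": {"day"}})
merge_bill(vb, {"net-gbp": 4.5, "rates": {"night"}, "vat-gbp": 3.0})
print(vb["net-gbp"], sorted(vb["rates"]), vb["vat-gbp"])  # 15.0 ['day', 'night'] 3.0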