Example #1
def test_c_months_u():
    finish_year, finish_month = 2009, 3
    start, _ = next(
        c_months_u(finish_year=finish_year,
                   finish_month=finish_month,
                   months=1))
    assert start == to_utc(ct_datetime(finish_year, finish_month))
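
Taken with Example #3 below, this test shows that c_months_u yields one (start, finish) pair of UTC datetimes per calendar month, the finish falling on the month's last half hour. A minimal sketch of that behaviour (the chellow.utils import path is an assumption; only the names appear in these examples):

# Sketch only: reproduces the month pairs implied by Examples #1 and #3.
# Assumes these helpers live in chellow.utils, which the examples don't show.
from chellow.utils import c_months_u, ct_datetime, to_utc

# The months=2 keyword form (as used in Example #2) should cover March and April 2009 here.
pairs = list(c_months_u(start_year=2009, start_month=3, months=2))
# Each pair spans one calendar month at half-hour resolution:
assert pairs[0] == (to_utc(ct_datetime(2009, 3)), to_utc(ct_datetime(2009, 3, 31, 23, 30)))
assert pairs[1] == (to_utc(ct_datetime(2009, 4)), to_utc(ct_datetime(2009, 4, 30, 23, 30)))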
Example #2
    def run(self):
        while not self.stopped.isSet():
            if self.lock.acquire(False):
                sess = self.global_alert = None
                try:
                    sess = Session()
                    self.log("Starting to check bmarketidx.")
                    contract = Contract.get_non_core_by_name(sess, "bmarketidx")
                    latest_rs = (
                        sess.query(RateScript)
                        .filter(RateScript.contract_id == contract.id)
                        .order_by(RateScript.start_date.desc())
                        .first()
                    )
                    start_ct = to_ct(latest_rs.start_date)

                    months = list(
                        c_months_u(
                            start_year=start_ct.year,
                            start_month=start_ct.month,
                            months=2,
                        )
                    )
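                    # months[1] is the calendar month after the one containing the latest rate script's start date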
                    month_start, month_finish = months[1]

                    now = utc_datetime_now()
                    if now > month_finish:
                        _process_month(
                            self.log,
                            sess,
                            contract,
                            latest_rs,
                            month_start,
                            month_finish,
                        )

                except BaseException:
                    self.log(f"Outer problem {traceback.format_exc()}")
                    sess.rollback()
                    self.global_alert = (
                        "There's a problem with the bmarketidx automatic importer."
                    )
                finally:
                    self.lock.release()
                    self.log("Finished checking bmarketidx rates.")
                    if sess is not None:
                        sess.close()

            self.going.wait(2 * 60 * 60)
            self.going.clear()
Example #3
def test_c_months_u_start_finish():
    start_year, start_month, finish_year, finish_month = 2009, 3, 2009, 4
    month_list = list(
        c_months_u(
            start_year=start_year,
            start_month=start_month,
            finish_year=finish_year,
            finish_month=finish_month,
        ))
    print(month_list)
    assert month_list == [
        (to_utc(ct_datetime(2009, 3)), to_utc(ct_datetime(2009, 3, 31, 23, 30))),
        (to_utc(ct_datetime(2009, 4)), to_utc(ct_datetime(2009, 4, 30, 23, 30))),
    ]
Example #4
def do_get(sess):
    months = req_int("months")
    finish_year = req_int("finish_year")
    finish_month = req_int("finish_month")

    month_list = list(
        c_months_u(finish_year=finish_year,
                   finish_month=finish_month,
                   months=months))
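    # Bound the report by the start of the first month and the finish of the last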
    start_date, finish_date = month_list[0][0], month_list[-1][-1]

    typ = req_str("type")
    site_id = req_int("site_id")
    args = (start_date, finish_date, site_id, typ, g.user)
    threading.Thread(target=content, args=args).start()
    return chellow_redirect("/downloads", 303)
Example #5
def content(year, month, months, supply_id, user):
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "register_reads.csv", user)
        f = open(running_name, mode="w", newline="")
        w = csv.writer(f, lineterminator="\n")
        titles = (
            "Duration Start",
            "Duration Finish",
            "Supply Id",
            "Import MPAN Core",
            "Export MPAN Core",
            "Batch Reference",
            "Bill Id",
            "Bill Reference",
            "Bill Issue Date",
            "Bill Type",
            "Register Read Id",
            "TPR",
            "Coefficient",
            "Previous Read Date",
            "Previous Read Value",
            "Previous Read Type",
            "Present Read Date",
            "Present Read Value",
            "Present Read Type",
        )
        w.writerow(titles)

        month_pairs = list(
            c_months_u(finish_year=year, finish_month=month, months=months))
        start_date, finish_date = month_pairs[0][0], month_pairs[-1][-1]

        supplies = (sess.query(Supply).join(Bill).join(RegisterRead).filter(
            or_(
                and_(
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date,
                ),
                and_(
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date,
                ),
            )).order_by(Bill.supply_id).distinct())

        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            supplies = supplies.filter(Bill.supply == supply)

        for supply in supplies:
            supply_id = supply.id
            for bill, batch, bill_type in (sess.query(
                    Bill, Batch, BillType).join(Batch).join(BillType).join(
                        RegisterRead).filter(
                            Bill.supply == supply,
                            or_(
                                and_(
                                    RegisterRead.present_date >= start_date,
                                    RegisterRead.present_date <= finish_date,
                                ),
                                and_(
                                    RegisterRead.previous_date >= start_date,
                                    RegisterRead.previous_date <= finish_date,
                                ),
                            ),
                        )):

                era = supply.find_era_at(sess, bill.start_date)
                if era is None:
                    eras = (sess.query(Era).filter(
                        Era.supply == supply).order_by(Era.start_date).all())
                    if bill.start_date < eras[0].start_date:
                        era = eras[0]
                    else:
                        era = eras[-1]

                for read in (sess.query(RegisterRead).filter(
                        RegisterRead.bill == bill,
                        or_(
                            and_(
                                RegisterRead.present_date >= start_date,
                                RegisterRead.present_date <= finish_date,
                            ),
                            and_(
                                RegisterRead.previous_date >= start_date,
                                RegisterRead.previous_date <= finish_date,
                            ),
                        ),
                ).options(
                        joinedload(RegisterRead.tpr),
                        joinedload(RegisterRead.previous_type),
                        joinedload(RegisterRead.present_type),
                )):

                    vals = [
                        start_date,
                        finish_date,
                        supply_id,
                        era.imp_mpan_core,
                        era.exp_mpan_core,
                        batch.reference,
                        bill.id,
                        bill.reference,
                        bill.issue_date,
                        bill_type.code,
                        read.id,
                        "md" if read.tpr is None else read.tpr.code,
                        read.coefficient,
                        read.previous_date,
                        read.previous_value,
                        read.previous_type.code,
                        read.present_date,
                        read.present_value,
                        read.present_type.code,
                    ]
                    w.writerow(csv_make_val(v) for v in vals)

                # Avoid a long-running transaction
                sess.rollback()

    except BadRequest as e:
        w.writerow([e.description])
    except BaseException:
        msg = traceback.format_exc()
        f.write(msg)
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #6
def content(contract_id, end_year, end_month, months, user):
    caches = {}
    sess = f = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "displaced.csv", user)
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = [
            "Site Code",
            "Site Name",
            "Associated Site Ids",
            "From",
            "To",
            "Gen Types",
            "CHP kWh",
            "LM kWh",
            "Turbine kWh",
            "PV kWh",
        ]

        month_list = list(
            c_months_u(finish_year=end_year,
                       finish_month=end_month,
                       months=months))
        start_date, finish_date = month_list[0][0], month_list[-1][-1]

        forecast_date = chellow.computer.forecast_date()

        contract = Contract.get_supplier_by_id(sess, contract_id)
        sites = (sess.query(Site).join(SiteEra).join(Era).join(Supply).join(
            Source).filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                or_(Source.code.in_(("gen", "gen-net")),
                    Era.exp_mpan_core != null()),
            ).distinct())
        bill_titles = chellow.computer.contract_func(
            caches, contract, "displaced_virtual_bill_titles")()

        for title in bill_titles:
            if title == "total-msp-kwh":
                title = "total-displaced-msp-kwh"
            titles.append(title)
        writer.writerow(titles)

        for site in sites:
            for month_start, month_finish in month_list:
                displaced_era = chellow.computer.displaced_era(
                    sess, caches, site, month_start, month_finish,
                    forecast_date)
                if displaced_era is None:
                    continue
                supplier_contract = displaced_era.imp_supplier_contract
                if contract is not None and contract != supplier_contract:
                    continue

                linked_sites = set()
                generator_types = set()
                for era in (sess.query(Era).join(SiteEra).filter(
                        SiteEra.site == site,
                        Era.start_date <= month_finish,
                        or_(Era.finish_date == null(),
                            Era.finish_date >= month_start),
                )):
                    for site_era in era.site_eras:
                        if site_era.site != site:
                            linked_sites.add(site_era.site.code)
                    supply = era.supply
                    if supply.generator_type is not None:
                        generator_types.add(supply.generator_type.code)

                supply_ids = set()
                for era in (sess.query(Era).join(SiteEra).filter(
                        SiteEra.site == site,
                        SiteEra.is_physical,
                        Era.start_date <= month_finish,
                        or_(Era.finish_date == null(),
                            Era.finish_date >= month_start),
                )):
                    supply_ids.add(era.supply.id)

                vals = [
                    site.code,
                    site.name,
                    ", ".join(list(linked_sites)),
                    hh_format(month_start),
                    hh_format(month_finish),
                    ", ".join(list(generator_types)),
                ]

                total_gen_breakdown = {}

                results = iter(
                    sess.execute(
                        "select supply.id, hh_datum.value, "
                        "hh_datum.start_date, channel.imp_related, "
                        "source.code, generator_type.code as "
                        "gen_type_code from hh_datum, channel, source, "
                        "era, supply left outer join generator_type on "
                        "supply.generator_type_id = generator_type.id "
                        "where hh_datum.channel_id = channel.id and "
                        "channel.era_id = era.id and era.supply_id = "
                        "supply.id and supply.source_id = source.id and "
                        "channel.channel_type = 'ACTIVE' and not "
                        "(source.code = 'net' and channel.imp_related "
                        "is true) and hh_datum.start_date >= "
                        ":month_start and hh_datum.start_date "
                        "<= :month_finish and "
                        "supply.id = any(:supply_ids) order "
                        "by hh_datum.start_date, supply.id",
                        params={
                            "month_start": month_start,
                            "month_finish": month_finish,
                            "supply_ids": sorted(list(supply_ids)),
                        },
                    ))

                (
                    sup_id,
                    hh_val,
                    hh_start,
                    imp_related,
                    source_code,
                    gen_type_code,
                ) = next(results, (None, None, None, None, None, None))

                for hh_date in hh_range(caches, month_start, month_finish):
                    gen_breakdown = {}
                    exported = 0
                    while hh_start == hh_date:
                        if not imp_related and source_code in ("net",
                                                               "gen-net"):
                            exported += hh_val
                        if (imp_related and source_code == "gen") or (
                                not imp_related and source_code == "gen-net"):
                            gen_breakdown[gen_type_code] = (
                                gen_breakdown.setdefault(gen_type_code, 0) +
                                hh_val)

                        if (not imp_related and source_code == "gen") or (
                                imp_related and source_code == "gen-net"):
                            gen_breakdown[gen_type_code] = (
                                gen_breakdown.setdefault(gen_type_code, 0) -
                                hh_val)

                        (
                            sup_id,
                            hh_val,
                            hh_start,
                            imp_related,
                            source_code,
                            gen_type_code,
                        ) = next(results, (None, None, None, None, None, None))

                    displaced = sum(gen_breakdown.values()) - exported
                    added_so_far = 0
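                    # Allocate generation kWh to the running totals, capping at the displaced amount when it is positive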
                    for key in sorted(gen_breakdown.keys()):
                        kwh = gen_breakdown[key]
                        if displaced < 0:
                            total_gen_breakdown[key] = (
                                total_gen_breakdown.get(key, 0) + kwh)
                        else:
                            if kwh + added_so_far > displaced:
                                total_gen_breakdown[key] = (
                                    total_gen_breakdown.get(key, 0) +
                                    displaced - added_so_far)
                                break
                            else:
                                total_gen_breakdown[key] = (
                                    total_gen_breakdown.get(key, 0) + kwh)
                                added_so_far += kwh

                for title in ["chp", "lm", "turb", "pv"]:
                    vals.append(str(total_gen_breakdown.get(title, "")))

                site_ds = chellow.computer.SiteSource(
                    sess,
                    site,
                    month_start,
                    month_finish,
                    forecast_date,
                    caches,
                    displaced_era,
                )
                disp_func = chellow.computer.contract_func(
                    caches, supplier_contract, "displaced_virtual_bill")
                disp_func(site_ds)
                bill = site_ds.supplier_bill
                for title in bill_titles:
                    if title in bill:
                        vals.append(to_val(bill[title]))
                        del bill[title]
                    else:
                        vals.append("")

                for k in sorted(bill.keys()):
                    vals.append(k)
                    vals.append(str(bill[k]))
                writer.writerow(vals)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Example #7
def content(scenario_props, base_name, site_id, supply_id, user, compression,
            site_codes, now):
    report_context = {}

    try:
        comp = report_context["computer"]
    except KeyError:
        comp = report_context["computer"] = {}

    try:
        rate_cache = comp["rates"]
    except KeyError:
        rate_cache = comp["rates"] = {}

    try:
        ind_cont = report_context["contract_names"]
    except KeyError:
        ind_cont = report_context["contract_names"] = {}

    sess = None
    try:
        sess = Session()

        start_year = scenario_props["scenario_start_year"]
        start_month = scenario_props["scenario_start_month"]
        months = scenario_props["scenario_duration"]

        month_pairs = list(
            c_months_u(start_year=start_year,
                       start_month=start_month,
                       months=months))
        start_date_utc = month_pairs[0][0]
        finish_date_utc = month_pairs[-1][-1]

        base_name.append(
            hh_format(start_date_utc).replace(" ", "_").replace(":", "").replace("-", "")
        )

        base_name.append("for")
        base_name.append(str(months))
        base_name.append("months")

        if "forecast_from" in scenario_props:
            forecast_from = scenario_props["forecast_from"]
        else:
            forecast_from = None

        if forecast_from is None:
            forecast_from = chellow.computer.forecast_date()
        else:
            forecast_from = to_utc(forecast_from)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append("site")
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append("supply")
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)
        if len(site_codes) > 0:
            base_name.append("sitecodes")
            sites = sites.filter(Site.code.in_(site_codes))

        running_name, finished_name = chellow.dloads.make_names(
            "_".join(base_name) + ".ods", user)

        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []

        for rate_script in scenario_props.get("local_rates", []):
            contract_id = rate_script["contract_id"]
            try:
                cont_cache = rate_cache[contract_id]
            except KeyError:
                cont_cache = rate_cache[contract_id] = {}

            try:
                rate_script_start = rate_script["start_date"]
            except KeyError:
                raise BadRequest(
                    f"Problem in the scenario properties. Can't find the "
                    f"'start_date' key of the contract {contract_id} in "
                    f"the 'local_rates' map.")

            props = PropDict("scenario properties", rate_script["script"])
            for dt in hh_range(report_context, rate_script_start,
                               rate_script["finish_date"]):
                cont_cache[dt] = props

        for rate_script in scenario_props.get("industry_rates", []):
            contract_name = rate_script["contract_name"]
            try:
                cont_cache = ind_cont[contract_name]
            except KeyError:
                cont_cache = ind_cont[contract_name] = {}

            rfinish = rate_script["finish_date"]
            if rfinish is None:
                raise BadRequest(
                    f"For the industry rate {contract_name} the finish_date "
                    f"can't be null.")
            for dt in hh_range(report_context, rate_script["start_date"],
                               rfinish):
                cont_cache[dt] = PropDict("scenario properties",
                                          rate_script["script"])

        era_maps = scenario_props.get("era_maps", {})
        by_hh = scenario_props.get("by_hh", False)

        scenario_hh = scenario_props.get("hh_data", {})

        era_header_titles = [
            "creation-date",
            "imp-mpan-core",
            "imp-supplier-contract",
            "exp-mpan-core",
            "exp-supplier-contract",
            "metering-type",
            "source",
            "generator-type",
            "supply-name",
            "msn",
            "pc",
            "site-id",
            "site-name",
            "associated-site-ids",
            "month",
        ]
        site_header_titles = [
            "creation-date",
            "site-id",
            "site-name",
            "associated-site-ids",
            "month",
            "metering-type",
            "sources",
            "generator-types",
        ]
        summary_titles = [
            "import-net-kwh",
            "export-net-kwh",
            "import-gen-kwh",
            "export-gen-kwh",
            "import-3rd-party-kwh",
            "export-3rd-party-kwh",
            "displaced-kwh",
            "used-kwh",
            "used-3rd-party-kwh",
            "import-net-gbp",
            "export-net-gbp",
            "import-gen-gbp",
            "export-gen-gbp",
            "import-3rd-party-gbp",
            "export-3rd-party-gbp",
            "displaced-gbp",
            "used-gbp",
            "used-3rd-party-gbp",
            "billed-import-net-kwh",
            "billed-import-net-gbp",
            "billed-supplier-import-net-gbp",
            "billed-dc-import-net-gbp",
            "billed-mop-import-net-gbp",
        ]

        title_dict = {}
        for cont_type, con_attr in (
            ("mop", Era.mop_contract),
            ("dc", Era.dc_contract),
            ("imp-supplier", Era.imp_supplier_contract),
            ("exp-supplier", Era.exp_supplier_contract),
        ):
            titles = []
            title_dict[cont_type] = titles
            conts = (sess.query(Contract).join(con_attr).join(
                Era.supply).join(Source).filter(
                    Era.start_date <= finish_date_utc,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= start_date_utc),
                ).distinct().order_by(Contract.id))
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, "virtual_bill_titles")
                if title_func is None:
                    raise Exception(
                        f"For the contract {cont.name} there doesn't seem to "
                        f"be a 'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        tpr_query = (sess.query(Tpr).join(MeasurementRequirement).join(
            Ssc).join(Era).filter(
                Era.start_date <= finish_date_utc,
                or_(Era.finish_date == null(),
                    Era.finish_date >= start_date_utc),
            ).order_by(Tpr.code).distinct())
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ("-kwh", "-rate", "-gbp"):
                title_dict["imp-supplier"].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ("-kwh", "-rate", "-gbp"):
                title_dict["exp-supplier"].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ["mop-" + t for t in title_dict["mop"]] + [None] +
            ["dc-" + t for t in title_dict["dc"]] + [None] +
            ["imp-supplier-" + t for t in title_dict["imp-supplier"]] +
            [None] + ["exp-supplier-" + t for t in title_dict["exp-supplier"]])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        deltas = {}
        normal_reads = set()
        normal_read_rows = []

        for site in sites:
            deltas[site.id] = _make_site_deltas(sess, report_context, site,
                                                scenario_hh, forecast_from,
                                                supply_id)

        for month_start, month_finish in month_pairs:
            for site in sites:
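                # With by_hh, process each half hour separately; otherwise treat the month as a single chunk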
                if by_hh:
                    sf = [(d, d) for d in hh_range(report_context, month_start,
                                                   month_finish)]
                else:
                    sf = [(month_start, month_finish)]

                for start, finish in sf:
                    normal_reads = normal_reads | _process_site(
                        sess,
                        report_context,
                        forecast_from,
                        start,
                        finish,
                        site,
                        deltas[site.id],
                        supply_id,
                        era_maps,
                        now,
                        summary_titles,
                        title_dict,
                        era_rows,
                        site_rows,
                    )

            normal_read_rows = [[
                "mpan_core", "date", "msn", "type", "registers"
            ]]
            for mpan_core, r in sorted(list(normal_reads)):
                row = [mpan_core, r.date, r.msn, r.type] + list(r.reads)
                normal_read_rows.append(row)

            write_spreadsheet(rf, compression, site_rows, era_rows,
                              normal_read_rows)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows,
                          normal_read_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows,
                          normal_read_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names("error.txt", user)
            ef = open(r_name, "w")
            ef.write(msg + "\n")
            ef.close()
Example #8
def _make_site_deltas(sess, report_context, site, scenario_hh, forecast_from,
                      supply_id):
    site_scenario_hh = scenario_hh.get(site.code, {})

    site_deltas = {"hhs": {}}
    delts = site_deltas["supply_deltas"] = {}
    for is_import in (True, False):
        delts[is_import] = {}
        for src in ("gen", "net", "gen-net", "3rd-party", "3rd-party-reverse",
                    "sub"):
            delts[is_import][src] = {"site": {}}

    earliest_delta = to_utc(Datetime.max)
    latest_delta = to_utc(Datetime.min)

    found_hh = False
    for typ in ("used", "generated", "parasitic", "gen_net"):
        hh_str = site_scenario_hh.get(typ, "")
        hh_data = site_scenario_hh[typ] = {}
        for row in csv.reader(StringIO(hh_str)):
            cells = [cell.strip() for cell in row]
            if len("".join(cells)) == 0:
                continue

            if len(cells) != 2:
                raise BadRequest("Can't interpret the row " + str(cells) +
                                 " it should be of the form 'timestamp, kWh'")

            date_str, kwh_str = cells
            ts = parse_hh_start(date_str)
            earliest_delta = min(ts, earliest_delta)
            latest_delta = max(ts, latest_delta)
            try:
                hh_data[ts] = float(kwh_str)
            except ValueError as e:
                raise BadRequest("When looking at " + typ +
                                 " hh data, can't parse the "
                                 "kWh at " + date_str + ": " + str(e))
            found_hh = True

    if not found_hh:
        return site_deltas

    scenario_used = site_scenario_hh["used"]
    scenario_generated = site_scenario_hh["generated"]
    scenario_parasitic = site_scenario_hh["parasitic"]
    scenario_gen_net = site_scenario_hh["gen_net"]

    earliest_delta_ct = to_ct(earliest_delta)
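    # With months=None the generator appears to be open-ended; the loop breaks once past latest_delta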
    for month_start, month_finish in c_months_u(earliest_delta_ct.year,
                                                earliest_delta_ct.month,
                                                months=None):
        if month_start > latest_delta:
            break
        chunk_start = hh_max(month_start, earliest_delta)
        chunk_finish = hh_min(month_finish, latest_delta)

        site_ds = chellow.computer.SiteSource(sess, site, chunk_start,
                                              chunk_finish, forecast_from,
                                              report_context)
        hh_map = dict((h["start-date"], h) for h in site_ds.hh_data)

        for era in (sess.query(Era).join(SiteEra).join(Pc).filter(
                SiteEra.site == site,
                SiteEra.is_physical == true(),
                Era.imp_mpan_core != null(),
                Pc.code != "00",
                Era.start_date <= chunk_finish,
                or_(Era.finish_date == null(), Era.finish_date >= chunk_start),
        )):

            if supply_id is not None and era.supply_id != supply_id:
                continue

            ss_start = hh_max(era.start_date, chunk_start)
            ss_finish = hh_min(era.finish_date, chunk_finish)

            ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era,
                              True, report_context)

            for hh in ss.hh_data:
                sdatum = hh_map[hh["start-date"]]
                sdatum["import-net-kwh"] += hh["msp-kwh"]
                sdatum["used-kwh"] += hh["msp-kwh"]

        for era in (sess.query(Era).join(SiteEra).join(Pc).join(Supply).join(
                Source).filter(
                    SiteEra.site == site,
                    SiteEra.is_physical == true(),
                    Era.imp_mpan_core != null(),
                    Era.start_date <= chunk_finish,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= chunk_start),
                    Source.code == "gen-net",
                )):

            if supply_id is not None and era.supply_id != supply_id:
                continue

            ss_start = hh_max(era.start_date, chunk_start)
            ss_finish = hh_min(era.finish_date, chunk_finish)

            ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era,
                              False, report_context)

            for hh in ss.hh_data:
                sdatum = hh_map[hh["start-date"]]
                try:
                    sdatum["gen-net-kwh"] += hh["msp-kwh"]
                except KeyError:
                    sdatum["gen-net-kwh"] = hh["msp-kwh"]

        for hh_start, hh in hh_map.items():
            if hh_start in scenario_used:
                used_delt = scenario_used[hh_start] - hh["used-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if used_delt < 0:
                    diff = hh["import-net-kwh"] + used_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt += used_delt
                else:
                    diff = hh["export-net-kwh"] - used_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt -= used_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["used-kwh"] += used_delt
                hh["msp-kwh"] -= exp_net_delt

            if hh_start in scenario_generated:
                imp_gen_delt = scenario_generated[hh_start] - hh[
                    "import-gen-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if imp_gen_delt < 0:
                    diff = hh["export-net-kwh"] + imp_gen_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt += imp_gen_delt
                else:
                    diff = hh["import-net-kwh"] - imp_gen_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt -= imp_gen_delt

                try:
                    delts[True]["gen"]["site"][hh_start] += imp_gen_delt
                except KeyError:
                    delts[True]["gen"]["site"][hh_start] = imp_gen_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["import-gen-kwh"] += imp_gen_delt
                hh["msp-kwh"] -= imp_net_delt

            if hh_start in scenario_parasitic:
                exp_gen_delt = scenario_parasitic[hh_start] - hh[
                    "export-gen-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if exp_gen_delt < 0:
                    diff = hh["import-net-kwh"] + exp_gen_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt += exp_gen_delt
                else:
                    diff = hh["export-net-kwh"] - exp_gen_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt -= exp_gen_delt

                try:
                    delts[False]["gen"]["site"][hh_start] += imp_gen_delt
                except KeyError:
                    delts[False]["gen"]["site"][hh_start] = exp_gen_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["export-gen-kwh"] += exp_gen_delt
                hh["msp-kwh"] -= imp_net_delt

            if hh_start in scenario_gen_net:
                gen_net_delt = scenario_gen_net[hh_start] - hh["gen-net-kwh"]

                try:
                    delts[False]["gen-net"]["site"][hh_start] += gen_net_delt
                except KeyError:
                    delts[False]["gen-net"]["site"][hh_start] = gen_net_delt

                hh["import-gen-kwh"] += gen_net_delt
                hh["export-net-kwh"] += gen_net_delt

            site_deltas["hhs"][hh_start] = hh

    sup_deltas = site_deltas["supply_deltas"][False]["net"]["site"]
    if all(v == 0 for v in sup_deltas.values()):
        sup_deltas.clear()

    return site_deltas
Example #9
def content(contract_id, end_year, end_month, months, user):
    caches = {}
    sess = f = supply_source = None
    try:
        sess = Session()
        contract = Contract.get_dc_by_id(sess, contract_id)

        month_list = list(
            c_months_u(finish_year=end_year,
                       finish_month=end_month,
                       months=months))
        start_date, finish_date = month_list[0][0], month_list[-1][-1]

        forecast_date = chellow.computer.forecast_date()
        running_name, finished_name = chellow.dloads.make_names(
            "dc_virtual_bills.csv", user)

        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")

        bill_titles = chellow.computer.contract_func(caches, contract,
                                                     "virtual_bill_titles")()
        header_titles = [
            "Import MPAN Core",
            "Export MPAN Core",
            "Start Date",
            "Finish Date",
        ]

        vb_func = chellow.computer.contract_func(caches, contract,
                                                 "virtual_bill")

        writer.writerow(header_titles + bill_titles)

        for era in (sess.query(Era).distinct().filter(
                or_(Era.finish_date == null(), Era.finish_date >= start_date),
                Era.start_date <= finish_date,
                Era.dc_contract == contract,
        ).options(joinedload(Era.channels)).order_by(Era.supply_id)):

            imp_mpan_core = era.imp_mpan_core
            if imp_mpan_core is None:
                imp_mpan_core_str = ""
                is_import = False
            else:
                is_import = True
                imp_mpan_core_str = imp_mpan_core

            exp_mpan_core = era.exp_mpan_core
            exp_mpan_core_str = "" if exp_mpan_core is None else exp_mpan_core

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)

            vals = [
                imp_mpan_core_str,
                exp_mpan_core_str,
                hh_format(chunk_start),
                hh_format(chunk_finish),
            ]

            supply_source = chellow.computer.SupplySource(
                sess, chunk_start, chunk_finish, forecast_date, era, is_import,
                caches)
            vb_func(supply_source)
            bill = supply_source.dc_bill

            for title in bill_titles:
                vals.append(csv_make_val(bill.get(title)))
                if title in bill:
                    del bill[title]

            for k in sorted(bill.keys()):
                vals.append(k)
                vals.append(csv_make_val(bill[k]))

            writer.writerow(vals)

            # Avoid long-running transactions
            sess.rollback()
    except BadRequest as e:
        msg = "Problem "
        if supply_source is not None:
            msg += ("with supply " + supply_source.mpan_core +
                    " starting at " + hh_format(supply_source.start_date) +
                    " ")
        msg += str(e)
        writer.writerow([msg])
    except BaseException:
        msg = "Problem " + traceback.format_exc() + "\n"
        f.write(msg)
    finally:
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
        if sess is not None:
            sess.close()
Example #10
def _process_hh(ds, rate_period, est_kw, hh):
    month_start, month_finish = next(
        c_months_u(start_year=hh["ct-year"], start_month=hh["ct-month"]))

    month_start_ct = to_ct(month_start)
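    # Find the April-to-March financial year containing this month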
    if month_start_ct.month > 3:
        year = month_start_ct.year
    else:
        year = month_start_ct.year - 1
    financial_year_start = to_utc(ct_datetime(year, 4, 1))
    last_financial_year_start = to_utc(ct_datetime(year - 1, 4, 1))
    financial_year_finish = to_utc(ct_datetime(year + 1, 3, 31, 23, 30))

    est_triad_kws = []
    earliest_triad = None
    for dt in get_file_rates(ds.caches, "triad_dates",
                             last_financial_year_start)["triad_dates"]:
        triad_hh = None
        earliest_triad = hh_min(earliest_triad, dt)
        try:
            d = next(ds.get_data_sources(dt, dt, financial_year_start))
            chellow.duos.duos_vb(d)
            triad_hh = d.hh_data[0]

            while dt < financial_year_start:
                dt += relativedelta(years=1)

            for d in ds.get_data_sources(dt, dt, financial_year_start):
                chellow.duos.duos_vb(d)
                datum = d.hh_data[0]
                triad_hh["laf"] = datum["laf"]
                triad_hh["gsp-kw"] = datum["laf"] * triad_hh["msp-kw"]
        except StopIteration:
            triad_hh = {
                "hist-start": dt,
                "msp-kw": 0,
                "start-date": dt,
                "status": "before start of MPAN",
                "laf": 1,
                "gsp-kw": 0,
            }
        est_triad_kws.append(triad_hh)

    if ds.site is None:
        era = ds.supply.find_era_at(ds.sess, earliest_triad)
        if (era is None
                or era.get_channel(ds.sess, ds.is_import, "ACTIVE") is None
                and est_kw is None):
            est_kw = 0.85 * max(datum["msp-kwh"] for datum in ds.hh_data) * 2
        if est_kw is not None:
            for est_datum in est_triad_kws:
                est_datum["msp-kw"] = est_kw
                est_datum["gsp-kw"] = est_datum["msp-kw"] * est_datum["laf"]

    gsp_kw = 0
    for i, triad_hh in enumerate(est_triad_kws):
        triad_prefix = "triad-estimate-" + str(i + 1)
        hh[triad_prefix + "-date"] = triad_hh["hist-start"]
        hh[triad_prefix + "-msp-kw"] = triad_hh["msp-kw"]
        hh[triad_prefix + "-status"] = triad_hh["status"]
        hh[triad_prefix + "-laf"] = triad_hh["laf"]
        hh[triad_prefix + "-gsp-kw"] = triad_hh["gsp-kw"]
        gsp_kw += triad_hh["gsp-kw"]

    hh["triad-estimate-gsp-kw"] = gsp_kw / 3
    polarity = "import" if ds.llfc.is_import else "export"
    gsp_group_code = ds.gsp_group_code
    rate = float(
        get_file_rates(
            ds.caches, "triad_rates",
            month_start)["triad_gbp_per_gsp_kw"][polarity][gsp_group_code])

    hh["triad-estimate-rate"] = rate

    est_triad_gbp = hh["triad-estimate-rate"] * hh["triad-estimate-gsp-kw"]

    if rate_period == "monthly":
        total_intervals = 12

        est_intervals = 1
        hh["triad-estimate-months"] = est_intervals
    else:
        dt = financial_year_start
        total_intervals = 0
        while dt <= financial_year_finish:
            total_intervals += 1
            dt += relativedelta(days=1)

        est_intervals = 0
        for d in ds.get_data_sources(month_start, month_finish):
            for h in d.hh_data:
                if h["ct-decimal-hour"] == 0:
                    est_intervals += 1

        hh["triad-estimate-days"] = est_intervals

    hh["triad-estimate-gbp"] = est_triad_gbp / total_intervals * est_intervals

    if hh["ct-month"] == 3:
        triad_kws = []
        for t_date in get_file_rates(ds.caches, "triad_dates",
                                     month_start)["triad_dates"]:
            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (ds.supplier_contract is None
                        or d.supplier_contract == ds.supplier_contract):
                    chellow.duos.duos_vb(d)
                    thh = d.hh_data[0]
                else:
                    thh = {
                        "hist-start": t_date,
                        "msp-kw": 0,
                        "start-date": t_date,
                        "status": "before contract",
                        "laf": "before contract",
                        "gsp-kw": 0,
                    }
            except StopIteration:
                thh = {
                    "hist-start": t_date,
                    "msp-kw": 0,
                    "start-date": t_date,
                    "status": "before start of supply",
                    "laf": "before start of supply",
                    "gsp-kw": 0,
                }

            while t_date < financial_year_start:
                t_date += relativedelta(years=1)

            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (ds.supplier_contract is None
                        or d.supplier_contract == ds.supplier_contract):
                    chellow.duos.duos_vb(d)
                    thh["laf"] = d.hh_data[0]["laf"]
                    thh["gsp-kw"] = thh["laf"] * thh["msp-kw"]
            except StopIteration:
                pass

            triad_kws.append(thh)
        gsp_kw = 0

        for i, triad_hh in enumerate(triad_kws):
            pref = "triad-actual-" + str(i + 1)
            hh[pref + "-date"] = triad_hh["start-date"]
            hh[pref + "-msp-kw"] = triad_hh["msp-kw"]
            hh[pref + "-status"] = triad_hh["status"]
            hh[pref + "-laf"] = triad_hh["laf"]
            hh[pref + "-gsp-kw"] = triad_hh["gsp-kw"]
            gsp_kw += triad_hh["gsp-kw"]

        hh["triad-actual-gsp-kw"] = gsp_kw / 3
        polarity = "import" if ds.llfc.is_import else "export"
        gsp_group_code = ds.gsp_group_code
        tot_rate = 0
        for start_date, finish_date, script in get_file_scripts("triad_rates"):
            if start_date <= financial_year_finish and not hh_before(
                    finish_date, financial_year_start):
                start_month = to_ct(start_date).month
                if start_month < 4:
                    start_month += 12

                if finish_date is None:
                    finish_month = 3
                else:
                    finish_month = to_ct(finish_date).month

                if finish_month < 4:
                    finish_month += 12

                rt = get_file_rates(
                    ds.caches, "triad_rates", start_date
                )["triad_gbp_per_gsp_kw"][polarity][gsp_group_code]
                tot_rate += (finish_month - start_month + 1) * float(rt)

        rate = tot_rate / 12
        hh["triad-actual-rate"] = rate

        hh["triad-actual-gbp"] = hh["triad-actual-rate"] * hh[
            "triad-actual-gsp-kw"]

        era = ds.supply.find_era_at(ds.sess, month_finish)
        est_intervals = 0

        interval = (
            relativedelta(months=1) if rate_period == "monthly" else relativedelta(days=1)
        )

        dt = month_finish
        while era is not None and dt > financial_year_start:
            est_intervals += 1
            dt -= interval
            if hh_after(dt, era.finish_date):
                era = ds.supply.find_era_at(ds.sess, dt)

        if rate_period == "monthly":
            hh["triad-all-estimates-months"] = est_intervals
        else:
            hh["triad-all-estimates-days"] = est_intervals
        hh["triad-all-estimates-gbp"] = (est_triad_gbp / total_intervals *
                                         est_intervals * -1)
Example #11
def content(site_id,
            g_supply_id,
            user,
            compression,
            finish_year,
            finish_month,
            months,
            now=None):
    if now is None:
        now = ct_datetime_now()
    report_context = {}
    sess = None
    month_list = list(
        c_months_u(finish_year=finish_year,
                   finish_month=finish_month,
                   months=months))
    start_date, finish_date = month_list[0][0], month_list[-1][-1]

    try:
        sess = Session()
        base_name = [
            "g_monthly_duration",
            hh_format(start_date).replace(" ", "_").replace(":", "").replace("-", ""),
            "for",
            str(months),
            "months",
        ]

        forecast_from = chellow.computer.forecast_date()

        sites = (sess.query(Site).join(SiteGEra).join(GEra).filter(
            SiteGEra.is_physical == true()).distinct().order_by(Site.code))
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append("site")
            base_name.append(site.code)
        if g_supply_id is not None:
            g_supply = GSupply.get_by_id(sess, g_supply_id)
            base_name.append("g_supply")
            base_name.append(str(g_supply.id))
            sites = sites.filter(GEra.g_supply == g_supply)

        running_name, finished_name = chellow.dloads.make_names(
            "_".join(base_name) + ".ods", user)

        rf = open(running_name, "wb")
        site_rows = []
        g_era_rows = []

        era_header_titles = [
            "creation_date",
            "mprn",
            "supply_name",
            "exit_zone",
            "msn",
            "unit",
            "contract",
            "site_id",
            "site_name",
            "associated_site_ids",
            "month",
        ]
        site_header_titles = [
            "creation_date",
            "site_id",
            "site_name",
            "associated_site_ids",
            "month",
        ]
        summary_titles = ["kwh", "gbp", "billed_kwh", "billed_gbp"]

        vb_titles = []
        conts = (sess.query(GContract).join(GEra).join(GSupply).filter(
            GEra.start_date <= finish_date,
            or_(GEra.finish_date == null(), GEra.finish_date >= start_date),
        ).distinct().order_by(GContract.id))
        if g_supply_id is not None:
            conts = conts.filter(GEra.g_supply_id == g_supply_id)
        for cont in conts:
            title_func = chellow.computer.contract_func(
                report_context, cont, "virtual_bill_titles")
            if title_func is None:
                raise Exception("For the contract " + cont.name +
                                " there doesn't seem " +
                                "to be a 'virtual_bill_titles' function.")
            for title in title_func():
                if title not in vb_titles:
                    vb_titles.append(title)

        g_era_rows.append(era_header_titles + summary_titles + vb_titles)
        site_rows.append(site_header_titles + summary_titles)

        for month_start, month_finish in month_list:
            for site in sites.filter(
                    GEra.start_date <= month_finish,
                    or_(GEra.finish_date == null(),
                        GEra.finish_date >= month_start),
            ):
                site_kwh = site_gbp = site_billed_kwh = site_billed_gbp = 0

                for g_era in (sess.query(GEra).join(SiteGEra).filter(
                        SiteGEra.site == site,
                        SiteGEra.is_physical == true(),
                        GEra.start_date <= month_finish,
                        or_(GEra.finish_date == null(),
                            GEra.finish_date >= month_start),
                ).options(
                        joinedload(GEra.g_contract),
                        joinedload(GEra.g_supply),
                        joinedload(GEra.g_supply).joinedload(
                            GSupply.g_exit_zone),
                ).order_by(GEra.id)):

                    g_supply = g_era.g_supply

                    if g_supply_id is not None and g_supply.id != g_supply_id:
                        continue

                    ss_start = hh_max(g_era.start_date, month_start)
                    ss_finish = hh_min(g_era.finish_date, month_finish)

                    ss = GDataSource(
                        sess,
                        ss_start,
                        ss_finish,
                        forecast_from,
                        g_era,
                        report_context,
                        None,
                    )

                    contract = g_era.g_contract
                    vb_function = contract_func(report_context, contract,
                                                "virtual_bill")
                    if vb_function is None:
                        raise BadRequest(
                            "The contract " + contract.name +
                            " doesn't have the virtual_bill() function.")
                    vb_function(ss)
                    bill = ss.bill

                    try:
                        gbp = bill["net_gbp"]
                    except KeyError:
                        gbp = 0
                        bill["problem"] += (
                            "For the supply " + ss.mprn +
                            " the virtual bill " + str(bill) +
                            " from the contract " + contract.name +
                            " does not contain the net_gbp key.")
                    try:
                        kwh = bill["kwh"]
                    except KeyError:
                        kwh = 0
                        bill["problem"] += ("For the supply " + ss.mprn +
                                            " the virtual bill " + str(bill) +
                                            " from the contract " +
                                            contract.name +
                                            " does not contain the 'kwh' key.")

                    billed_kwh = billed_gbp = 0

                    g_era_associates = {
                        s.site.code
                        for s in g_era.site_g_eras if not s.is_physical
                    }

                    for g_bill in sess.query(GBill).filter(
                            GBill.g_supply == g_supply,
                            GBill.start_date <= ss_finish,
                            GBill.finish_date >= ss_start,
                    ):
                        bill_start = g_bill.start_date
                        bill_finish = g_bill.finish_date
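                        # Durations include the final half hour, hence the extra 30 * 60 seconds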
                        bill_duration = (bill_finish - bill_start
                                         ).total_seconds() + (30 * 60)
                        overlap_duration = (min(bill_finish, ss_finish) - max(
                            bill_start, ss_start)).total_seconds() + (30 * 60)
                        overlap_proportion = overlap_duration / bill_duration
                        billed_kwh += overlap_proportion * float(g_bill.kwh)
                        billed_gbp += overlap_proportion * float(g_bill.net)

                    associated_site_ids = ",".join(sorted(g_era_associates))
                    g_era_rows.append([
                        make_val(v) for v in [
                            now,
                            g_supply.mprn,
                            g_supply.name,
                            g_supply.g_exit_zone.code,
                            g_era.msn,
                            g_era.g_unit.code,
                            contract.name,
                            site.code,
                            site.name,
                            associated_site_ids,
                            month_finish,
                            kwh,
                            gbp,
                            billed_kwh,
                            billed_gbp,
                        ]
                    ] + [make_val(bill.get(t)) for t in vb_titles])

                    site_kwh += kwh
                    site_gbp += gbp
                    site_billed_kwh += billed_kwh
                    site_billed_gbp += billed_gbp

                linked_sites = ", ".join(s.code
                                         for s in site.find_linked_sites(
                                             sess, month_start, month_finish))

                site_rows.append([
                    make_val(v) for v in [
                        now,
                        site.code,
                        site.name,
                        linked_sites,
                        month_finish,
                        site_kwh,
                        site_gbp,
                        site_billed_kwh,
                        site_billed_gbp,
                    ]
                ])
                sess.rollback()
            write_spreadsheet(rf, compression, site_rows, g_era_rows)

    except BadRequest as e:
        site_rows.append(["Problem " + e.description])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, g_era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names("error.txt", user)
            with open(r_name, "w") as ef:
                ef.write(msg + "\n")
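
The billed kWh and GBP in the loop above are apportioned by how much of each gas bill's period overlaps the reporting window, with 30 minutes added to both durations because half-hourly periods are inclusive of their final half hour. A minimal, self-contained sketch of that apportionment (the apportion function and the figures are illustrative, not part of chellow):

from datetime import datetime

HH_SECONDS = 30 * 60  # periods are inclusive of their final half hour


def apportion(bill_start, bill_finish, window_start, window_finish, kwh, net_gbp):
    # Pro-rate a bill's kWh and net GBP by its overlap with a reporting
    # window, mirroring the overlap_proportion calculation above.
    bill_duration = (bill_finish - bill_start).total_seconds() + HH_SECONDS
    overlap = (
        min(bill_finish, window_finish) - max(bill_start, window_start)
    ).total_seconds() + HH_SECONDS
    proportion = overlap / bill_duration
    return proportion * kwh, proportion * net_gbp


# A bill covering all of March, apportioned to the second half of the month.
print(
    apportion(
        datetime(2009, 3, 1),
        datetime(2009, 3, 31, 23, 30),
        datetime(2009, 3, 16),
        datetime(2009, 3, 31, 23, 30),
        1000.0,
        350.0,
    )
)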
Ejemplo n.º 12
0
def datum_2010_04_01(ds, hh):
    start_date = hh["start-date"]
    dno_cache = ds.caches["dno"][ds.dno_code]

    if not ds.full_channels and not (hh["msp-kwh"] > 0 and hh["anti-msp-kwh"] == 0):
        imp_msp_kvarh, exp_msp_kvarh = 0, 0
    else:
        imp_msp_kvarh, exp_msp_kvarh = hh["imp-msp-kvarh"], hh["exp-msp-kvarh"]

    try:
        gsp_group_cache = dno_cache[ds.gsp_group_code]
    except KeyError:
        gsp_group_cache = dno_cache[ds.gsp_group_code] = {}

    try:
        tariff = gsp_group_cache["tariffs"][ds.llfc_code][start_date]
    except KeyError:
        try:
            tariff_cache = gsp_group_cache["tariffs"]
        except KeyError:
            tariff_cache = gsp_group_cache["tariffs"] = {}

        try:
            tariffs = tariff_cache[ds.llfc_code]
        except KeyError:
            tariffs = tariff_cache[ds.llfc_code] = {}

        try:
            tariff = tariffs[start_date]
        except KeyError:
            tariff = None
            try:
                tariff_list = get_file_rates(ds.caches, ds.dno_code, start_date)[
                    ds.gsp_group_code
                ]["tariffs"]
            except KeyError as e:
                raise BadRequest(str(e))

            for llfcs_pcs, tf in tariff_list.items():
                key = llfcs_pcs.split("_")
                llfcs = [v.strip() for v in key[0].split(",")]
                if len(key) == 2:
                    pcs = [v.strip() for v in key[1].split(",")]
                else:
                    pcs = None

                if ds.llfc_code in llfcs and (pcs is None or ds.pc_code in pcs):
                    tariff = tf
                    break

            if tariff is None:
                raise BadRequest(
                    f"For the DNO {ds.dno_code} and timestamp {hh_format(start_date)} "
                    f"and GSP group {ds.gsp_group_code}, the LLFC '{ds.llfc_code}' "
                    f"can't be found in the 'tariffs' section."
                )

            tariffs[start_date] = tariff

    try:
        band = gsp_group_cache["bands"][start_date]
    except KeyError:
        try:
            bands_cache = gsp_group_cache["bands"]
        except KeyError:
            bands_cache = gsp_group_cache["bands"] = {}

        try:
            band = bands_cache[start_date]
        except KeyError:
            band = "green"
            ct_hr = hh["ct-decimal-hour"]
            weekend = hh["ct-day-of-week"] > 4
            try:
                slots = get_file_rates(ds.caches, ds.dno_code, start_date)[
                    ds.gsp_group_code
                ]["bands"]
            except KeyError as e:
                raise BadRequest(str(e))

            for slot in slots:
                slot_weekend = slot["weekend"] == 1
                if slot_weekend == weekend and slot["start"] <= ct_hr < slot["finish"]:
                    band = slot["band"]
                    break

            bands_cache[start_date] = band

    try:
        laf = dno_cache["lafs"][ds.llfc_code][start_date]
    except KeyError:
        try:
            laf_cache = dno_cache["lafs"]
        except KeyError:
            laf_cache = dno_cache["lafs"] = {}

        try:
            laf_cache_llfc = laf_cache[ds.llfc_code]
        except KeyError:
            laf_cache_llfc = laf_cache[ds.llfc_code] = {}

        try:
            laf = laf_cache_llfc[start_date]
        except KeyError:
            dno_code = ds.dno_code
            if dno_code == "99":
                laf_cache_llfc[start_date] = 1
            else:
                m_start, m_finish = next(
                    c_months_u(
                        start_year=hh["ct-year"], start_month=hh["ct-month"], months=1
                    )
                )
                for (laf,) in ds.sess.execute(
                    select(Laf)
                    .join(Llfc)
                    .join(Party)
                    .where(
                        Party.dno_code == ds.dno_code,
                        Llfc.code == ds.llfc_code,
                        Laf.timestamp >= m_start,
                        Laf.timestamp <= m_finish,
                    )
                ):
                    laf_cache_llfc[laf.timestamp] = float(laf.value)

            try:
                laf = laf_cache_llfc[start_date]
            except KeyError:
                raise BadRequest(
                    f"Missing LAF for DNO {ds.dno_code}, LLFC {ds.llfc_code} and "
                    f"timestamp {hh_format(start_date)}"
                )

    hh["laf"] = laf
    hh["gsp-kwh"] = laf * hh["msp-kwh"]
    hh["gsp-kw"] = hh["gsp-kwh"] * 2

    kvarh = max(
        max(imp_msp_kvarh, exp_msp_kvarh) - (0.95 ** -2 - 1) ** 0.5 * hh["msp-kwh"], 0
    )

    hh["duos-reactive-kvarh"] = kvarh

    duos_reactive_rate = tariff["gbp-per-kvarh"]
    if duos_reactive_rate is not None:
        duos_reactive_rate = float(duos_reactive_rate)
        if duos_reactive_rate != 0:
            hh["duos-reactive-rate"] = duos_reactive_rate
            hh["duos-reactive-gbp"] = kvarh * duos_reactive_rate

    rate = float(tariff[KEYS[band]["tariff-rate"]])
    hh[KEYS[band]["bill-rate"]] = rate
    hh[KEYS[band]["kwh"]] = hh["msp-kwh"]
    hh[KEYS[band]["gbp"]] = rate * hh["msp-kwh"]

    if hh["ct-decimal-hour"] == 23.5 and not ds.is_displaced:
        hh["duos-fixed-days"] = 1
        rate = float(tariff["gbp-per-mpan-per-day"])
        hh["duos-fixed-rate"] = rate
        hh["duos-fixed-gbp"] = rate

        hh["duos-availability-days"] = 1
        kva = ds.sc
        hh["duos-availability-kva"] = kva
        rate = float(tariff["gbp-per-kva-per-day"])
        hh["duos-availability-rate"] = rate
        hh["duos-availability-gbp"] = rate * kva

    if hh["ct-is-month-end"] and not ds.is_displaced:
        month_to = start_date
        month_from = to_utc(ct_datetime(hh["ct-year"], hh["ct-month"], 1))
        md_kva = 0
        days_in_month = 0
        for dsc in ds.get_data_sources(month_from, month_to):
            for datum in dsc.hh_data:
                md_kva = max(
                    md_kva,
                    (
                        datum["msp-kw"] ** 2
                        + max(datum["imp-msp-kvar"], datum["exp-msp-kvar"]) ** 2
                    )
                    ** 0.5,
                )
                if datum["ct-decimal-hour"] == 0:
                    days_in_month += 1

        excess_kva = max(md_kva - ds.sc, 0)

        if "excess-gbp-per-kva-per-day" in tariff and excess_kva != 0:
            rate = float(tariff["excess-gbp-per-kva-per-day"])
            hh["duos-excess-availability-kva"] = excess_kva
            rate = float(tariff["excess-gbp-per-kva-per-day"])
            hh["duos-excess-availability-rate"] = rate
            hh["duos-excess-availability-days"] = days_in_month
            hh["duos-excess-availability-gbp"] = rate * excess_kva * days_in_month
Ejemplo n.º 13
0
def create_csv(f, sess, start_date, finish_date, contract_id):
    caches = {}
    writer = csv.writer(f, lineterminator="\n")
    contract = Contract.get_supplier_by_id(sess, contract_id)
    forecast_date = chellow.computer.forecast_date()

    start_date_ct, finish_date_ct = to_ct(start_date), to_ct(finish_date)

    month_pairs = c_months_u(
        start_year=start_date_ct.year,
        start_month=start_date_ct.month,
        finish_year=finish_date_ct.year,
        finish_month=finish_date_ct.month,
    )

    bill_titles = contract_func(caches, contract, "virtual_bill_titles")()

    for tpr in (
        sess.query(Tpr)
        .join(MeasurementRequirement)
        .join(Ssc)
        .join(Era)
        .filter(
            Era.start_date <= finish_date,
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            or_(
                Era.imp_supplier_contract == contract,
                Era.exp_supplier_contract == contract,
            ),
        )
        .order_by(Tpr.code)
        .distinct()
    ):
        for suffix in ("-kwh", "-rate", "-gbp"):
            bill_titles.append(tpr.code + suffix)
    writer.writerow(
        ["MPAN Core", "Site Code", "Site Name", "Account", "From", "To"] + bill_titles
    )
    vb_func = contract_func(caches, contract, "virtual_bill")

    for month_start, month_finish in month_pairs:
        period_start = hh_max(start_date, month_start)
        period_finish = hh_min(finish_date, month_finish)

        for era in (
            sess.query(Era)
            .filter(
                or_(
                    Era.imp_supplier_contract == contract,
                    Era.exp_supplier_contract == contract,
                ),
                Era.start_date <= period_finish,
                or_(Era.finish_date == null(), Era.finish_date >= period_start),
            )
            .order_by(Era.imp_mpan_core)
        ):
            try:
                vals = _process_era(
                    sess,
                    caches,
                    vb_func,
                    forecast_date,
                    bill_titles,
                    contract,
                    period_start,
                    period_finish,
                    era,
                )
                writer.writerow(csv_make_val(v) for v in vals)
            except BadRequest as e:
                raise BadRequest(
                    f"Problem with {chellow.utils.url_root}eras/{era.id}/edit "
                    f"{e.description}"
                )
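
create_csv walks whole contiguous months between the requested start and finish, then clamps the first and last months back to the requested range with hh_max and hh_min before querying eras. A short illustration of that clamping, assuming the helpers are importable from chellow.utils and using arbitrary dates:

from chellow.utils import c_months_u, ct_datetime, hh_max, hh_min, to_ct, to_utc

# An arbitrary UTC range spanning parts of three months.
start_date = to_utc(ct_datetime(2009, 3, 10))
finish_date = to_utc(ct_datetime(2009, 5, 20))
start_ct, finish_ct = to_ct(start_date), to_ct(finish_date)

for month_start, month_finish in c_months_u(
    start_year=start_ct.year,
    start_month=start_ct.month,
    finish_year=finish_ct.year,
    finish_month=finish_ct.month,
):
    # Clamp each month back to the requested range, as the loop above does.
    period_start = hh_max(start_date, month_start)
    period_finish = hh_min(finish_date, month_finish)
    print(period_start, period_finish)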
Ejemplo n.º 14
0
def _write_sites(sess, caches, writer, year, site_id):
    titles = (
        "Site Code",
        "Site Name",
        "Displaced TRIAD 1 Date",
        "Displaced TRIAD 1 MSP kW",
        "Displaced TRIAD LAF",
        "Displaced TRIAD 1 GSP kW",
        "Displaced TRIAD 2 Date",
        "Displaced TRIAD 2 MSP kW",
        "Displaced TRIAD 2 LAF",
        "Displaced TRIAD 2 GSP kW",
        "Displaced TRIAD 3 Date",
        "Displaced TRIAD 3 MSP kW",
        "Displaced TRIAD 3 LAF",
        "Displaced TRIAD 3 GSP kW",
        "Displaced GSP kW",
        "Displaced Rate GBP / kW",
        "GBP",
    )
    writer.writerow(titles)

    march_finish_ct = ct_datetime(year, 4, 1) - HH
    march_finish_utc = to_utc(march_finish_ct)
    march_start_ct = ct_datetime(year, 3, 1)
    march_start_utc = to_utc(march_start_ct)
    year_start = to_utc(ct_datetime(year - 1, 4, 1))

    forecast_date = chellow.computer.forecast_date()

    sites = _make_sites(sess, year_start, march_finish_utc, site_id,
                        ("gen", "gen-net"))

    scalar_names = {"triad-actual-gsp-kw", "triad-actual-gbp"}

    rate_names = {"triad-actual-rate", "triad-estimate-rate"}

    for i in range(1, 4):
        pref = "triad-actual-" + str(i) + "-"
        for suf in ("msp-kw", "gsp-kw"):
            scalar_names.add(pref + suf)
        for suf in ("date", "status", "laf"):
            rate_names.add(pref + suf)

    for site in sites:
        displaced_era = None
        for month_start, month_finish in sorted(c_months_u(start_year=year - 1,
                                                           start_month=4,
                                                           months=12),
                                                reverse=True):
            displaced_era = chellow.computer.displaced_era(
                sess, caches, site, month_start, month_finish, forecast_date)
            if displaced_era is not None:
                break

        if displaced_era is None:
            continue

        site_ds = chellow.computer.SiteSource(
            sess,
            site,
            march_start_utc,
            march_finish_utc,
            forecast_date,
            caches,
            displaced_era,
        )
        chellow.duos.duos_vb(site_ds)
        chellow.triad.hh(site_ds)

        for hh in site_ds.hh_data:
            bill_hh = site_ds.supplier_bill_hhs[hh["start-date"]]
            for k in scalar_names & hh.keys():
                bill_hh[k] = hh[k]

            for k in rate_names & hh.keys():
                bill_hh[k] = {hh[k]}

        bill = reduce_bill_hhs(site_ds.supplier_bill_hhs)
        values = [site.code, site.name]
        for i in range(1, 4):
            triad_prefix = "triad-actual-" + str(i) + "-"
            for suffix in ("date", "msp-kw", "laf", "gsp-kw"):
                values.append(csv_make_val(bill[triad_prefix + suffix]))

        for suffix in ("gsp-kw", "rate", "gbp"):
            values.append(csv_make_val(bill["triad-actual-" + suffix]))

        writer.writerow(values)

        # Avoid long-running transaction
        sess.rollback()
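
The displaced era is found by walking the twelve months of the financial year (April to March) newest first and stopping at the first month that yields one. The month sequence itself is just a reverse-sorted c_months_u call; a minimal illustration with an arbitrary year, assuming c_months_u is importable from chellow.utils:

from chellow.utils import c_months_u

year = 2010  # illustrative: the financial year ending March 2010
for month_start, month_finish in sorted(
    c_months_u(start_year=year - 1, start_month=4, months=12), reverse=True
):
    print(month_start, month_finish)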
Ejemplo n.º 15
0
def fetch_cvs(sess, log_f):
    log_f("Starting to check GCv rates.")
    contract = Contract.get_non_core_by_name(sess, "g_cv")
    latest_rs = (sess.query(RateScript).filter(
        RateScript.contract == contract).order_by(
            RateScript.start_date.desc()).first())
    latest_rs_id = latest_rs.id
    latest_rs_start_date_ct = to_ct(latest_rs.start_date)

    month_pairs = list(
        c_months_u(
            start_year=latest_rs_start_date_ct.year,
            start_month=latest_rs_start_date_ct.month,
            months=2,
        ))
    month_start, month_finish = month_pairs[1]

    now = utc_datetime_now()
    props = contract.make_properties()
    if not props.get("enabled", False):
        log_f("The automatic importer is disabled. To enable it, edit the "
              "contract properties to set 'enabled' to true.")
        return

    search_start = month_start - relativedelta(days=1)
    search_finish = month_finish + relativedelta(days=1)
    if now <= search_finish:
        return

    url = props["url"]
    log_f(f"Checking to see if data is available "
          f"from {hh_format(search_start)} to "
          f"{hh_format(search_finish)} at {url}")

    res = requests.post(
        url,
        data={
            "LatestValue": "true",
            "PublicationObjectIds": "408:28,+408:5328,+408:5320,+408:5291,"
            "+408:5366,+408:5312,+408:5346,+408:5324,+408:5316,+408:5308,"
            "+408:5336,+408:5333,+408:5342,+408:5354,+408:82,+408:70,"
            "+408:59,+408:38,+408:49",
            "PublicationObjectStagingIds": "PUBOBJ1660,PUBOB4507,PUBOB4508,"
            "PUBOB4510,PUBOB4509,PUBOB4511,PUBOB4512,PUBOB4513,PUBOB4514,"
            "PUBOB4515,PUBOB4516,PUBOB4517,PUBOB4518,PUBOB4519,PUBOB4521,"
            "PUBOB4520,PUBOB4522,PUBOBJ1661,PUBOBJ1662",
            "Applicable": "applicableFor",
            "PublicationObjectCount": "19",
            "FromUtcDatetime": param_format(search_start),
            "ToUtcDateTime": param_format(search_finish),
            "FileType": "Csv",
        },
    )
    log_f(f"Received {res.status_code} {res.reason}")

    month_cv = defaultdict(dict)
    cf = csv.reader(res.text.splitlines())
    row = next(cf)  # Skip title row
    last_date = utc_datetime(1900, 1, 1)
    for row in cf:
        applicable_at_str = row[0]
        applicable_for_str = row[1]
        applicable_for = to_utc(
            to_ct(Datetime.strptime(applicable_for_str, "%d/%m/%Y")))
        data_item = row[2]
        value_str = row[3]

        if "LDZ" in data_item and month_start <= applicable_for < month_finish:
            ldz = data_item[-3:-1]
            cvs = month_cv[ldz]
            applicable_at = to_utc(
                to_ct(Datetime.strptime(applicable_at_str,
                                        "%d/%m/%Y %H:%M:%S")))
            last_date = max(last_date, applicable_at)
            cv = Decimal(value_str)
            try:
                existing = cvs[applicable_for.day]
                if applicable_at > existing["applicable_at"]:
                    existing["cv"] = cv
                    existing["applicable_at"] = applicable_at
            except KeyError:
                cvs[applicable_for.day] = {
                    "cv": cv,
                    "applicable_at": applicable_at
                }

    # The month is treated as complete when every LDZ has the same number of
    # daily values and the latest 'applicable at' timestamp is within a day
    # of the month's end.
    all_equal = len(set(map(len, month_cv.values()))) <= 1
    if last_date + Timedelta(days=1) > month_finish and all_equal:
        log_f("The whole month's data is there.")
        script = {"cvs": month_cv}
        contract = Contract.get_non_core_by_name(sess, "g_cv")
        rs = RateScript.get_by_id(sess, latest_rs_id)
        contract.update_rate_script(sess, rs, rs.start_date, month_finish,
                                    loads(rs.script))
        sess.flush()
        contract.insert_rate_script(sess, month_start, script)
        sess.commit()
        log_f("Added new rate script.")
    else:
        log_f(f"There isn't a whole month there yet. The "
              f"last date is {hh_format(last_date)}.")