Example #1
    def __next__(self):
        try:
            self.line_number, self.values = next(self.shredder)
            mpan_core_str = self.get_field(0, "MPAN Core")
            datum = {'mpan_core': parse_mpan_core(mpan_core_str)}
            channel_type_str = self.get_field(1, "Channel Type")
            datum['channel_type'] = parse_channel_type(channel_type_str)

            start_date_str = self.get_field(2, "Start Date")
            datum['start_date'] = validate_hh_start(
                to_utc(Datetime.strptime(start_date_str, "%Y-%m-%d %H:%M")))

            value_str = self.get_field(3, "Value")
            datum['value'] = Decimal(value_str)

            status = self.get_field(4, "Status")
            if len(status) != 1:
                raise BadRequest(
                    "The status character must be one character in length.")
            datum['status'] = status
            return datum
        except BadRequest as e:
            e.description = ''.join(
                (
                    "Problem at line number: ", str(self.line_number), ": ",
                    str(self.values), ": ", e.description))
            raise e
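
The except branch above shows a pattern that recurs throughout these parsers: catch a domain error, prefix its description with the line number and the raw row values, then re-raise. Below is a minimal, self-contained sketch of that pattern; the BadRequest class here is a stand-in for the one the example imports, and parse_line is invented for illustration.

class BadRequest(Exception):
    def __init__(self, description):
        super().__init__(description)
        self.description = description


def parse_line(line_number, values):
    # Convert the first field, annotating any failure with its position.
    try:
        return {'value': float(values[0])}
    except (ValueError, IndexError) as e:
        raise BadRequest(
            "Problem at line number: " + str(line_number) + ": " +
            str(values) + ": " + str(e))


# parse_line(3, ['oops']) raises BadRequest carrying the line context.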
Example #2
def get_date(row, name, datemode):
    val = get_value(row, name)
    if val == '':
        return None
    else:
        dt_raw = Datetime(*xldate_as_tuple(get_value(row, name), datemode))
        return to_utc(to_ct(dt_raw))
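
This helper turns an Excel serial date into a timezone-aware UTC value by way of local clock time. The self-contained sketch below does the same conversion using zoneinfo in place of chellow's to_ct/to_utc helpers; it assumes to_ct corresponds to Europe/London (chellow targets the GB market) and that the zone database is available. The serial number in the call is illustrative.

from datetime import datetime, timezone
from zoneinfo import ZoneInfo

from xlrd import xldate_as_tuple


def excel_serial_to_utc(serial, datemode=0):
    # Treat the serial as naive Europe/London clock time, then shift to UTC,
    # mirroring to_utc(to_ct(...)) above.
    naive = datetime(*xldate_as_tuple(serial, datemode))
    ct = naive.replace(tzinfo=ZoneInfo("Europe/London"))
    return ct.astimezone(timezone.utc)


print(excel_serial_to_utc(44105.5))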
Example #3
        # Apparently a nested helper from an enclosing method (not included in
        # this excerpt) that defines val().
        def date_val(title):
            try:
                return to_utc(Datetime.strptime(val(title), "%d/%m/%Y"))
            except ValueError as e:
                raise BadRequest(
                    "At line number " + str(self._line_number) +
                    ", while trying to find the value of " + title +
                    " the date couldn't be parsed. " + str(e) +
                    " The full line is " + self.last_line)

    def make_raw_bills(self):
        raw_bills = []
        last_bill_reference = None
        raw_bill = None
        for self._line_number, row in enumerate(self.rows[1:]):
            if row[0].value is None:
                continue
            bill_reference = str(row[8].value)
            if last_bill_reference != bill_reference:
                breakdown = defaultdict(int, {'gas_rate': set()})
                raw_bill = {
                    'reference': bill_reference, 'reads': [], 'kwh': 0,
                    'breakdown': breakdown, 'net_gbp': Decimal('0.00'),
                    'vat_gbp': Decimal('0.00'), 'gross_gbp': Decimal('0.00'),
                    'raw_lines': self.titles + '\n'}
                raw_bills.append(raw_bill)
                last_bill_reference = bill_reference
            if row[9].value is None:
                raw_bill['account'] = row[5].value
                raw_bill['issue_date'] = row[6].value
                if row[7].value is None:
                    raw_bill['bill_type_code'] = 'N'
                else:
                    raw_bill['bill_type_code'] = 'W'

                raw_bill['msn'] = row[9].value
                raw_bill['mprn'] = str(row[10].value)
                raw_bill['start_date'] = to_utc(row[17].value)
                raw_bill['finish_date'] = to_utc(row[18].value) + \
                    relativedelta(days=1) - HH
                breakdown['vat_5pc'] += row[28].value
                breakdown['vat_15pc'] += row[29].value
                breakdown['vat_17_5pc'] += row[30].value
                breakdown['vat_20pc'] += row[31].value
                raw_bill['vat_gbp'] += to_money(row[32])
                raw_bill['breakdown']['standing_gbp'] = row[33].value
                raw_bill['gross_gbp'] += to_money(row[34])
                raw_bill['raw_lines'] += ','.join(
                    str(c.value) for c in islice(row, 35)) + '\n'
                raw_bill['net_gbp'] += raw_bill['gross_gbp'] - \
                    raw_bill['vat_gbp']
            else:
                read = {
                    'msn': row[9].value,
                    'mprn': str(row[10].value),
                    'prev_value': row[11].value,
                    'prev_date': to_utc(row[12].value),
                    'prev_type_code': row[13].value[-1],
                    'pres_value': row[14].value,
                    'pres_date': to_utc(row[15].value),
                    'pres_type_code': row[16].value[-1],
                    'correction_factor': row[20].value,
                    'calorific_value': row[21].value,
                    'units': row[25].value}
                vat_gbp = to_money(row[32])
                gross_gbp = to_money(row[34])
                raw_bill['reads'].append(read)
                raw_bill['kwh'] += row[22].value
                raw_bill['net_gbp'] += gross_gbp - vat_gbp
                raw_bill['vat_gbp'] += vat_gbp
                raw_bill['gross_gbp'] += gross_gbp
                raw_bill['raw_lines'] += ','.join(
                    str(c.value) for c in islice(row, 35)) + '\n'
                breakdown['gas_rate'].add(row[23].value)
                breakdown['units_consumed'] += row[24].value
                breakdown['gas_gbp'] += row[26].value
                breakdown['ccl_gbp'] += row[27].value
                breakdown['vat_5pc'] += row[28].value
                breakdown['vat_15pc'] += row[29].value
                breakdown['vat_17_5pc'] += row[30].value
                breakdown['vat_20pc'] += row[31].value
        for raw_bill in raw_bills:
            breakdown = raw_bill['breakdown']
            if len(breakdown['gas_rate']) == 1:
                breakdown['gas_rate'] = breakdown['gas_rate'].pop()
            else:
                breakdown['gas_rate'] = ''
            raw_bill['breakdown'] = dict(breakdown)
        return raw_bills
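
make_raw_bills groups consecutive spreadsheet rows that share a bill reference into a single raw bill, accumulating money, kWh and reads as it goes. The self-contained sketch below shows just the grouping idea, using a dict keyed by reference instead of the last_bill_reference check; the two-field row layout is invented for illustration.

from decimal import Decimal


def group_bills(rows):
    # rows: iterable of (bill_reference, net_amount_str) pairs.
    bills = {}
    for reference, net in rows:
        bill = bills.setdefault(
            reference, {'reference': reference, 'net_gbp': Decimal('0.00')})
        bill['net_gbp'] += Decimal(net)
    return list(bills.values())


print(group_bills([('A1', '10.00'), ('A1', '2.50'), ('B7', '3.00')]))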
Example #5
    def process_url(self, sess, url, contract):
        self.log("Checking to see if there's any new data at " + url)
        res = requests.get(url)
        self.log("Received " + str(res.status_code) + " " + res.reason)
        book = xlrd.open_workbook(file_contents=res.content)
        sheet = book.sheet_by_index(0)
        cache = {}

        for row_index in range(1, sheet.nrows):
            row = sheet.row(row_index)

            raw_date_val = row[0].value
            if isinstance(raw_date_val, float):
                raw_date = Datetime(*xlrd.xldate_as_tuple(raw_date_val, book.datemode))
            elif isinstance(raw_date_val, str):
                separator = raw_date_val[2]
                fmat = separator.join(("%d", "%m", "%Y"))
                raw_date = Datetime.strptime(raw_date_val, fmat)
            else:
                raise BadRequest(
                    "Type of date field " + str(raw_date_val) + " not recognized."
                )

            hh_date_ct = to_ct(raw_date)
            hh_date_ct += relativedelta(minutes=30 * (int(row[1].value) - 1))
            hh_date = to_utc(hh_date_ct)
            price = Decimal(str(row[2].value))
            run = row[5].value

            try:
                rs, rates, rts = cache[hh_date.year][hh_date.month]
            except KeyError:
                _save_cache(sess, cache)

                try:
                    yr_cache = cache[hh_date.year]
                except KeyError:
                    yr_cache = cache[hh_date.year] = {}

                rs = (
                    sess.query(RateScript)
                    .filter(
                        RateScript.contract == contract,
                        RateScript.start_date <= hh_date,
                        or_(
                            RateScript.finish_date == null(),
                            RateScript.finish_date >= hh_date,
                        ),
                    )
                    .first()
                )
                while rs is None:
                    self.log("There's no rate script at " + hh_format(hh_date) + ".")
                    latest_rs = (
                        sess.query(RateScript)
                        .filter(RateScript.contract == contract)
                        .order_by(RateScript.start_date.desc())
                        .first()
                    )
                    contract.update_rate_script(
                        sess,
                        latest_rs,
                        latest_rs.start_date,
                        latest_rs.start_date + relativedelta(months=2) - HH,
                        loads(latest_rs.script),
                    )
                    new_rs_start = latest_rs.start_date + relativedelta(months=1)
                    contract.insert_rate_script(sess, new_rs_start, {})
                    sess.commit()
                    self.log(
                        "Added a rate script starting at "
                        + hh_format(new_rs_start)
                        + "."
                    )

                    rs = (
                        sess.query(RateScript)
                        .filter(
                            RateScript.contract == contract,
                            RateScript.start_date <= hh_date,
                            or_(
                                RateScript.finish_date == null(),
                                RateScript.finish_date >= hh_date,
                            ),
                        )
                        .first()
                    )

                rates = loads(rs.script)
                try:
                    rts = rates["rates_gbp_per_mwh"]
                except KeyError:
                    rts = rates["rates_gbp_per_mwh"] = {}
                yr_cache[hh_date.month] = rs, rates, rts

            key = key_format(hh_date)
            try:
                existing = rts[key]
            except KeyError:
                existing = rts[key] = {}

            if run not in existing:
                existing[run] = price
                self.log(
                    "Added rate at " + hh_format(hh_date) + " for run " + run + "."
                )

        _save_cache(sess, cache)
        book = sheet = None
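
A detail worth pulling out of process_url is how string dates are handled: the character at index 2 is taken to be the separator and spliced into the strptime format, so "01/02/2020" and "01-02-2020" both parse. A self-contained sketch of just that trick:

from datetime import datetime


def parse_flexible_date(raw):
    # The third character ('/' or '-') decides the format string.
    separator = raw[2]
    fmat = separator.join(("%d", "%m", "%Y"))
    return datetime.strptime(raw, fmat)


print(parse_flexible_date("01/02/2020"))
print(parse_flexible_date("01-02-2020"))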
Example #6
    def make_raw_bills(self):
        raw_bills = []
        breakdown = None

        for self.line_number, code in enumerate(self.parser):
            if code == "BCD":
                ivdt = self.parser.elements[0]
                issue_date = to_utc(to_ct_date(ivdt[0]))

                invn = self.parser.elements[2]
                reference = invn[0]
                account = "SA" + reference[:9]

                btcd = self.parser.elements[5]
                bill_type_code = btcd[0]

                sumo = self.parser.elements[7]
                start_date = to_start_date(sumo[0])
                if to_ct_date(sumo[1]) in (
                        ct_datetime(2020, 4, 1),
                        ct_datetime(2020, 3, 16),
                ):
                    finish_date = to_start_date(sumo[1]) - HH
                else:
                    finish_date = to_finish_date(sumo[1])

            elif code == "MHD":
                typ = self.parser.elements[1]
                message_type = typ[0]
                if message_type == "UTLBIL":
                    issue_date = None
                    start_date = None
                    finish_date = None
                    account = None
                    reference = None
                    net = Decimal("0.00")
                    vat = Decimal("0.00")
                    gross = Decimal("0.00")
                    kwh = Decimal(0)
                    reads = []
                    bill_type_code = None
                    mpan_core = None
                    breakdown = defaultdict(int, {"raw-lines": []})

            elif code == "CCD":
                ccde = self.parser.elements[1]
                consumption_charge_indicator = ccde[0]

                if consumption_charge_indicator == "1":
                    prdt = self.parser.elements[6]
                    pvdt = self.parser.elements[7]

                    pres_read_date = to_finish_date(prdt[0])
                    prev_read_date = to_finish_date(pvdt[0])

                    tmod = self.parser.elements[3]
                    mtnr = self.parser.elements[4]
                    mloc = self.parser.elements[5]

                    mpan = mloc[0]
                    mpan_core = " ".join(
                        [mpan[:2], mpan[2:6], mpan[6:10], mpan[10:13]])
                    mpan = (mpan[13:15] + " " + mpan[15:18] + " " + mpan[18:] +
                            " " + mpan_core)

                    prrd = self.parser.elements[9]
                    pres_read_type = read_type_map[prrd[1]]
                    prev_read_type = read_type_map[prrd[3]]

                    adjf = self.parser.elements[12]
                    cons = self.parser.elements[13]

                    coefficient = Decimal(adjf[1]) / Decimal(100000)
                    pres_reading_value = Decimal(prrd[0])
                    prev_reading_value = Decimal(prrd[2])
                    msn = mtnr[0]
                    tpr_native = tmod[0]
                    if tpr_native not in tmod_map:
                        raise BadRequest(
                            "The TPR code " + tpr_native +
                            " can't be found in the TPR list for mpan " +
                            mpan + ".")
                    tpr_code = tmod_map[tpr_native]
                    if tpr_code == "kW":
                        units = "kW"
                        tpr_code = None
                    elif tpr_code == "kVA":
                        units = "kVA"
                        tpr_code = None
                    else:
                        units = "kWh"
                        kwh += to_decimal(cons) / Decimal("1000")

                    if mpan_core in WRONG_TPRS and pres_read_date == to_utc(
                            ct_datetime(2020, 4, 1, 23, 30)):
                        pres_read_date = to_utc(ct_datetime(
                            2020, 4, 1, 22, 30))
                        reads.append({
                            "msn": "Separator Read",
                            "mpan": mpan,
                            "coefficient": coefficient,
                            "units": units,
                            "tpr_code": tpr_code,
                            "prev_date": to_utc(ct_datetime(2020, 4, 1, 23)),
                            "prev_value": 0,
                            "prev_type_code": "N",
                            "pres_date": to_utc(ct_datetime(2020, 4, 1, 23)),
                            "pres_value": 0,
                            "pres_type_code": "N",
                        })
                    reads.append({
                        "msn": msn,
                        "mpan": mpan,
                        "coefficient": coefficient,
                        "units": units,
                        "tpr_code": tpr_code,
                        "prev_date": prev_read_date,
                        "prev_value": prev_reading_value,
                        "prev_type_code": prev_read_type,
                        "pres_date": pres_read_date,
                        "pres_value": pres_reading_value,
                        "pres_type_code": pres_read_type,
                    })

                elif consumption_charge_indicator == "2":
                    # tcod = self.parser.elements[2]
                    tmod = self.parser.elements[3]
                    mtnr = self.parser.elements[4]
                    mloc = self.parser.elements[5]

                    mpan = mloc[0]
                    mpan_core = " ".join(
                        [mpan[:2], mpan[2:6], mpan[6:10], mpan[10:13]])
                    mpan = (mpan[13:15] + " " + mpan[15:18] + " " + mpan[18:] +
                            " " + mpan_core)

                    prdt = self.parser.elements[6]
                    pvdt = self.parser.elements[7]

                    pres_read_date = to_finish_date(prdt[0])
                    prev_read_date = to_finish_date(pvdt[0])

                    ndrp = self.parser.elements[8]
                    prrd = self.parser.elements[9]
                    pres_read_type = read_type_map[prrd[1]]
                    prev_read_type = read_type_map[prrd[3]]

                    adjf = self.parser.elements[12]
                    cona = self.parser.elements[13]

                    coefficient = Decimal(adjf[1]) / Decimal(100000)
                    pres_reading_value = Decimal(prrd[0])
                    prev_reading_value = Decimal(prrd[2])
                    msn = mtnr[0]
                    tpr_code = tmod[0]
                    if tpr_code not in tmod_map:
                        raise BadRequest(
                            "The TPR code " + tpr_code +
                            " can't be found in the TPR list for mpan " +
                            mpan + ".")
                    tpr = tmod_map[tpr_code]
                    if tpr == "kW":
                        units = "kW"
                        tpr = None
                        prefix = "md-"
                    elif tpr == "kVA":
                        units = "kVA"
                        tpr = None
                        prefix = "md-"
                    else:
                        units = "kWh"
                        kwh += to_decimal(cona) / Decimal("1000")
                        prefix = tpr + "-"

                    nuct = self.parser.elements[15]
                    breakdown[prefix +
                              "kwh"] += to_decimal(nuct) / Decimal("1000")
                    cppu = self.parser.elements[18]
                    rate_key = prefix + "rate"
                    if rate_key not in breakdown:
                        breakdown[rate_key] = set()
                    breakdown[rate_key].add(
                        to_decimal(cppu) / Decimal("100000"))
                    ctot = self.parser.elements[19]
                    breakdown[prefix +
                              "gbp"] += to_decimal(ctot) / Decimal("100")

                    if mpan_core in WRONG_TPRS and pres_read_date == to_utc(
                            ct_datetime(2020, 4, 1, 23, 30)):
                        pres_read_date = to_utc(ct_datetime(
                            2020, 4, 1, 22, 30))
                        reads.append({
                            "msn": "Separator Read",
                            "mpan": mpan,
                            "coefficient": coefficient,
                            "units": units,
                            "tpr_code": tpr,
                            "prev_date": to_utc(ct_datetime(2020, 4, 1, 23)),
                            "prev_value": 0,
                            "prev_type_code": "N",
                            "pres_date": to_utc(ct_datetime(2020, 4, 1, 23)),
                            "pres_value": 0,
                            "pres_type_code": "N",
                        })

                    reads.append({
                        "msn": msn,
                        "mpan": mpan,
                        "coefficient": coefficient,
                        "units": units,
                        "tpr_code": tpr,
                        "prev_date": prev_read_date,
                        "prev_value": prev_reading_value,
                        "prev_type_code": prev_read_type,
                        "pres_date": pres_read_date,
                        "pres_value": pres_reading_value,
                        "pres_type_code": pres_read_type,
                    })

                elif consumption_charge_indicator == "3":
                    # tcod = self.parser.elements[2]
                    tmod = self.parser.elements[3]
                    tmod0 = tmod[0]
                    if tmod0 == "CCL":
                        prefix = kwh_prefix = "ccl-"
                    elif tmod0 in ["CQFITC", "CMFITC"]:
                        prefix = "fit-"
                        kwh_prefix = "fit-msp-"
                    elif tmod0 == "FITARR":
                        prefix = kwh_prefix = "fit-reconciliation-"
                    else:
                        tpr_code = tmod0
                        if tpr_code not in tmod_map:
                            raise BadRequest(
                                "The TPR code " + tpr_code +
                                " can't be found in the TPR list for mpan " +
                                mpan + ".")
                        prefix = kwh_prefix = tmod_map[tpr_code] + "-"

                    mtnr = self.parser.elements[4]
                    ndrp = self.parser.elements[8]
                    cona = self.parser.elements[13]
                    nuct = self.parser.elements[15]
                    breakdown[kwh_prefix +
                              "kwh"] += to_decimal(nuct) / Decimal("1000")
                    cppu = self.parser.elements[18]

                    rate_key = prefix + "rate"
                    if rate_key not in breakdown:
                        breakdown[rate_key] = set()
                    breakdown[rate_key].add(
                        to_decimal(cppu) / Decimal("100000"))

                    ctot = self.parser.elements[19]
                    breakdown[prefix +
                              "gbp"] += to_decimal(ctot) / Decimal("100")
                elif consumption_charge_indicator == "4":
                    # tcod = self.parser.elements[2]
                    tmod = self.parser.elements[3]
                    tmod0 = tmod[0]

                    mtnr = self.parser.elements[4]
                    ndrp = self.parser.elements[8]
                    if len(ndrp[0]) > 0:
                        breakdown["standing-days"] += to_decimal(ndrp)
                    cona = self.parser.elements[13]
                    nuct = self.parser.elements[15]
                    cppu = self.parser.elements[18]
                    ctot = self.parser.elements[19]
                    if len(ctot[0]) > 0:
                        breakdown["standing-gbp"] += to_decimal(
                            ctot) / Decimal("100")
            elif code == "MTR":
                if message_type == "UTLBIL":

                    if mpan_core is None:
                        sess = Session()
                        era = (sess.query(Era).filter(
                            Era.imp_supplier_account == account).first())
                        if era is not None:
                            mpan_core = era.imp_mpan_core
                        sess.close()

                    raw_bill = {
                        "bill_type_code": bill_type_code,
                        "account": account,
                        "mpan_core": mpan_core,
                        "reference": reference,
                        "issue_date": issue_date,
                        "start_date": start_date,
                        "finish_date": finish_date,
                        "kwh": kwh,
                        "net": net,
                        "vat": vat,
                        "gross": gross,
                        "breakdown": breakdown,
                        "reads": reads,
                    }
                    raw_bills.append(raw_bill)
                    breakdown = None

            elif code == "MAN":
                madn = self.parser.elements[2]
                """
                pc_code = madn[3]
                mtc_code = madn[4]
                llfc_code = madn[5]
                """

                mpan_core = parse_mpan_core("".join(
                    (madn[0], madn[1], madn[2])))

            elif code == "VAT":
                uvla = self.parser.elements[5]
                net += to_decimal(uvla) / Decimal("100")
                uvtt = self.parser.elements[6]
                vat += to_decimal(uvtt) / Decimal("100")
                ucsi = self.parser.elements[7]
                gross += to_decimal(ucsi) / Decimal("100")

            if breakdown is not None:
                breakdown["raw-lines"].append(self.parser.line)

        return raw_bills
Example #7
def test_supply(mocker, sess, client):
    site = Site.insert(sess, "22488", "Water Works")
    g_dn = GDn.insert(sess, "EE", "East of England")
    g_ldz = g_dn.insert_g_ldz(sess, "EA")
    g_exit_zone = g_ldz.insert_g_exit_zone(sess, "EA1")
    insert_g_units(sess)
    g_unit_M3 = GUnit.get_by_code(sess, "M3")
    participant = Participant.insert(sess, "CALB", "AK Industries")
    market_role_Z = MarketRole.get_by_code(sess, "Z")
    participant.insert_party(
        sess, market_role_Z, "None core", utc_datetime(2000, 1, 1), None, None
    )
    g_cv_rate_script = {
        "cvs": {
            "EA": {
                1: {"applicable_at": utc_datetime(2020, 10, 3), "cv": 39.2000},
            }
        }
    }
    Contract.insert_non_core(
        sess, "g_cv", "", {}, utc_datetime(2000, 1, 1), None, g_cv_rate_script
    )
    bank_holiday_rate_script = {"bank_holidays": []}
    Contract.insert_non_core(
        sess,
        "bank_holidays",
        "",
        {},
        utc_datetime(2000, 1, 1),
        None,
        bank_holiday_rate_script,
    )
    charge_script = """
import chellow.g_ccl
from chellow.g_engine import g_rates
from chellow.utils import reduce_bill_hhs


def virtual_bill_titles():
    return [
        'units_consumed', 'correction_factor', 'unit_code', 'unit_factor',
        'calorific_value', 'kwh', 'gas_rate', 'gas_gbp', 'ccl_rate',
        'standing_rate', 'standing_gbp', 'net_gbp', 'vat_gbp', 'gross_gbp',
        'problem']


def virtual_bill(ds):
    chellow.g_ccl.vb(ds)
    for hh in ds.hh_data:
        start_date = hh['start_date']
        bill_hh = ds.bill_hhs[start_date]
        bill_hh['units_consumed'] = hh['units_consumed']
        bill_hh['correction_factor'] = {hh['correction_factor']}
        bill_hh['unit_code'] = {hh['unit_code']}
        bill_hh['unit_factor'] = {hh['unit_factor']}
        bill_hh['calorific_value'] = {hh['calorific_value']}
        kwh = hh['kwh']
        bill_hh['kwh'] = kwh
        gas_rate = float(
            g_rates(ds.sess, ds.caches, db_id, start_date)['gas_rate'])
        bill_hh['gas_rate'] = {gas_rate}
        bill_hh['gas_gbp'] = gas_rate * kwh
        bill_hh['ccl_kwh'] = kwh
        ccl_rate = hh['ccl']
        bill_hh['ccl_rate'] = {ccl_rate}
        bill_hh['ccl_kwh'] = kwh
        bill_hh['ccl_gbp'] = kwh * ccl_rate
        if hh['utc_is_month_end']:
            standing_rate = float(
                g_rates(
                    ds.sess, ds.caches, db_id, start_date)['standing_rate'])
            bill_hh['standing_rate'] = {standing_rate}
            bill_hh['standing_gbp'] = standing_rate
        if hh['utc_decimal_hour'] == 0:
            pass

        bill_hh['net_gbp'] = sum(
            v for k, v in bill_hh.items() if k.endswith('gbp'))
        bill_hh['vat_gbp'] = 0
        bill_hh['gross_gbp'] = bill_hh['net_gbp'] + bill_hh['vat_gbp']

    ds.bill = reduce_bill_hhs(ds.bill_hhs)
"""
    g_contract_rate_script = {
        "gas_rate": 0.1,
        "standing_rate": 0.1,
    }
    g_contract = GContract.insert(
        sess,
        "Fusion 2020",
        charge_script,
        {},
        utc_datetime(2000, 1, 1),
        None,
        g_contract_rate_script,
    )
    insert_g_reading_frequencies(sess)
    g_reading_frequency_M = GReadingFrequency.get_by_code(sess, "M")
    msn = "hgeu8rhg"
    g_supply = site.insert_g_supply(
        sess,
        "87614362",
        "main",
        g_exit_zone,
        utc_datetime(2010, 1, 1),
        None,
        msn,
        1,
        g_unit_M3,
        g_contract,
        "d7gthekrg",
        g_reading_frequency_M,
    )
    g_batch = g_contract.insert_g_batch(sess, "b1", "Jan batch")

    breakdown = {"units_consumed": 771}
    insert_bill_types(sess)
    bill_type_N = BillType.get_by_code(sess, "N")
    insert_g_read_types(sess)
    g_read_type_A = GReadType.get_by_code(sess, "A")
    g_bill = g_batch.insert_g_bill(
        sess,
        g_supply,
        bill_type_N,
        "55h883",
        "dhgh883",
        utc_datetime(2019, 4, 3),
        utc_datetime(2015, 9, 1),
        utc_datetime(2015, 9, 30, 22, 30),
        Decimal("45"),
        Decimal("12.40"),
        Decimal("1.20"),
        Decimal("14.52"),
        "",
        breakdown,
    )
    g_bill.insert_g_read(
        sess,
        msn,
        g_unit_M3,
        Decimal("1"),
        Decimal("37"),
        Decimal("90"),
        utc_datetime(2015, 9, 1),
        g_read_type_A,
        Decimal("890"),
        utc_datetime(2015, 9, 25),
        g_read_type_A,
    )
    sess.commit()

    mock_file = StringIO()
    mock_file.close = mocker.Mock()
    mocker.patch(
        "chellow.reports.report_g_virtual_bills_hh.open", return_value=mock_file
    )
    mocker.patch(
        "chellow.reports.report_g_virtual_bills_hh.chellow.dloads.make_names",
        return_value=("a", "b"),
    )
    mocker.patch("chellow.reports.report_g_virtual_bills.os.rename")

    user = mocker.Mock()
    g_supply_id = g_supply.id
    start_date = to_utc(ct_datetime(2018, 2, 1))
    finish_date = to_utc(ct_datetime(2018, 2, 1, 0, 30))

    chellow.reports.report_g_virtual_bills_hh.content(
        g_supply_id, start_date, finish_date, user
    )

    mock_file.seek(0)
    table = list(csv.reader(mock_file))

    expected = [
        [
            "MPRN",
            "Site Code",
            "Site Name",
            "Account",
            "HH Start",
            "",
            "",
            "units_consumed",
            "correction_factor",
            "unit_code",
            "unit_factor",
            "calorific_value",
            "kwh",
            "gas_rate",
            "gas_gbp",
            "ccl_rate",
            "standing_rate",
            "standing_gbp",
            "net_gbp",
            "vat_gbp",
            "gross_gbp",
            "problem",
        ],
        [
            "87614362",
            "22488",
            "Water Works",
            "d7gthekrg",
            "2018-02-01 00:00",
            "",
            "",
            "0.6944444444444444",
            "1.0",
            "M3",
            "1.0",
            "39.2",
            "7.561728395061729",
            "0.1",
            "0.7561728395061729",
            "0.00198",
            "",
            "",
            "0.7711450617283951",
            "0",
            "0.7711450617283951",
            "",
            "ccl_gbp",
            "0.014972222222222222",
            "ccl_kwh",
            "7.561728395061729",
        ],
        [
            "87614362",
            "22488",
            "Water Works",
            "d7gthekrg",
            "2018-02-01 00:30",
            "",
            "",
            "0.6944444444444444",
            "1.0",
            "M3",
            "1.0",
            "39.2",
            "7.561728395061729",
            "0.1",
            "0.7561728395061729",
            "0.00198",
            "",
            "",
            "0.7711450617283951",
            "0",
            "0.7711450617283951",
            "",
            "ccl_gbp",
            "0.014972222222222222",
            "ccl_kwh",
            "7.561728395061729",
        ],
    ]

    match_tables(table, expected)
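
The test above captures the report's CSV output by patching open so the report writes into a StringIO whose close is a no-op, letting the buffer be rewound and read back. Below is a self-contained sketch of that capture technique using unittest.mock rather than the pytest-mock mocker fixture; write_report is a stand-in for the real report function, not part of chellow.

from io import StringIO
from unittest import mock


def write_report(path):
    # Stand-in for report code that opens a file and writes CSV to it.
    f = open(path, "w", newline="")
    f.write("MPRN,Site Code\n")
    f.close()


mock_file = StringIO()
mock_file.close = lambda: None  # keep the buffer readable after "closing"
with mock.patch("builtins.open", return_value=mock_file):
    write_report("ignored.csv")

mock_file.seek(0)
print(mock_file.read())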
Example #8
    def make_raw_bills(self):
        raw_bills = []
        last_key = None
        title_row = self.sheet.row(0)
        for row_index in range(1, self.sheet.nrows):
            row = self.sheet.row(row_index)
            mpan_core = parse_mpan_core(
                str(int(get_value(row, 'Meter Point'))))
            bill_period = get_value(row, 'Bill Period')
            start_date, finish_date = [
                to_utc(to_ct(Datetime.strptime(d, '%Y-%m-%d')))
                for d in bill_period.split(' - ')]
            finish_date = finish_date + relativedelta(days=1) - HH
            key = (start_date, finish_date, mpan_core)
            from_date = get_date(row, 'From Date', self.book.datemode)
            # to_date = get_date(row, 'To Date', self.book.datemode) + \
            #    relativedelta(days=1) - HH
            issue_date = get_date(row, 'Bill Date', self.book.datemode)
            if last_key != key:
                last_key = key

                bd = {}
                bill = {
                    'bill_type_code': 'N', 'account': mpan_core,
                    'mpans': [mpan_core],
                    'reference': '_'.join(
                        (
                            start_date.strftime('%Y%m%d'),
                            finish_date.strftime('%Y%m%d'), mpan_core)),
                    'issue_date': issue_date, 'start_date': start_date,
                    'finish_date': finish_date, 'kwh': Decimal(0),
                    'net': Decimal('0.00'), 'vat': Decimal('0.00'),
                    'breakdown': bd, 'reads': []}
                raw_bills.append(bill)

            usage = get_value(row, 'Usage')
            usage_units = get_value(row, 'Usage Unit')
            price = get_value(row, 'Price')
            amount = get_value(row, 'Amount')
            amount_dec = Decimal(amount)
            product_item_name = get_value(row, 'Product Item Name')
            rate_name = get_value(row, 'Rate Name')
            if usage_units == 'kWh':
                if product_item_name == 'Renewables Obligation (RO)':
                    bill['kwh'] += round(Decimal(usage), 2)
                elif product_item_name == "Unit Rate":
                    bd_add(bd, 'sum-gsp-kwh', usage)
            description = get_value(row, 'Description')
            if description == 'Standard VAT@20%':
                bill['vat'] += round(amount_dec, 2)
            else:
                bill['net'] += round(amount_dec, 2)

            for q, qname in (
                    (usage, 'Usage'), (price, 'Price'), (amount, 'Amount')):
                try:
                    elem_key = ELEM_MAP[(description, rate_name, qname)]
                    bd_add(bd, elem_key, q)
                except KeyError:
                    pass

            duos_avail_prefix = "DUoS Availability ("
            duos_excess_avail_prefix = "DUoS Excess Availability ("
            if description.startswith("DUoS Availability"):
                if description.startswith(duos_avail_prefix):
                    bd_add(
                        bd, 'duos-availability-kva',
                        int(description[len(duos_avail_prefix):-5]))
                bd_add(bd, 'duos-availability-days', usage)
                bd_add(bd, 'duos-availability-rate', price)
                bd_add(bd, 'duos-availability-gbp', amount)
            elif description.startswith("DUoS Excess Availability"):
                if description.startswith(duos_excess_avail_prefix):
                    bd_add(
                        bd, 'duos-excess-availability-kva',
                        int(description[len(duos_excess_avail_prefix):-5]))
                bd_add(bd, 'duos-excess-availability-days', usage)
                bd_add(bd, 'duos-excess-availability-rate', price)
                bd_add(bd, 'duos-excess-availability-gbp', amount)
            elif description == 'Balancing Services Use of System (BSUoS)':
                if from_date == start_date:
                    bd_add(bd, 'bsuos-estimated-nbp-kwh', usage)
                    bd_add(bd, 'bsuos-estimated-rate', price)
                    bd_add(bd, 'bsuos-estimated-gbp', amount)
                elif amount < 0:
                    bd_add(bd, 'bsuos-prev-estimated-nbp-kwh', usage)
                    bd_add(bd, 'bsuos-prev-estimated-rate', price)
                    bd_add(bd, 'bsuos-prev-estimated-gbp', amount)
                else:
                    bd_add(bd, 'bsuos-prev-sf-nbp-kwh', usage)
                    bd_add(bd, 'bsuos-prev-sf-rate', price)
                    bd_add(bd, 'bsuos-prev-sf-gbp', amount)
            elif description.startswith("FiT Rec - "):
                bd_add(bd, 'fit-reconciliation-gbp', amount)
            elif description.startswith("CfD FiT Rec - "):
                bd_add(bd, 'cfd-fit-reconciliation-gbp', amount)

            bd['raw_lines'] = [str(title_row), str(row)]
            bill['gross'] = bill['net'] + bill['vat']

        for raw_bill in raw_bills:
            bd = raw_bill['breakdown']
            for k, v in tuple(bd.items()):
                if isinstance(v, set):
                    val = ', '.join(sorted(map(str, v)))
                else:
                    val = v
                bd[k] = val

        return raw_bills
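
The loop at the end of make_raw_bills flattens any set collected in a breakdown (such as a set of rates) into a single value before the bills are returned. A small self-contained sketch of that normalisation, joining set members into a sorted, comma-separated string:

def normalise_breakdown(bd):
    return {
        k: ', '.join(sorted(map(str, v))) if isinstance(v, set) else v
        for k, v in bd.items()}


print(normalise_breakdown({'rate': {0.0501, 0.0456}, 'gbp': 5}))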
Example #9
def to_start_date(component):
    return to_utc(to_ct_date(component))
Example #10
def content(user, file_name, file_like, dno_id):
    f = sess = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            "voltage_levels_general_importer.csv", user
        )
        f = open(running_name, mode="w", newline="")
        writer = csv.writer(f, lineterminator="\n")
        titles = (
            "# update",
            "llfc",
            "DNO Code",
            "LLFC Code",
            "Valid From",
            "LLFC Description",
            "Voltage Level Code",
            "Is Substation?",
            "Is Import?",
            "Valid To",
        )
        writer.writerow(titles)
        dno = Party.get_by_id(sess, dno_id)

        if not file_name.endswith(".xlsx"):
            raise BadRequest(f"The file extension for {file_name} isn't recognized.")

        book = openpyxl.load_workbook(file_like, data_only=True, read_only=True)

        TITLE_START = "annex 5 "
        ss_llfcs = []
        for sheet in book.worksheets:
            title = sheet.title.strip().lower()
            print(title)
            if title.startswith(TITLE_START):

                # if llfs_sheet is None:
                #     raise BadRequest(
                #         f"Can't find the sheet with LLFCs in. Looking for a "
                #         f"case-insensitive match on sheet titles beginning "
                #         f"with '{TITLE_START}'.")
                ss_llfcs.extend(tab_llfcs(sheet))

        now_ct = ct_datetime_now()
        if now_ct.month < 4:
            fy_year_ct = now_ct.year - 1
        else:
            fy_year_ct = now_ct.year

        fy_start = to_utc(ct_datetime(fy_year_ct, 4, 1))
        fy_finish = to_utc(ct_datetime(fy_year_ct + 1, 3, 31, 23, 30))
        for ss_llfc in ss_llfcs:
            ss_llfc_code = ss_llfc["code"]
            llfc = (
                sess.query(Llfc)
                .filter(
                    Llfc.dno == dno,
                    Llfc.code == ss_llfc_code,
                    Llfc.valid_from <= fy_finish,
                    or_(Llfc.valid_to == null(), Llfc.valid_to >= fy_start),
                )
                .first()
            )

            if llfc is None:
                raise BadRequest(
                    f"There is no LLFC with the code '{ss_llfc_code}' "
                    f"associated with the DNO {dno.code} from "
                    f"{hh_format(fy_start)} to {hh_format(fy_finish)}."
                )

            row = _make_row(llfc, ss_llfc)
            if row is not None:
                writer.writerow(row)

            # Avoid a long-running transaction
            sess.rollback()

    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg)
        writer.writerow([msg])
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
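
The financial-year calculation in content picks the year whose 1 April start precedes "now": UK financial years run from 1 April, so dates in January to March belong to the financial year that began the previous calendar year. A self-contained sketch:

from datetime import date


def financial_year_start(d):
    # Months before April fall in the financial year that began a year earlier.
    return d.year - 1 if d.month < 4 else d.year


print(financial_year_start(date(2021, 2, 15)))  # 2020
print(financial_year_start(date(2021, 4, 1)))   # 2021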
Example #11
def get_finish_date(row, name, datemode):
    d = get_date_ct(row, name, datemode)
    return to_utc(ct_datetime(d.year, d.month, d.day, 23, 30))
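
get_finish_date maps a calendar day to the start of its last half-hour, 23:30 local clock time, expressed in UTC; the same convention appears elsewhere in these examples as finish_date + relativedelta(days=1) - HH. A self-contained sketch using zoneinfo in place of ct_datetime/to_utc, assuming the Europe/London zone:

from datetime import datetime, timezone
from zoneinfo import ZoneInfo


def day_finish_utc(year, month, day):
    ct = datetime(year, month, day, 23, 30, tzinfo=ZoneInfo("Europe/London"))
    return ct.astimezone(timezone.utc)


print(day_finish_utc(2020, 6, 30))  # 22:30 UTC while BST is in force
print(day_finish_utc(2020, 1, 31))  # 23:30 UTC in winter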
Example #12
    def run(self):
        while not self.stopped.is_set():
            if self.lock.acquire(False):
                sess = book = sbp_sheet = ssp_sheet = None
                try:
                    sess = Session()
                    self.log("Starting to check System Prices.")
                    # ct_tz = pytz.timezone('Europe/London')
                    contract = Contract.get_non_core_by_name(
                        sess, 'system_price')
                    contract_props = contract.make_properties()

                    if contract_props.get('enabled', False):
                        for rscript in sess.query(RateScript).filter(
                                RateScript.contract == contract).order_by(
                                RateScript.start_date.desc()):
                            ns = json.loads(rscript.script)
                            rates = ns['gbp_per_nbp_mwh']
                            if len(rates) == 0:
                                fill_start = rscript.start_date
                                break
                            elif rates[
                                    key_format(
                                        rscript.finish_date)]['run'] == 'DF':
                                fill_start = rscript.finish_date + HH
                                break

                        config = Contract.get_non_core_by_name(
                            sess, 'configuration')
                        config_props = config.make_properties()

                        scripting_key = config_props.get(
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                        if scripting_key is None:
                            raise BadRequest(
                                "The property " +
                                ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                                " cannot be found in the configuration "
                                "properties.")
                        url_str = contract_props['url'] + \
                            'file/download/BESTVIEWPRICES_FILE?key=' + \
                            scripting_key

                        self.log(
                            "Downloading from " + url_str +
                            " and extracting data from " +
                            hh_format(fill_start))

                        url = urllib.parse.urlparse(url_str)
                        if url.scheme == 'https':
                            conn = http.client.HTTPSConnection(
                                url.hostname, url.port)
                        else:
                            conn = http.client.HTTPConnection(
                                url.hostname, url.port)
                        conn.request("GET", url.path + '?' + url.query)

                        res = conn.getresponse()
                        self.log(
                            "Received " + str(res.status) + " " + res.reason)
                        data = res.read()
                        book = xlrd.open_workbook(file_contents=data)
                        sbp_sheet = book.sheet_by_index(1)
                        ssp_sheet = book.sheet_by_index(2)

                        sp_months = []
                        sp_month = None
                        for row_index in range(1, sbp_sheet.nrows):
                            sbp_row = sbp_sheet.row(row_index)
                            ssp_row = ssp_sheet.row(row_index)
                            raw_date = datetime.datetime(
                                *xlrd.xldate_as_tuple(
                                    sbp_row[0].value, book.datemode))
                            hh_date_ct = to_ct(raw_date)
                            hh_date = to_utc(hh_date_ct)
                            run_code = sbp_row[1].value
                            for col_idx in range(2, 52):
                                if hh_date >= fill_start:
                                    sbp_val = sbp_row[col_idx].value
                                    if sbp_val != '':
                                        if hh_date.day == 1 and \
                                                hh_date.hour == 0 and \
                                                hh_date.minute == 0:
                                            sp_month = {}
                                            sp_months.append(sp_month)
                                        ssp_val = ssp_row[col_idx].value
                                        sp_month[hh_date] = {
                                            'run': run_code,
                                            'sbp': sbp_val, 'ssp': ssp_val}
                                hh_date += HH
                        self.log("Successfully extracted data.")
                        last_date = sorted(sp_months[-1].keys())[-1]
                        if last_date.month == (last_date + HH).month:
                            del sp_months[-1]
                        if 'limit' in contract_props:
                            sp_months = sp_months[0:1]
                        for sp_month in sp_months:
                            sorted_keys = sorted(sp_month.keys())
                            month_start = sorted_keys[0]
                            month_finish = sorted_keys[-1]
                            rs = sess.query(RateScript).filter(
                                RateScript.contract == contract,
                                RateScript.start_date == month_start).first()
                            if rs is None:
                                self.log(
                                    "Adding a new rate script starting at " +
                                    hh_format(month_start) + ".")

                                latest_rs = sess.query(RateScript).filter(
                                    RateScript.contract == contract).\
                                    order_by(RateScript.start_date.desc()). \
                                    first()

                                contract.update_rate_script(
                                    sess, latest_rs, latest_rs.start_date,
                                    month_finish, latest_rs.script)
                                rs = contract.insert_rate_script(
                                    sess, month_start, '')
                                sess.flush()
                            script = {
                                'gbp_per_nbp_mwh': dict(
                                    (key_format(k), v)
                                    for k, v in sp_month.items())}
                            self.log(
                                "Updating rate script starting at " +
                                hh_format(month_start) + ".")
                            contract.update_rate_script(
                                sess, rs, rs.start_date, rs.finish_date,
                                json.dumps(
                                    script, indent='    ', sort_keys=True))
                            sess.commit()
                    else:
                        self.log(
                            "The automatic importer is disabled. To "
                            "enable it, edit the contract properties to "
                            "set 'enabled' to True.")

                except BaseException:
                    self.log("Outer problem " + traceback.format_exc())
                    sess.rollback()
                finally:
                    book = sbp_sheet = ssp_sheet = None
                    self.lock.release()
                    self.log("Finished checking System Price rates.")
                    if sess is not None:
                        sess.close()

            self.going.wait(24 * 60 * 60)
            self.going.clear()
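
The extraction loop in run walks the spreadsheet half-hour by half-hour and starts a new month bucket whenever a half-hour falls exactly on the first instant of a month. The self-contained sketch below shows just that bucketing, ignoring the fill_start and blank-cell checks of the real loop; the price list is invented.

from datetime import datetime, timedelta

HH = timedelta(minutes=30)


def bucket_by_month(start, prices):
    sp_months, sp_month = [], None
    hh_date = start
    for price in prices:
        if hh_date.day == 1 and hh_date.hour == 0 and hh_date.minute == 0:
            sp_month = {}
            sp_months.append(sp_month)
        if sp_month is not None:
            sp_month[hh_date] = price
        hh_date += HH
    return sp_months


months = bucket_by_month(datetime(2020, 1, 1), [40.0] * (31 * 48 + 2))
print([len(m) for m in months])  # [1488, 2]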
Example #13
def content(
        scenario_props, scenario_id, base_name, site_id, supply_id, user,
        compression):
    now = utc_datetime_now()
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs

    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue

            try:
                rate_start = props['start_date']
            except KeyError:
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")

            if rate_start is not None:
                rate_start = to_utc(rate_start)

            lib = importlib.import_module('chellow.' + contract.name)

            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}

        start_date = scenario_props['scenario_start']
        if start_date is None:
            start_date = utc_datetime(now.year, now.month, 1)
        else:
            start_date = to_utc(start_date)

        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None

        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = to_utc(kwh_start)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)

        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []
        changes = defaultdict(list, {})

        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''

        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) + " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = to_utc(Datetime.strptime(date_str.strip(), "%Y-%m-%d"))
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})

        era_header_titles = [
            'creation-date', 'imp-mpan-core', 'imp-supplier-contract',
            'exp-mpan-core', 'exp-supplier-contract', 'metering-type',
            'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'creation-date', 'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp']

        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr).join(Era.supply). \
                join(Source).filter(
                    Era.start_date <= finish_date, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles')
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). \
            join(Era).filter(
                Era.start_date <= finish_date, or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)
            ).order_by(Tpr.code).distinct()
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['imp-supplier'].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['exp-supplier'].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        month_start = start_date
        print("start date", start_date, "finish date", finish_date)
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]

                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                calcs = []
                deltas = defaultdict(int)
                for era in sess.query(Era).join(SiteEra).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.start_date <= month_finish, or_(
                            Era.finish_date == null(),
                            Era.finish_date >= month_start)).options(
                        joinedload(Era.ssc),
                        joinedload(Era.hhdc_contract),
                        joinedload(Era.mop_contract),
                        joinedload(Era.imp_supplier_contract),
                        joinedload(Era.exp_supplier_contract),
                        joinedload(Era.channels),
                        joinedload(Era.imp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.exp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.cop),
                        joinedload(Era.supply).joinedload(
                            Supply.dno_contract),
                        joinedload(Era.supply).joinedload(
                            Supply.gsp_group),
                        joinedload(Era.mtc).joinedload(Mtc.meter_type),
                        joinedload(Era.pc), joinedload(Era.site_eras)):

                    supply = era.supply
                    if supply.generator_type is not None:
                        site_gen_types.add(supply.generator_type.code)

                    if supply_id is not None and supply.id != supply_id:
                        continue

                    if era.start_date > month_start:
                        ss_start = era.start_date
                    else:
                        ss_start = month_start

                    if hh_before(era.finish_date, month_finish):
                        ss_finish = era.finish_date
                    else:
                        ss_finish = month_finish

                    if era.imp_mpan_core is None:
                        imp_ss = None
                    else:
                        imp_ss = SupplySource(
                            sess, ss_start, ss_finish, kwh_start, era, True,
                            report_context)

                    if era.exp_mpan_core is None:
                        exp_ss = None
                        measurement_type = imp_ss.measurement_type
                    else:
                        exp_ss = SupplySource(
                            sess, ss_start, ss_finish, kwh_start, era, False,
                            report_context)
                        measurement_type = exp_ss.measurement_type

                    order = meter_order[measurement_type]
                    calcs.append(
                        (
                            order, era.imp_mpan_core, era.exp_mpan_core,
                            imp_ss, exp_ss))

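                    # For import supplies with no channels (so no actual HH
                    # data), remember the estimated kWh so it can be folded
                    # into the site-level import and usage figures below.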
                    if imp_ss is not None and len(era.channels) == 0:
                        for hh in imp_ss.hh_data:
                            deltas[hh['start-date']] += hh['msp-kwh']

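                # Accumulators for the adjustments implied by the site's
                # 'used' and 'generated' overrides, keyed by half-hour start.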
                imp_net_delts = defaultdict(int)
                exp_net_delts = defaultdict(int)
                imp_gen_delts = defaultdict(int)

                displaced_era = chellow.computer.displaced_era(
                    sess, report_context, site, month_start, month_finish,
                    kwh_start)
                site_ds = chellow.computer.SiteSource(
                    sess, site, month_start, month_finish, kwh_start,
                    report_context, displaced_era)

                for hh in site_ds.hh_data:
                    try:
                        delta = deltas[hh['start-date']]
                        hh['import-net-kwh'] += delta
                        hh['used-kwh'] += delta
                    except KeyError:
                        pass

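                # Apply any 'used' or 'generated' overrides in force at each
                # half-hour: rescale the relevant quantity, rebalance net
                # import/export and displaced kWh, and record the changes in
                # the delta accumulators above.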
                for hh in site_ds.hh_data:
                    for change in site_changes:
                        if change['type'] == 'used' and \
                                change['date'] <= hh['start-date']:
                            used = change['multiplier'] * hh['used-kwh']
                            exp_net = max(
                                0, hh['import-gen-kwh'] -
                                hh['export-gen-kwh'] - used)
                            exp_net_delt = exp_net - hh['export-net-kwh']
                            exp_net_delts[hh['start-date']] += exp_net_delt
                            displaced = hh['import-gen-kwh'] - \
                                hh['export-gen-kwh'] - exp_net
                            imp_net = used - displaced
                            imp_delt = imp_net - hh['import-net-kwh']
                            imp_net_delts[hh['start-date']] += imp_delt

                            hh['import-net-kwh'] = imp_net
                            hh['used-kwh'] = used
                            hh['export-net-kwh'] = exp_net
                            hh['msp-kwh'] = displaced
                        elif change['type'] == 'generated' and \
                                change['date'] <= hh['start-date']:
                            imp_gen = change['multiplier'] * \
                                hh['import-gen-kwh']
                            imp_gen_delt = imp_gen - hh['import-gen-kwh']
                            exp_net = max(
                                0, imp_gen - hh['export-gen-kwh'] -
                                hh['used-kwh'])
                            exp_net_delt = exp_net - hh['export-net-kwh']
                            exp_net_delts[hh['start-date']] += exp_net_delt

                            displaced = imp_gen - hh['export-gen-kwh'] - \
                                exp_net

                            imp_net = hh['used-kwh'] - displaced
                            imp_net_delt = imp_net - hh['import-net-kwh']
                            imp_net_delts[hh['start-date']] += imp_net_delt

                            imp_gen_delts[hh['start-date']] += imp_gen_delt

                            hh['import-net-kwh'] = imp_net
                            hh['export-net-kwh'] = exp_net
                            hh['import-gen-kwh'] = imp_gen
                            hh['msp-kwh'] = displaced

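                # Where the site has displaced generation (and the report
                # isn't limited to a single supply), evaluate the displaced
                # virtual bill against the displaced era's import supplier
                # contract and emit it as a 'displaced' row.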
                if displaced_era is not None and supply_id is None:
                    month_data = {}
                    for sname in (
                            'import-net', 'export-net', 'import-gen',
                            'export-gen', 'import-3rd-party',
                            'export-3rd-party', 'msp', 'used',
                            'used-3rd-party', 'billed-import-net'):
                        for xname in ('kwh', 'gbp'):
                            month_data[sname + '-' + xname] = 0

                    month_data['used-kwh'] = \
                        month_data['displaced-kwh'] = \
                        sum(hh['msp-kwh'] for hh in site_ds.hh_data)

                    disp_supplier_contract = \
                        displaced_era.imp_supplier_contract
                    disp_vb_function = chellow.computer.contract_func(
                        report_context, disp_supplier_contract,
                        'displaced_virtual_bill')
                    if disp_vb_function is None:
                        raise BadRequest(
                            "The supplier contract " +
                            disp_supplier_contract.name +
                            " doesn't have the displaced_virtual_bill() "
                            "function.")
                    disp_vb_function(site_ds)
                    disp_supplier_bill = site_ds.supplier_bill

                    try:
                        gbp = disp_supplier_bill['net-gbp']
                    except KeyError:
                        gbp = 0
                        disp_supplier_bill['problem'] += 'For the supply ' + \
                            site_ds.mpan_core + ' the virtual bill ' + \
                            str(disp_supplier_bill) + ' from the contract ' + \
                            disp_supplier_contract.name + \
                            ' does not contain the net-gbp key.'

                    month_data['used-gbp'] = month_data['displaced-gbp'] = gbp

                    out = [
                        now, None, disp_supplier_contract.name, None, None,
                        displaced_era.make_meter_category(), 'displaced', None,
                        None, None, None, site.code, site.name, '',
                        month_finish] + [
                            month_data[t] for t in summary_titles] + [None] + [
                        None] * len(title_dict['mop']) + [None] + [
                        None] * len(title_dict['dc']) + [None] + make_bill_row(
                            title_dict['imp-supplier'], disp_supplier_bill)

                    era_rows.append(out)
                    for k, v in month_data.items():
                        site_month_data[k] += v

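                # One row per supply era, ordered by metering category and
                # MPAN core, with the MOP, DC and supplier virtual bills
                # evaluated for the slice of the month the era covers.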
                for i, (
                        order, imp_mpan_core, exp_mpan_core, imp_ss,
                        exp_ss) in enumerate(sorted(calcs, key=str)):
                    if imp_ss is None:
                        era = exp_ss.era
                    else:
                        era = imp_ss.era
                    supply = era.supply
                    source = supply.source
                    source_code = source.code
                    site_sources.add(source_code)
                    month_data = {}
                    for name in (
                            'import-net', 'export-net', 'import-gen',
                            'export-gen', 'import-3rd-party',
                            'export-3rd-party', 'displaced', 'used',
                            'used-3rd-party', 'billed-import-net'):
                        for sname in ('kwh', 'gbp'):
                            month_data[name + '-' + sname] = 0

                    if source_code == 'net':
                        delts = imp_net_delts
                    elif source_code == 'gen':
                        delts = imp_gen_delts
                    else:
                        delts = []

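                    # Fold the accumulated net or generation deltas back into
                    # the import supply's half-hours; any remainder is added
                    # to the first half-hour.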
                    if len(delts) > 0 and imp_ss is not None:
                        for hh in imp_ss.hh_data:
                            diff = hh['msp-kwh'] + delts[hh['start-date']]
                            if diff < 0:
                                hh['msp-kwh'] = 0
                                hh['msp-kw'] = 0
                                delts[hh['start-date']] -= hh['msp-kwh']
                            else:
                                hh['msp-kwh'] += delts[hh['start-date']]
                                hh['msp-kw'] += hh['msp-kwh'] / 2
                                del delts[hh['start-date']]

                        left_kwh = sum(delts.values())
                        if left_kwh > 0:
                            first_hh = imp_ss.hh_data[0]
                            first_hh['msp-kwh'] += left_kwh
                            first_hh['msp-kw'] += left_kwh / 2

                    imp_supplier_contract = era.imp_supplier_contract
                    if imp_supplier_contract is not None:
                        kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data)
                        import_vb_function = contract_func(
                            report_context, imp_supplier_contract,
                            'virtual_bill')
                        if import_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                imp_supplier_contract.name +
                                " doesn't have the virtual_bill() "
                                "function.")
                        import_vb_function(imp_ss)
                        imp_supplier_bill = imp_ss.supplier_bill

                        try:
                            gbp = imp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            imp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                imp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(imp_supplier_bill) + \
                                ' from the contract ' + \
                                imp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                            month_data['import-net-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['export-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == 'gen':
                            month_data['import-gen-kwh'] += kwh

                    exp_supplier_contract = era.exp_supplier_contract
                    if exp_supplier_contract is not None:
                        kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)
                        export_vb_function = contract_func(
                            report_context, exp_supplier_contract,
                            'virtual_bill')
                        export_vb_function(exp_ss)

                        exp_supplier_bill = exp_ss.supplier_bill
                        try:
                            gbp = exp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            exp_supplier_bill['problem'] += \
                                'For the supply ' + exp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(exp_supplier_bill) + \
                                ' from the contract ' + \
                                exp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['export-net-gbp'] += gbp
                            month_data['export-net-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['import-gen-kwh'] += kwh

                        elif source_code == '3rd-party':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == 'gen':
                            month_data['export-gen-kwh'] += kwh

                    sss = exp_ss if imp_ss is None else imp_ss
                    dc_contract = era.hhdc_contract
                    sss.contract_func(dc_contract, 'virtual_bill')(sss)
                    dc_bill = sss.dc_bill
                    gbp = dc_bill['net-gbp']

                    mop_contract = era.mop_contract
                    mop_bill_function = sss.contract_func(
                        mop_contract, 'virtual_bill')
                    mop_bill_function(sss)
                    mop_bill = sss.mop_bill
                    gbp += mop_bill['net-gbp']

                    if source_code in ('3rd-party', '3rd-party-reverse'):
                        month_data['import-3rd-party-gbp'] += gbp
                        month_data['used-3rd-party-gbp'] += gbp
                    else:
                        month_data['import-net-gbp'] += gbp
                    month_data['used-gbp'] += gbp

                    if source_code in ('gen', 'gen-net'):
                        generator_type = supply.generator_type.code
                        site_gen_types.add(generator_type)
                    else:
                        generator_type = None

                    era_category = era.make_meter_category()
                    if CATEGORY_ORDER[site_category] < \
                            CATEGORY_ORDER[era_category]:
                        site_category = era_category
                    era_associates = {
                        s.site.code for s in era.site_eras
                        if not s.is_physical}

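                    # Apportion each overlapping bill's kWh and net GBP to
                    # this chunk in proportion to the overlap. Thirty minutes
                    # is added to each duration because finish timestamps mark
                    # the start of the final half-hour.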
                    for bill in sess.query(Bill).filter(
                            Bill.supply == supply,
                            Bill.start_date <= sss.finish_date,
                            Bill.finish_date >= sss.start_date):
                        bill_start = bill.start_date
                        bill_finish = bill.finish_date
                        bill_duration = (
                            bill_finish - bill_start).total_seconds() + \
                            (30 * 60)
                        overlap_duration = (
                            min(bill_finish, sss.finish_date) -
                            max(bill_start, sss.start_date)
                            ).total_seconds() + (30 * 60)
                        overlap_proportion = overlap_duration / bill_duration
                        month_data['billed-import-net-kwh'] += \
                            overlap_proportion * float(bill.kwh)
                        month_data['billed-import-net-gbp'] += \
                            overlap_proportion * float(bill.net)

                    out = [
                        now, era.imp_mpan_core, (
                            None if imp_supplier_contract is None else
                            imp_supplier_contract.name),
                        era.exp_mpan_core, (
                            None if exp_supplier_contract is None else
                            exp_supplier_contract.name),
                        era_category, source_code, generator_type, supply.name,
                        era.msn, era.pc.code, site.code, site.name,
                        ','.join(sorted(list(era_associates))),
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + \
                        make_bill_row(title_dict['mop'], mop_bill) + [None] + \
                        make_bill_row(title_dict['dc'], dc_bill)
                    if imp_supplier_contract is None:
                        out += [None] * (len(title_dict['imp-supplier']) + 1)
                    else:
                        out += [None] + make_bill_row(
                            title_dict['imp-supplier'], imp_supplier_bill)
                    if exp_supplier_contract is not None:
                        out += [None] + make_bill_row(
                            title_dict['exp-supplier'], exp_supplier_bill)

                    for k, v in month_data.items():
                        site_month_data[k] += v
                    era_rows.append(out)

                site_rows.append(
                    [
                        now, site.code, site.name, ', '.join(
                            s.code for s in site.find_linked_sites(
                                sess, month_start, month_finish)),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
            write_spreadsheet(rf, compression, site_rows, era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            with open(r_name, "w") as ef:
                ef.write(msg + '\n')
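
# Illustrative sketch (not part of chellow): the bill apportionment used in
# the report above, shown in isolation. Durations gain 30 minutes because
# finish timestamps mark the start of the final half-hour.
from datetime import datetime

HH_SECONDS = 30 * 60


def apportion(bill_start, bill_finish, chunk_start, chunk_finish, quantity):
    bill_duration = (bill_finish - bill_start).total_seconds() + HH_SECONDS
    overlap_duration = (
        min(bill_finish, chunk_finish) -
        max(bill_start, chunk_start)).total_seconds() + HH_SECONDS
    return overlap_duration / bill_duration * quantity


# A 20-day bill of 900 kWh overlapping the chunk for 10 days contributes half.
print(apportion(
    datetime(2020, 4, 1), datetime(2020, 4, 20, 23, 30),
    datetime(2020, 4, 11), datetime(2020, 4, 30, 23, 30), 900.0))  # 450.0
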
Example #14
0
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user,
            compression):
    now = utc_datetime_now()
    report_context = {}

    try:
        comp = report_context['computer']
    except KeyError:
        comp = report_context['computer'] = {}

    try:
        rate_cache = comp['rates']
    except KeyError:
        rate_cache = comp['rates'] = {}

    try:
        ind_cont = report_context['contract_names']
    except KeyError:
        ind_cont = report_context['contract_names'] = {}

    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        start_date = scenario_props['scenario_start']
        if start_date is None:
            start_date = utc_datetime(now.year, now.month, 1)
        else:
            start_date = to_utc(start_date)

        base_name.append(
            hh_format(start_date).replace(' ',
                                          '_').replace(':',
                                                       '').replace('-', ''))

        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'forecast_from' in scenario_props:
            forecast_from = scenario_props['forecast_from']
        else:
            forecast_from = None

        if forecast_from is None:
            forecast_from = chellow.computer.forecast_date()
        else:
            forecast_from = to_utc(forecast_from)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)

        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []

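        # Pre-load the rate caches with any scenario-specific 'local_rates'
        # and 'industry_rates' scripts, so later calculations use these in
        # place of the stored rate scripts for the covered half-hours.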
        for rate_script in get_map_list(scenario_props, 'local_rates'):
            contract_id = rate_script['contract_id']
            try:
                cont_cache = rate_cache[contract_id]
            except KeyError:
                cont_cache = rate_cache[contract_id] = {}

            try:
                rate_script_start = rate_script['start_date']
            except KeyError:
                raise BadRequest(
                    "Problem in the scenario properties. Can't find the " +
                    "'start_date' key of the contract " + str(contract_id) +
                    " in the 'local_rates' map.")

            for dt in hh_range(report_context, rate_script_start,
                               rate_script['finish_date']):
                cont_cache[dt] = PropDict('scenario properties',
                                          rate_script['script'])

        for rate_script in get_map_list(scenario_props, 'industry_rates'):
            contract_name = rate_script['contract_name']
            try:
                cont_cache = ind_cont[contract_name]
            except KeyError:
                cont_cache = ind_cont[contract_name] = {}

            rfinish = rate_script['finish_date']
            if rfinish is None:
                raise BadRequest("For the industry rate " + contract_name +
                                 " the "
                                 "finish_date can't be null.")
            for dt in hh_range(report_context, rate_script['start_date'],
                               rfinish):
                cont_cache[dt] = PropDict('scenario properties',
                                          rate_script['script'])

        era_maps = scenario_props.get('era_maps', {})

        scenario_hh = scenario_props.get('hh_data', {})

        era_header_titles = [
            'creation-date', 'imp-mpan-core', 'imp-supplier-contract',
            'exp-mpan-core', 'exp-supplier-contract', 'metering-type',
            'source', 'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month'
        ]
        site_header_titles = [
            'creation-date', 'site-id', 'site-name', 'associated-site-ids',
            'month', 'metering-type', 'sources', 'generator-types'
        ]
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp'
        ]

        title_dict = {}
        for cont_type, con_attr in (('mop', Era.mop_contract),
                                    ('dc', Era.dc_contract),
                                    ('imp-supplier',
                                     Era.imp_supplier_contract),
                                    ('exp-supplier',
                                     Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr).join(Era.supply). \
                join(Source).filter(
                    Era.start_date <= finish_date, or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles')
                if title_func is None:
                    raise Exception("For the contract " + cont.name +
                                    " there doesn't seem to be a "
                                    "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        tpr_query = sess.query(Tpr).join(MeasurementRequirement).join(Ssc). \
            join(Era).filter(
                Era.start_date <= finish_date, or_(
                    Era.finish_date == null(),
                    Era.finish_date >= start_date)
            ).order_by(Tpr.code).distinct()
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['imp-supplier'].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ('-kwh', '-rate', '-gbp'):
                title_dict['exp-supplier'].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] + [None] +
            ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        deltas = {}
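        # First pass: parse each site's pasted half-hourly 'used',
        # 'generated', 'parasitic' and 'gen_net' figures and turn them into
        # per-supply kWh deltas keyed by half-hour start.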
        for site in sites:
            try:
                site_scenario_hh = scenario_hh[site.code]
            except KeyError:
                site_scenario_hh = scenario_hh[site.code] = {}

            site_deltas = deltas[site.code] = {'hhs': {}}
            delts = site_deltas['supply_deltas'] = {}
            for is_import in (True, False):
                delts[is_import] = {}
                for src in ('gen', 'net', 'gen-net', '3rd-party',
                            '3rd-party-reverse', 'sub'):
                    delts[is_import][src] = {'site': {}}

            earliest_delta = to_utc(Datetime.max)
            latest_delta = to_utc(Datetime.min)

            found_hh = False
            for typ in ('used', 'generated', 'parasitic', 'gen_net'):
                hh_str = site_scenario_hh.get(typ, '')
                hh_data = site_scenario_hh[typ] = {}
                for row in csv.reader(StringIO(hh_str)):
                    cells = [cell.strip() for cell in row]
                    if len(''.join(cells)) == 0:
                        continue

                    if len(cells) != 2:
                        raise BadRequest(
                            "Can't interpret the row " + str(cells) +
                            " it should be of the form 'timestamp, kWh'")

                    date_str, kw_str = cells
                    ts = parse_hh_start(date_str)
                    earliest_delta = min(ts, earliest_delta)
                    latest_delta = max(ts, latest_delta)
                    hh_data[ts] = float(kw_str)
                    found_hh = True

            if not found_hh:
                continue

            scenario_used = site_scenario_hh['used']
            scenario_generated = site_scenario_hh['generated']
            scenario_parasitic = site_scenario_hh['parasitic']
            scenario_gen_net = site_scenario_hh['gen_net']

            month_start = utc_datetime(earliest_delta.year,
                                       earliest_delta.month)
            while month_start <= latest_delta:
                month_finish = month_start + relativedelta(months=1) - HH
                chunk_start = hh_max(month_start, earliest_delta)
                chunk_finish = hh_min(month_finish, latest_delta)
                site_ds = chellow.computer.SiteSource(sess, site, chunk_start,
                                                      chunk_finish,
                                                      forecast_from,
                                                      report_context)
                hh_map = dict((h['start-date'], h) for h in site_ds.hh_data)

                for era in sess.query(Era).join(SiteEra).join(Pc).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.imp_mpan_core != null(), Pc.code != '00',
                        Era.start_date <= chunk_finish,
                        or_(Era.finish_date == null(),
                            Era.finish_date >= chunk_start),
                        ~Era.channels.any()):

                    if supply_id is not None and era.supply_id != supply_id:
                        continue

                    ss_start = hh_max(era.start_date, chunk_start)
                    ss_finish = hh_min(era.finish_date, chunk_finish)

                    ss = SupplySource(sess, ss_start, ss_finish, forecast_from,
                                      era, True, report_context)

                    for hh in ss.hh_data:
                        sdatum = hh_map[hh['start-date']]
                        sdatum['import-net-kwh'] += hh['msp-kwh']
                        sdatum['used-kwh'] += hh['msp-kwh']

                for era in sess.query(Era).join(SiteEra).join(Pc).join(
                        Supply).join(Source).filter(
                            SiteEra.site == site,
                            SiteEra.is_physical == true(),
                            Era.imp_mpan_core != null(),
                            Era.start_date <= chunk_finish,
                            or_(Era.finish_date == null(),
                                Era.finish_date >= chunk_start),
                            Source.code == 'gen-net'):

                    if supply_id is not None and era.supply_id != supply_id:
                        continue

                    ss_start = hh_max(era.start_date, chunk_start)
                    ss_finish = hh_min(era.finish_date, chunk_finish)

                    ss = SupplySource(sess, ss_start, ss_finish, forecast_from,
                                      era, False, report_context)

                    for hh in ss.hh_data:
                        sdatum = hh_map[hh['start-date']]
                        try:
                            sdatum['gen-net-kwh'] += hh['msp-kwh']
                        except KeyError:
                            sdatum['gen-net-kwh'] = hh['msp-kwh']

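                # Compare the scenario figures with the site's actual
                # half-hourly data and record the differences as net and
                # generation deltas, updating the hh values in place.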
                for hh in site_ds.hh_data:
                    hh_start = hh['start-date']
                    if hh_start in scenario_used:
                        used_delt = scenario_used[hh_start] - hh['used-kwh']
                        imp_net_delt = 0
                        exp_net_delt = 0

                        if used_delt < 0:
                            diff = hh['import-net-kwh'] + used_delt
                            if diff < 0:
                                imp_net_delt -= hh['import-net-kwh']
                                exp_net_delt -= diff
                            else:
                                imp_net_delt += used_delt
                        else:
                            diff = hh['export-net-kwh'] - used_delt
                            if diff < 0:
                                exp_net_delt -= hh['export-net-kwh']
                                imp_net_delt -= diff
                            else:
                                exp_net_delt -= used_delt

                        try:
                            delts[False]['net']['site'][hh_start] += \
                                exp_net_delt
                        except KeyError:
                            delts[False]['net']['site'][hh_start] = \
                                exp_net_delt

                        try:
                            delts[True]['net']['site'][hh_start] += \
                                imp_net_delt
                        except KeyError:
                            delts[True]['net']['site'][hh_start] = imp_net_delt

                        hh['import-net-kwh'] += imp_net_delt
                        hh['export-net-kwh'] += exp_net_delt
                        hh['used-kwh'] += used_delt
                        hh['msp-kwh'] -= exp_net_delt

                    if hh_start in scenario_generated:
                        imp_gen_delt = scenario_generated[hh_start] - \
                            hh['import-gen-kwh']
                        imp_net_delt = 0
                        exp_net_delt = 0

                        if imp_gen_delt < 0:
                            diff = hh['export-net-kwh'] + imp_gen_delt
                            if diff < 0:
                                exp_net_delt -= hh['export-net-kwh']
                                imp_net_delt -= diff
                            else:
                                exp_net_delt += imp_gen_delt
                        else:
                            diff = hh['import-net-kwh'] - imp_gen_delt
                            if diff < 0:
                                imp_net_delt -= hh['import-net-kwh']
                                exp_net_delt -= diff
                            else:
                                imp_net_delt -= imp_gen_delt

                        try:
                            delts[True]['gen']['site'][hh_start] += \
                                imp_gen_delt
                        except KeyError:
                            delts[True]['gen']['site'][hh_start] = imp_gen_delt

                        try:
                            delts[False]['net']['site'][hh_start] += \
                                exp_net_delt
                        except KeyError:
                            delts[False]['net']['site'][hh_start] = \
                                exp_net_delt

                        try:
                            delts[True]['net']['site'][hh_start] += \
                                imp_net_delt
                        except KeyError:
                            delts[True]['net']['site'][hh_start] = imp_net_delt

                        hh['import-net-kwh'] += imp_net_delt
                        hh['export-net-kwh'] += exp_net_delt
                        hh['import-gen-kwh'] += imp_gen_delt
                        hh['msp-kwh'] -= imp_net_delt

                    if hh_start in scenario_parasitic:
                        exp_gen_delt = scenario_parasitic[hh_start] - \
                            hh['export-gen-kwh']
                        imp_net_delt = 0
                        exp_net_delt = 0

                        if exp_gen_delt < 0:
                            diff = hh['import-net-kwh'] + exp_gen_delt
                            if diff < 0:
                                imp_net_delt -= hh['import-net-kwh']
                                exp_net_delt -= diff
                            else:
                                imp_net_delt += exp_gen_delt
                        else:
                            diff = hh['export-net-kwh'] - exp_gen_delt
                            if diff < 0:
                                exp_net_delt -= hh['export-net-kwh']
                                imp_net_delt -= diff
                            else:
                                exp_net_delt -= exp_gen_delt

                        try:
                            delts[False]['gen']['site'][hh_start] += \
                                exp_gen_delt
                        except KeyError:
                            delts[False]['gen']['site'][hh_start] = \
                                exp_gen_delt

                        try:
                            delts[False]['net']['site'][hh_start] += \
                                exp_net_delt
                        except KeyError:
                            delts[False]['net']['site'][hh_start] = \
                                exp_net_delt

                        try:
                            delts[True]['net']['site'][hh_start] += \
                                imp_net_delt
                        except KeyError:
                            delts[True]['net']['site'][hh_start] = imp_net_delt

                        hh['import-net-kwh'] += imp_net_delt
                        hh['export-net-kwh'] += exp_net_delt
                        hh['export-gen-kwh'] += exp_gen_delt
                        hh['msp-kwh'] -= imp_net_delt

                    if hh_start in scenario_gen_net:
                        gen_net_delt = scenario_gen_net[hh_start] - \
                            hh['gen-net-kwh']

                        try:
                            delts[False]['gen-net']['site'][hh_start] += \
                                gen_net_delt
                        except KeyError:
                            delts[False]['gen-net']['site'][hh_start] = \
                                gen_net_delt

                        hh['import-gen-kwh'] += gen_net_delt
                        hh['export-net-kwh'] += gen_net_delt

                    site_deltas['hhs'][hh_start] = hh
                month_start += relativedelta(months=1)

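        # Second pass: the month-by-month report proper, feeding the deltas
        # built above into each SupplySource and SiteSource.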
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                calcs = []
                for era in sess.query(Era).join(SiteEra).join(Pc).filter(
                        SiteEra.site == site, SiteEra.is_physical == true(),
                        Era.start_date <= month_finish,
                        or_(Era.finish_date == null(),
                            Era.finish_date >= month_start)).options(
                                joinedload(Era.ssc),
                                joinedload(Era.dc_contract),
                                joinedload(Era.mop_contract),
                                joinedload(Era.imp_supplier_contract),
                                joinedload(Era.exp_supplier_contract),
                                joinedload(Era.channels),
                                joinedload(Era.imp_llfc).joinedload(
                                    Llfc.voltage_level),
                                joinedload(Era.exp_llfc).joinedload(
                                    Llfc.voltage_level), joinedload(Era.cop),
                                joinedload(Era.supply).joinedload(Supply.dno),
                                joinedload(Era.supply).joinedload(
                                    Supply.gsp_group),
                                joinedload(Era.supply).joinedload(
                                    Supply.source),
                                joinedload(Era.mtc).joinedload(Mtc.meter_type),
                                joinedload(Era.pc),
                                joinedload(Era.site_eras)).order_by(Pc.code):

                    supply = era.supply
                    if supply.generator_type is not None:
                        site_gen_types.add(supply.generator_type.code)

                    if supply_id is not None and supply.id != supply_id:
                        continue

                    ss_start = hh_max(era.start_date, month_start)
                    ss_finish = hh_min(era.finish_date, month_finish)

                    if era.imp_mpan_core is None:
                        imp_ss = None
                    else:
                        sup_deltas = site_deltas['supply_deltas'][True][
                            supply.source.code]

                        imp_ss = SupplySource(sess,
                                              ss_start,
                                              ss_finish,
                                              forecast_from,
                                              era,
                                              True,
                                              report_context,
                                              era_maps=era_maps,
                                              deltas=sup_deltas)

                    if era.exp_mpan_core is None:
                        exp_ss = None
                        measurement_type = imp_ss.measurement_type
                    else:
                        sup_deltas = site_deltas['supply_deltas'][False][
                            supply.source.code]

                        exp_ss = SupplySource(sess,
                                              ss_start,
                                              ss_finish,
                                              forecast_from,
                                              era,
                                              False,
                                              report_context,
                                              era_maps=era_maps,
                                              deltas=sup_deltas)
                        measurement_type = exp_ss.measurement_type

                    order = meter_order[measurement_type]
                    calcs.append((order, era.imp_mpan_core, era.exp_mpan_core,
                                  imp_ss, exp_ss))

                # Check if gen deltas haven't been consumed
                extra_sss = set()
                for is_imp in (True, False):
                    sup_deltas = site_deltas['supply_deltas'][is_imp]['gen']
                    if len(
                            list(t for t in sup_deltas['site']
                                 if month_start <= t <= month_finish)) > 0:
                        extra_sss.add(is_imp)

                displaced_era = chellow.computer.displaced_era(
                    sess,
                    report_context,
                    site,
                    month_start,
                    month_finish,
                    forecast_from,
                    has_scenario_generation=len(extra_sss) > 0)

                if len(extra_sss) > 0:
                    if True in extra_sss:
                        sup_deltas = site_deltas['supply_deltas'][True]['gen']
                        imp_ss_name = site.code + "_extra_gen_TRUE"
                        imp_ss = ScenarioSource(
                            sess, month_start, month_finish, True,
                            report_context, sup_deltas,
                            displaced_era.imp_supplier_contract, imp_ss_name)
                    else:
                        imp_ss_name = imp_ss = None
                    if False in extra_sss:
                        exp_ss_name = site.code + "_extra_gen_FALSE"
                        sup_deltas = site_deltas['supply_deltas'][False]['gen']
                        exp_ss = ScenarioSource(
                            sess, month_start, month_finish, False,
                            report_context, sup_deltas,
                            displaced_era.imp_supplier_contract, exp_ss_name)
                    else:
                        exp_ss_name = exp_ss = None

                    calcs.append((0, imp_ss_name, exp_ss_name, imp_ss, exp_ss))

                # Check if exp net deltas haven't been consumed
                sup_deltas = site_deltas['supply_deltas'][False]['net']
                if len(
                        list(t for t in sup_deltas['site']
                             if month_start <= t <= month_finish)) > 0:
                    ss_name = site.code + "_extra_net_export"
                    ss = SupplySource(sess,
                                      month_start,
                                      month_finish,
                                      forecast_from,
                                      displaced_era,
                                      False,
                                      report_context,
                                      era_maps=era_maps,
                                      deltas=sup_deltas)

                    calcs.append((0, None, ss_name, None, ss))

                site_ds = chellow.computer.SiteSource(sess,
                                                      site,
                                                      month_start,
                                                      month_finish,
                                                      forecast_from,
                                                      report_context,
                                                      displaced_era,
                                                      deltas=site_deltas)

                if displaced_era is not None and supply_id is None:
                    month_data = {}
                    for sname in ('import-net', 'export-net', 'import-gen',
                                  'export-gen', 'import-3rd-party',
                                  'export-3rd-party', 'msp', 'used',
                                  'used-3rd-party', 'billed-import-net'):
                        for xname in ('kwh', 'gbp'):
                            month_data[sname + '-' + xname] = 0

                    month_data['used-kwh'] = month_data['displaced-kwh'] = sum(
                        hh['msp-kwh'] for hh in site_ds.hh_data)

                    disp_supplier_contract = \
                        displaced_era.imp_supplier_contract
                    disp_vb_function = chellow.computer.contract_func(
                        report_context, disp_supplier_contract,
                        'displaced_virtual_bill')
                    if disp_vb_function is None:
                        raise BadRequest(
                            "The supplier contract " +
                            disp_supplier_contract.name +
                            " doesn't have the displaced_virtual_bill() "
                            "function.")
                    disp_vb_function(site_ds)
                    disp_supplier_bill = site_ds.supplier_bill

                    try:
                        gbp = disp_supplier_bill['net-gbp']
                    except KeyError:
                        gbp = 0
                        disp_supplier_bill['problem'] += 'For the supply ' + \
                            site_ds.mpan_core + ' the virtual bill ' + \
                            str(disp_supplier_bill) + ' from the contract ' + \
                            disp_supplier_contract.name + \
                            ' does not contain the net-gbp key.'

                    month_data['used-gbp'] = month_data['displaced-gbp'] = gbp

                    out = [
                        now, None, disp_supplier_contract.name, None, None,
                        displaced_era.meter_category, 'displaced', None, None,
                        None, None, site.code, site.name, '', month_finish
                    ] + [month_data[t] for t in summary_titles
                         ] + [None] + [None] * len(title_dict['mop']) + [
                             None
                         ] + [None] * len(title_dict['dc']) + [
                             None
                         ] + make_bill_row(title_dict['imp-supplier'],
                                           disp_supplier_bill)

                    era_rows.append(out)
                    for k, v in month_data.items():
                        site_month_data[k] += v

                for i, (order, imp_mpan_core, exp_mpan_core, imp_ss,
                        exp_ss) in enumerate(sorted(calcs, key=str)):
                    if imp_ss is None:
                        source_code = exp_ss.source_code
                        supply = exp_ss.supply
                    else:
                        source_code = imp_ss.source_code
                        supply = imp_ss.supply

                    site_sources.add(source_code)
                    month_data = {}
                    for name in ('import-net', 'export-net', 'import-gen',
                                 'export-gen', 'import-3rd-party',
                                 'export-3rd-party', 'displaced', 'used',
                                 'used-3rd-party', 'billed-import-net'):
                        for sname in ('kwh', 'gbp'):
                            month_data[name + '-' + sname] = 0

                    if imp_ss is not None:
                        imp_supplier_contract = imp_ss.supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = contract_func(
                                report_context, imp_supplier_contract,
                                'virtual_bill')
                            if import_vb_function is None:
                                raise BadRequest(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)

                        kwh = sum(hh['msp-kwh'] for hh in imp_ss.hh_data)
                        imp_supplier_bill = imp_ss.supplier_bill

                        try:
                            gbp = imp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            imp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                imp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(imp_supplier_bill) + \
                                ' from the contract ' + \
                                imp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['import-net-gbp'] += gbp
                            month_data['import-net-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['export-gen-kwh'] += kwh
                        elif source_code == '3rd-party':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == 'gen':
                            month_data['import-gen-kwh'] += kwh

                    if exp_ss is not None:
                        exp_supplier_contract = exp_ss.supplier_contract
                        if exp_supplier_contract is not None:
                            export_vb_function = contract_func(
                                report_context, exp_supplier_contract,
                                'virtual_bill')
                            export_vb_function(exp_ss)

                        kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)
                        exp_supplier_bill = exp_ss.supplier_bill
                        try:
                            gbp = exp_supplier_bill['net-gbp']
                        except KeyError:
                            gbp = 0
                            exp_supplier_bill['problem'] += \
                                'For the supply ' + exp_ss.mpan_core + \
                                ' the virtual bill ' + \
                                str(exp_supplier_bill) + \
                                ' from the contract ' + \
                                exp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        if source_code in ('net', 'gen-net'):
                            month_data['export-net-gbp'] += gbp
                            month_data['export-net-kwh'] += kwh
                            if source_code == 'gen-net':
                                month_data['import-gen-kwh'] += kwh

                        elif source_code == '3rd-party':
                            month_data['export-3rd-party-gbp'] += gbp
                            month_data['export-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] -= gbp
                            month_data['used-3rd-party-kwh'] -= kwh
                            month_data['used-gbp'] -= gbp
                            month_data['used-kwh'] -= kwh
                        elif source_code == '3rd-party-reverse':
                            month_data['import-3rd-party-gbp'] += gbp
                            month_data['import-3rd-party-kwh'] += kwh
                            month_data['used-3rd-party-gbp'] += gbp
                            month_data['used-3rd-party-kwh'] += kwh
                            month_data['used-gbp'] += gbp
                            month_data['used-kwh'] += kwh
                        elif source_code == 'gen':
                            month_data['export-gen-kwh'] += kwh

                    sss = exp_ss if imp_ss is None else imp_ss
                    dc_contract = sss.dc_contract
                    if dc_contract is not None:
                        sss.contract_func(dc_contract, 'virtual_bill')(sss)
                    dc_bill = sss.dc_bill
                    gbp = dc_bill['net-gbp']

                    mop_contract = sss.mop_contract
                    if mop_contract is not None:
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                    mop_bill = sss.mop_bill
                    gbp += mop_bill['net-gbp']

                    if source_code in ('3rd-party', '3rd-party-reverse'):
                        month_data['import-3rd-party-gbp'] += gbp
                        month_data['used-3rd-party-gbp'] += gbp
                    else:
                        month_data['import-net-gbp'] += gbp
                    month_data['used-gbp'] += gbp

                    generator_type = sss.generator_type_code
                    if source_code in ('gen', 'gen-net'):
                        site_gen_types.add(generator_type)

                    era_category = sss.measurement_type
                    if CATEGORY_ORDER[site_category] < \
                            CATEGORY_ORDER[era_category]:
                        site_category = era_category

                    era_associates = set()
                    if mop_contract is not None:
                        era_associates.update({
                            s.site.code
                            for s in era.site_eras if not s.is_physical
                        })

                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = (
                                bill_finish - bill_start).total_seconds() + \
                                (30 * 60)
                            overlap_duration = (
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)
                            ).total_seconds() + (30 * 60)
                            overlap_proportion = overlap_duration / \
                                bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)

                    if imp_ss is None:
                        imp_supplier_contract_name = None
                        pc_code = exp_ss.pc_code
                    else:
                        if imp_supplier_contract is None:
                            imp_supplier_contract_name = ''
                        else:
                            imp_supplier_contract_name = \
                                imp_supplier_contract.name
                        pc_code = imp_ss.pc_code

                    if exp_ss is None:
                        exp_supplier_contract_name = None
                    else:
                        if exp_supplier_contract is None:
                            exp_supplier_contract_name = ''
                        else:
                            exp_supplier_contract_name = \
                                exp_supplier_contract.name

                    out = [
                        now, imp_mpan_core, imp_supplier_contract_name,
                        exp_mpan_core, exp_supplier_contract_name,
                        era_category, source_code, generator_type,
                        sss.supply_name, sss.msn, pc_code, site.code,
                        site.name, ','.join(sorted(list(era_associates))),
                        month_finish] + [
                        month_data[t] for t in summary_titles] + [None] + \
                        make_bill_row(title_dict['mop'], mop_bill) + [None] + \
                        make_bill_row(title_dict['dc'], dc_bill)
                    if imp_ss is None:
                        out += [None] * (len(title_dict['imp-supplier']) + 1)
                    else:
                        out += [None] + make_bill_row(
                            title_dict['imp-supplier'], imp_supplier_bill)
                    if exp_ss is not None:
                        out += [None] + make_bill_row(
                            title_dict['exp-supplier'], exp_supplier_bill)

                    for k, v in month_data.items():
                        site_month_data[k] += v
                    era_rows.append(out)

                site_rows.append([
                    now, site.code, site.name, ', '.join(
                        s.code for s in site.find_linked_sites(
                            sess, month_start, month_finish)), month_finish,
                    site_category, ', '.join(sorted(list(site_sources))),
                    ', '.join(sorted(list(site_gen_types)))
                ] + [site_month_data[k] for k in summary_titles])
                sess.rollback()
            write_spreadsheet(rf, compression, site_rows, era_rows)
            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
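
A note on the download pattern used in the example above: output is written under a 'running' name and only renamed to the 'finished' name once writing is complete, so readers never pick up a half-written report (chellow.dloads.make_names supplies the two names). A minimal standalone sketch of the same pattern, with illustrative names rather than the real helper:

import os

def write_report(rows, final_path):
    # Write to a temporary "running" file first, then rename once complete,
    # so a consumer polling for final_path never sees a partial report.
    running_path = final_path + '.running'
    with open(running_path, 'w', newline='') as f:
        for row in rows:
            f.write(','.join(str(v) for v in row) + '\n')
    os.rename(running_path, final_path)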
Example #15
def ccl(data_source, ct_month=False):
    rate_set = data_source.supplier_rate_sets['ccl-rate']

    if data_source.supply.find_era_at(
            data_source.sess, data_source.finish_date + HH) is None:
        sup_end = data_source.finish_date
    else:
        sup_end = None

    try:
        cache = data_source.caches['ccl']
    except KeyError:
        data_source.caches['ccl'] = {}
        cache = data_source.caches['ccl']

        try:
            future_funcs = data_source.caches['future_funcs']
        except KeyError:
            future_funcs = {}
            data_source.caches['future_funcs'] = future_funcs

        try:
            future_funcs[ccl_contract_id]
        except KeyError:
            future_funcs[ccl_contract_id] = {
                'start_date': None, 'func': create_future_func(1, 0)}

    if data_source.bill is None:
        for hh in data_source.hh_data:
            if hh['ct-is-month-end'] or hh['start-date'] == sup_end:
                finish_year = hh['start-date'].year
                finish_month = hh['start-date'].month
                kwh = 0
                gbp = 0
                if ct_month:
                    month_start = to_utc(
                        ct_datetime(finish_year, finish_month))
                    month_finish = hh['start-date']
                else:
                    month_start = utc_datetime(finish_year, finish_month)
                    month_finish = month_start + relativedelta(months=1) - HH

                for ds in chellow.computer.get_data_sources(
                        data_source, month_start, month_finish):
                    for datum in ds.hh_data:
                        try:
                            rate = cache[datum['start-date']]
                        except KeyError:
                            cache[datum['start-date']] = data_source.hh_rate(
                                ccl_contract_id, datum['start-date'],
                                'ccl_rate')
                            rate = cache[datum['start-date']]

                        rate_set.add(rate)
                        kwh += datum['msp-kwh']
                        gbp += datum['msp-kwh'] * rate

                if kwh > 999:
                    hh['ccl-kwh'] = kwh
                    hh['ccl-gbp'] = gbp

    elif data_source.is_last_bill_gen:
        kwh = 0
        gbp = 0
        for ds in chellow.computer.get_data_sources(
                data_source, data_source.bill_start, data_source.bill_finish):
            for hh in ds.hh_data:
                try:
                    rate = cache[hh['start-date']]
                except KeyError:
                    cache[hh['start-date']] = data_source.hh_rate(
                        ccl_contract_id, hh['start-date'], 'ccl_rate')
                    rate = cache[hh['start-date']]

                rate_set.add(rate)
                kwh += hh['msp-kwh']
                gbp += hh['msp-kwh'] * rate

        bill_seconds = (
            data_source.bill_finish - data_source.bill_start).total_seconds()
        if (kwh / bill_seconds) > ((1000 * 12) / (365 * 24 * 60 * 60)):
            data_source.hh_data[-1]['ccl-kwh'] = kwh
            data_source.hh_data[-1]['ccl-gbp'] = gbp
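
Both branches above apply what appears to be the CCL de minimis of 1,000 kWh per month: the non-bill branch compares a calendar month's kWh against 1,000 directly (kwh > 999), while the bill branch expresses it as an average rate, 12,000 kWh per year divided by the seconds in a non-leap year. A standalone check of that arithmetic:

# Threshold used in the bill branch: roughly 1,000 kWh/month as kWh per second.
threshold_kwh_per_second = (1000 * 12) / (365 * 24 * 60 * 60)

# Example: 1,200 kWh metered over a 30-day bill period.
bill_seconds = 30 * 24 * 60 * 60
kwh = 1200
print((kwh / bill_seconds) > threshold_kwh_per_second)  # True: above 1,000 kWh/month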
Example #16
def content(batch_id, bill_id, contract_id, start_date, finish_date, user):
    caches = {}
    tmp_file = sess = bill = None
    forecast_date = to_utc(Datetime.max)
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'bill_check.csv', user)
        tmp_file = open(running_name, mode='w', newline='')
        writer = csv.writer(tmp_file, lineterminator='\n')
        bills = sess.query(Bill).order_by(
            Bill.supply_id, Bill.reference).options(
            joinedload(Bill.supply),
            subqueryload(Bill.reads).joinedload(RegisterRead.present_type),
            subqueryload(Bill.reads).joinedload(RegisterRead.previous_type),
            joinedload(Bill.batch))
        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = bills.filter(Bill.batch == batch)
            contract = batch.contract
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = bills.filter(Bill.id == bill.id)
            contract = bill.batch.contract
        elif contract_id is not None:
            contract = Contract.get_by_id(sess, contract_id)
            bills = bills.join(Batch).filter(
                Batch.contract == contract, Bill.start_date <= finish_date,
                Bill.finish_date >= start_date)

        market_role_code = contract.market_role.code
        vbf = chellow.computer.contract_func(caches, contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill.")

        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, 'virtual_bill_titles')
        if virtual_bill_titles_func is None:
            raise BadRequest(
                'The contract ' + contract.name +
                " doesn't have a function virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()

        titles = [
            'batch', 'bill-reference', 'bill-type', 'bill-kwh', 'bill-net-gbp',
            'bill-vat-gbp', 'bill-start-date', 'bill-finish-date',
            'imp-mpan-core', 'exp-mpan-core', 'site-code', 'site-name',
            'covered-from', 'covered-to', 'covered-bills', 'metered-kwh']
        for t in virtual_bill_titles:
            titles.append('covered-' + t)
            titles.append('virtual-' + t)
            if t.endswith('-gbp'):
                titles.append('difference-' + t)

        writer.writerow(titles)

        bill_map = defaultdict(set)
        for bill in bills:
            bill_map[bill.supply.id].add(bill.id)

        for supply_id, bill_ids in bill_map.items():
            gaps = {}
            data_sources = {}

            while len(bill_ids) > 0:
                bill_id = list(sorted(bill_ids))[0]
                bill_ids.remove(bill_id)
                bill = sess.query(Bill).filter(Bill.id == bill_id).options(
                    joinedload(Bill.batch),
                    joinedload(Bill.bill_type),
                    joinedload(Bill.reads),
                    joinedload(Bill.supply),
                    joinedload(Bill.reads).joinedload(
                        RegisterRead.present_type),
                    joinedload(Bill.reads).joinedload(
                        RegisterRead.previous_type)).one()
                virtual_bill = {'problem': ''}
                supply = bill.supply

                read_dict = {}
                for read in bill.reads:
                    gen_start = read.present_date.replace(hour=0).replace(
                        minute=0)
                    gen_finish = gen_start + relativedelta(days=1) - HH
                    msn_match = False
                    read_msn = read.msn
                    for read_era in supply.find_eras(
                            sess, gen_start, gen_finish):
                        if read_msn == read_era.msn:
                            msn_match = True
                            break

                    if not msn_match:
                        virtual_bill['problem'] += "The MSN " + read_msn + \
                            " of the register read " + str(read.id) + \
                            " doesn't match the MSN of the era."

                    for dt, typ in [
                            (read.present_date, read.present_type),
                            (read.previous_date, read.previous_type)]:
                        key = str(dt) + "-" + read.msn
                        try:
                            if typ != read_dict[key]:
                                virtual_bill['problem'] += " Reads taken " + \
                                    "on " + str(dt) + \
                                    " have differing read types."
                        except KeyError:
                            read_dict[key] = typ

                bill_start = bill.start_date
                bill_finish = bill.finish_date

                covered_start = bill_start
                covered_finish = bill_finish
                covered_bdown = {'sum-msp-kwh': 0, 'net-gbp': 0, 'vat-gbp': 0}

                vb_elems = set()
                enlarged = True

                while enlarged:
                    enlarged = False
                    covered_elems = find_elements(bill)
                    covered_bills = OrderedDict(
                        (b.id, b) for b in sess.query(Bill).join(Batch).
                        join(Contract).join(MarketRole).filter(
                            Bill.supply == supply,
                            Bill.start_date <= covered_finish,
                            Bill.finish_date >= covered_start,
                            MarketRole.code == market_role_code).order_by(
                                Bill.start_date, Bill.issue_date))
                    while True:
                        to_del = None
                        for a, b in combinations(covered_bills.values(), 2):
                            if all(
                                    (
                                        a.start_date == b.start_date,
                                        a.finish_date == b.finish_date,
                                        a.kwh == -1 * b.kwh,
                                        a.net == -1 * b.net,
                                        a.vat == -1 * b.vat,
                                        a.gross == -1 * b.gross)):
                                to_del = (a.id, b.id)
                                break
                        if to_del is None:
                            break
                        else:
                            for k in to_del:
                                del covered_bills[k]

                    for k, covered_bill in tuple(covered_bills.items()):
                        elems = find_elements(covered_bill)
                        if elems.isdisjoint(covered_elems):
                            if k != bill.id:
                                del covered_bills[k]
                                continue
                        else:
                            covered_elems.update(elems)

                        if covered_bill.start_date < covered_start:
                            covered_start = covered_bill.start_date
                            enlarged = True
                            break

                        if covered_bill.finish_date > covered_finish:
                            covered_finish = covered_bill.finish_date
                            enlarged = True
                            break

                if len(covered_bills) == 0:
                    continue

                primary_covered_bill = None
                for covered_bill in covered_bills.values():
                    if covered_bill.id in bill_ids:
                        bill_ids.remove(covered_bill.id)
                    covered_bdown['net-gbp'] += float(covered_bill.net)
                    covered_bdown['vat-gbp'] += float(covered_bill.vat)
                    covered_bdown['sum-msp-kwh'] += float(covered_bill.kwh)
                    covered_rates = defaultdict(set)
                    for k, v in loads(covered_bill.breakdown).items():
                        if k in ('raw_lines', 'raw-lines'):
                            continue

                        if isinstance(v, list):
                            covered_rates[k].update(set(v))
                        else:
                            if isinstance(v, Decimal):
                                v = float(v)
                            try:
                                covered_bdown[k] += v
                            except KeyError:
                                covered_bdown[k] = v
                            except TypeError as detail:
                                raise BadRequest(
                                    "For key " + str(k) + " in " + str(
                                        [
                                            b.id for b in
                                            covered_bills.values()
                                        ]) + " the value " + str(v) +
                                    " can't be added to the existing value " +
                                    str(covered_bdown[k]) + ". " + str(detail))

                            if k.endswith('-gbp'):
                                elem = k[:-4]
                                covered_elems.add(elem)
                                add_gap(
                                    caches, gaps, elem,
                                    covered_bill.start_date,
                                    covered_bill.finish_date, False, v)

                    for k, v in covered_rates.items():
                        covered_bdown[k] = v.pop() if len(v) == 1 else None

                    if primary_covered_bill is None or (
                            (
                                covered_bill.finish_date -
                                covered_bill.start_date) > (
                                primary_covered_bill.finish_date -
                                primary_covered_bill.start_date)):
                        primary_covered_bill = covered_bill

                metered_kwh = 0
                for era in sess.query(Era).filter(
                        Era.supply == supply, Era.start_date <= covered_finish,
                        or_(
                            Era.finish_date == null(),
                            Era.finish_date >= covered_start)
                        ).distinct().options(
                        joinedload(Era.channels),
                        joinedload(Era.cop),
                        joinedload(Era.dc_contract),
                        joinedload(Era.exp_llfc),
                        joinedload(Era.exp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.exp_supplier_contract),
                        joinedload(Era.imp_llfc),
                        joinedload(Era.imp_llfc).joinedload(
                            Llfc.voltage_level),
                        joinedload(Era.imp_supplier_contract),
                        joinedload(Era.mop_contract),
                        joinedload(Era.mtc).joinedload(Mtc.meter_type),
                        joinedload(Era.pc),
                        joinedload(Era.supply).joinedload(Supply.dno),
                        joinedload(Era.supply).joinedload(Supply.gsp_group),
                        joinedload(Era.supply).joinedload(Supply.source)):

                    chunk_start = hh_max(covered_start, era.start_date)
                    chunk_finish = hh_min(covered_finish, era.finish_date)

                    if contract not in (
                            era.mop_contract, era.dc_contract,
                            era.imp_supplier_contract,
                            era.exp_supplier_contract):
                        virtual_bill['problem'] += ''.join(
                            (
                                "From ", hh_format(chunk_start), " to ",
                                hh_format(chunk_finish), " the contract of ",
                                "the era doesn't match the contract of the ",
                                "bill."))
                        continue

                    if contract.market_role.code == 'X':
                        polarity = contract != era.exp_supplier_contract
                    else:
                        polarity = era.imp_supplier_contract is not None
                    '''
                    pairs = []
                    last_finish = chunk_start - HH
                    for hd in chellow.computer.datum_range(
                            sess, caches, 0, chunk_start, chunk_finish):
                        if hd['utc-is-month-end'] or hd['ct-is-month-end']:
                            end_date = hd['start-date']
                            pairs.append((last_finish + HH, end_date))
                            last_finish = end_date
                    if hd['start-date'] > last_finish:
                        pairs.append((last_finish + HH, hd['start-date']))

                    for ss_start, ss_finish in pairs:
                    '''
                    try:
                        ds_key = (
                            chunk_start, chunk_finish, forecast_date, era.id,
                            polarity, primary_covered_bill.id)
                        data_source = data_sources[ds_key]
                    except KeyError:
                        data_source = data_sources[ds_key] = \
                            chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, polarity, caches, primary_covered_bill)
                        vbf(data_source)

                    if data_source.measurement_type == 'hh':
                        metered_kwh += sum(
                            h['msp-kwh'] for h in data_source.hh_data)
                    else:
                        ds = chellow.computer.SupplySource(
                            sess, chunk_start, chunk_finish, forecast_date,
                            era, polarity, caches)
                        metered_kwh += sum(
                            h['msp-kwh'] for h in ds.hh_data)

                    if market_role_code == 'X':
                        vb = data_source.supplier_bill
                    elif market_role_code == 'C':
                        vb = data_source.dc_bill
                    elif market_role_code == 'M':
                        vb = data_source.mop_bill
                    else:
                        raise BadRequest("Odd market role.")

                    for k, v in vb.items():
                        try:
                            if isinstance(v, set):
                                virtual_bill[k].update(v)
                            else:
                                virtual_bill[k] += v
                        except KeyError:
                            virtual_bill[k] = v
                        except TypeError as detail:
                            raise BadRequest(
                                "For key " + str(k) + " and value " +
                                str(v) + ". " + str(detail))

                        if all((k.endswith('-gbp'), k != 'net-gbp', v != 0)):
                            add_gap(
                                caches, gaps, k[:-4], chunk_start,
                                chunk_finish, True, v)

                    for k in virtual_bill.keys():
                        if k.endswith('-gbp'):
                            vb_elems.add(k[:-4])

                long_map = {}
                vb_keys = set(virtual_bill.keys())
                for elem in sorted(vb_elems, key=len, reverse=True):
                    els = long_map[elem] = set()
                    for k in tuple(vb_keys):
                        if k.startswith(elem + '-'):
                            els.add(k)
                            vb_keys.remove(k)

                for elem in vb_elems.difference(covered_elems):
                    for k in long_map[elem]:
                        del virtual_bill[k]

                try:
                    del virtual_bill['net-gbp']
                except KeyError:
                    pass

                virtual_bill['net-gbp'] = sum(
                    v for k, v in virtual_bill.items() if k.endswith('-gbp'))

                era = supply.find_era_at(sess, bill_finish)
                if era is None:
                    imp_mpan_core = exp_mpan_core = None
                    site_code = site_name = None
                    virtual_bill['problem'] += \
                        "This bill finishes before or after the supply. "
                else:
                    imp_mpan_core = era.imp_mpan_core
                    exp_mpan_core = era.exp_mpan_core

                    site = sess.query(Site).join(SiteEra).filter(
                        SiteEra.is_physical == true(),
                        SiteEra.era == era).one()
                    site_code = site.code
                    site_name = site.name

                # Find bill to use for header data
                if bill.id not in covered_bills:
                    for cbill in covered_bills.values():
                        if bill.batch == cbill.batch:
                            bill = cbill

                values = [
                    bill.batch.reference, bill.reference, bill.bill_type.code,
                    bill.kwh, bill.net, bill.vat, hh_format(bill_start),
                    hh_format(bill_finish), imp_mpan_core, exp_mpan_core,
                    site_code, site_name, hh_format(covered_start),
                    hh_format(covered_finish), ':'.join(
                        str(i).replace(',', '') for i in covered_bills.keys()),
                    metered_kwh]

                for title in virtual_bill_titles:
                    try:
                        cov_val = covered_bdown[title]
                        values.append(cov_val)
                        del covered_bdown[title]
                    except KeyError:
                        cov_val = None
                        values.append('')

                    try:
                        virt_val = csv_make_val(virtual_bill[title])
                        values.append(virt_val)
                        del virtual_bill[title]
                    except KeyError:
                        virt_val = 0
                        values.append('')

                    if title.endswith('-gbp'):
                        if isinstance(virt_val, (int, float, Decimal)):
                            if isinstance(cov_val, (int, float, Decimal)):
                                values.append(float(cov_val) - float(virt_val))
                            else:
                                values.append(0 - float(virt_val))
                        else:
                            values.append('')

                for title in sorted(virtual_bill.keys()):
                    virt_val = csv_make_val(virtual_bill[title])
                    values += ['virtual-' + title, virt_val]
                    if title in covered_bdown:
                        values += ['covered-' + title, covered_bdown[title]]
                    else:
                        values += ['', '']

                writer.writerow(values)

                for bill in sess.query(Bill).filter(
                        Bill.supply == supply,
                        Bill.start_date <= covered_finish,
                        Bill.finish_date >= covered_start):

                    for k, v in loads(bill.breakdown).items():
                        if k.endswith('-gbp'):
                            add_gap(
                                caches, gaps, k[:-4], bill.start_date,
                                bill.finish_date, False, v)

                # Avoid long-running transactions
                sess.rollback()

            clumps = []
            for element, elgap in sorted(gaps.items()):
                for start_date, hhgap in sorted(elgap.items()):
                    if hhgap['has_virtual'] and not hhgap['has_covered']:

                        if len(clumps) == 0 or not all(
                                (
                                    clumps[-1]['element'] == element,
                                    clumps[-1]['finish_date'] + HH ==
                                    start_date)):
                            clumps.append(
                                {
                                    'element': element,
                                    'start_date': start_date,
                                    'finish_date': start_date,
                                    'gbp': hhgap['gbp']})
                        else:
                            clumps[-1]['finish_date'] = start_date

            for clump in clumps:
                vals = dict((title, '') for title in titles)
                vals['covered-problem'] = '_'.join(
                    (
                        'missing', clump['element'], 'supplyid',
                        str(supply.id), 'from',
                        hh_format(clump['start_date'])))
                vals['imp-mpan-core'] = imp_mpan_core
                vals['exp-mpan-core'] = exp_mpan_core
                vals['batch'] = 'missing_bill'
                vals['bill-start-date'] = hh_format(clump['start_date'])
                vals['bill-finish-date'] = hh_format(clump['finish_date'])
                vals['difference-net-gbp'] = clump['gbp']
                writer.writerow(vals[title] for title in titles)

            # Avoid long-running transactions
            sess.rollback()

    except BadRequest as e:
        if bill is None:
            prefix = "Problem: "
        else:
            prefix = "Problem with bill " + str(bill.id) + ':'
        tmp_file.write(prefix + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        if sess is not None:
            sess.close()
        tmp_file.close()
        os.rename(running_name, finished_name)
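
The gap-clumping step near the end of the function merges consecutive half-hours where a charge element appears in the virtual bills but in no covered bill, producing one 'missing_bill' row per contiguous run. A standalone sketch of that coalescing logic with plain datetimes (element names and dates are illustrative):

from datetime import datetime, timedelta

HH = timedelta(minutes=30)

# Half-hour starts where an element is virtual-only, in element then date order.
gap_starts = [
    ('duos-fixed', datetime(2020, 4, 1, 0, 0)),
    ('duos-fixed', datetime(2020, 4, 1, 0, 30)),
    ('duos-fixed', datetime(2020, 4, 1, 2, 0)),  # not contiguous with the previous one
]

clumps = []
for element, start in gap_starts:
    if clumps and clumps[-1]['element'] == element and \
            clumps[-1]['finish_date'] + HH == start:
        clumps[-1]['finish_date'] = start  # extend the current clump
    else:
        clumps.append(
            {'element': element, 'start_date': start, 'finish_date': start})

print(len(clumps))  # 2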
Example #17
def to_vf(dmy):
    dt = to_date(dmy)
    if dt is None:
        return ''
    else:
        return to_utc(dt).strftime(FMT)
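
A usage sketch of the 'format if present, otherwise empty string' pattern above. Neither FMT nor to_date is shown in this excerpt, so the '%Y%m%d' output format and the dd/mm/yyyy input are assumptions for illustration only:

from datetime import datetime

FMT = '%Y%m%d'  # assumed output format, illustration only

def to_vf_sketch(dmy):
    # Empty string for a missing date, otherwise the formatted string.
    if not dmy:
        return ''
    return datetime.strptime(dmy, '%d/%m/%Y').strftime(FMT)

print(to_vf_sketch(''))            # ''
print(to_vf_sketch('30/04/2020'))  # '20200430'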
Example #18
def datum_2012_02_23(ds, hh):
    start_date = hh["start-date"]
    dno_cache = ds.caches["dno"][ds.dno_code]

    if not ds.full_channels and hh["msp-kwh"] == 0:
        imp_msp_kvarh, exp_msp_kvarh = 0, 0
    else:
        imp_msp_kvarh, exp_msp_kvarh = hh["imp-msp-kvarh"], hh["exp-msp-kvarh"]

    try:
        gsp_group_cache = dno_cache[ds.gsp_group_code]
    except KeyError:
        gsp_group_cache = dno_cache[ds.gsp_group_code] = {}

    try:
        tariff = gsp_group_cache["tariffs"][ds.pc_code][ds.llfc_code][start_date]
    except KeyError:
        try:
            tariffs_cache = gsp_group_cache["tariffs"]
        except KeyError:
            tariffs_cache = gsp_group_cache["tariffs"] = {}

        try:
            pc_cache = tariffs_cache[ds.pc_code]
        except KeyError:
            pc_cache = tariffs_cache[ds.pc_code] = {}

        try:
            tariffs = pc_cache[ds.llfc_code]
        except KeyError:
            tariffs = pc_cache[ds.llfc_code] = {}

        try:
            tariff = tariffs[start_date]
        except KeyError:
            tariff = None
            try:
                tariff_list = get_file_rates(ds.caches, ds.dno_code, start_date)[
                    ds.gsp_group_code
                ]["tariffs"]
            except KeyError as e:
                raise BadRequest(str(e))

            for llfcs_pcs, tf in tariff_list.items():
                key = llfcs_pcs.split("_")
                llfcs = [v.strip() for v in key[0].split(",")]
                if len(key) == 2:
                    pcs = [v.strip() for v in key[1].split(",")]
                else:
                    pcs = None

                if ds.llfc_code in llfcs and (pcs is None or ds.pc_code in pcs):
                    tariff = tf
                    break

            if tariff is None:
                raise BadRequest(
                    f"For the DNO {ds.dno_code} and timestamp {hh_format(start_date)} "
                    f"and GSP group {ds.gsp_group_code}, the LLFC {ds.llfc_code} "
                    f"with PC {ds.pc_code} can't be found in the 'tariffs' section."
                )

            tariffs[start_date] = tariff

    try:
        band = gsp_group_cache["bands"][start_date]
    except KeyError:
        try:
            bands_cache = gsp_group_cache["bands"]
        except KeyError:
            bands_cache = gsp_group_cache["bands"] = {}

        try:
            band = bands_cache[start_date]
        except KeyError:
            band = "green"
            ct_hr = hh["ct-decimal-hour"]
            weekend = hh["ct-day-of-week"] > 4
            try:
                slots = get_file_rates(ds.caches, ds.dno_code, start_date)[
                    ds.gsp_group_code
                ]["bands"]
            except KeyError as e:
                raise BadRequest(str(e))

            for slot in slots:
                slot_weekend = slot["weekend"] == 1
                if slot_weekend == weekend and slot["start"] <= ct_hr < slot["finish"]:
                    band = slot["band"]
                    break

            bands_cache[start_date] = band

    try:
        laf = dno_cache["lafs"][ds.llfc_code][start_date]
    except KeyError:
        try:
            laf_cache = dno_cache["lafs"]
        except KeyError:
            laf_cache = dno_cache["lafs"] = {}

        try:
            laf_cache_llfc = laf_cache[ds.llfc_code]
        except KeyError:
            laf_cache_llfc = laf_cache[ds.llfc_code] = {}

        try:
            laf = laf_cache_llfc[start_date]
        except KeyError:
            dno_code = ds.dno_code
            if dno_code in ("88", "99"):
                laf_cache_llfc[start_date] = 1
            else:

                for (laf,) in ds.sess.execute(
                    select(Laf)
                    .join(Llfc)
                    .join(Party)
                    .where(
                        Party.dno_code == ds.dno_code,
                        Llfc.code == ds.llfc_code,
                        Laf.timestamp >= ds.start_date,
                        Laf.timestamp <= ds.finish_date,
                    )
                ):
                    laf_cache_llfc[laf.timestamp] = float(laf.value)

            try:
                laf = laf_cache_llfc[start_date]
            except KeyError:
                for cand in (hh["hist-start"], ds.forecast_date):
                    laf_obj = ds.sess.execute(
                        select(Laf)
                        .join(Llfc)
                        .join(Party)
                        .where(
                            Party.dno_code == ds.dno_code,
                            Llfc.code == ds.llfc_code,
                            Laf.timestamp == cand,
                        )
                    ).scalar_one_or_none()
                    if laf_obj is not None:
                        laf_cache_llfc[start_date] = float(laf_obj.value)
                        break

                try:
                    laf = laf_cache_llfc[start_date]
                except KeyError:
                    raise BadRequest(
                        f"Missing LAF for DNO {ds.dno_code} and LLFC {ds.llfc_code} "
                        f"and timestamps {hh_format(start_date)}, "
                        f"{hh_format(hh['hist-start'])} and "
                        f"{hh_format(ds.forecast_date)}"
                    )

    hh["laf"] = laf
    hh["gsp-kwh"] = laf * hh["msp-kwh"]
    hh["gsp-kw"] = hh["gsp-kwh"] * 2

    kvarh = max(
        max(imp_msp_kvarh, exp_msp_kvarh) - (0.95 ** -2 - 1) ** 0.5 * hh["msp-kwh"], 0
    )

    hh["duos-reactive-kvarh"] = kvarh

    duos_reactive_rate = tariff["gbp-per-kvarh"]
    if duos_reactive_rate is not None:
        duos_reactive_rate = float(duos_reactive_rate)
        if duos_reactive_rate != 0:
            hh["duos-reactive-rate"] = duos_reactive_rate
            hh["duos-reactive-gbp"] = kvarh * duos_reactive_rate

    rate = float(tariff[KEYS[band]["tariff-rate"]])
    hh[KEYS[band]["bill-rate"]] = rate
    hh[KEYS[band]["kwh"]] = hh["msp-kwh"]
    hh[KEYS[band]["gbp"]] = rate * hh["msp-kwh"]

    if hh["ct-decimal-hour"] == 23.5 and not ds.is_displaced:
        hh["duos-fixed-days"] = 1
        rate = float(tariff["gbp-per-mpan-per-day"])
        hh["duos-fixed-rate"] = rate
        hh["duos-fixed-gbp"] = rate

        hh["duos-availability-days"] = 1
        kva = ds.sc
        hh["duos-availability-kva"] = kva
        rate = float(tariff["gbp-per-kva-per-day"])
        hh["duos-availability-rate"] = rate
        hh["duos-availability-gbp"] = rate * kva

    if hh["ct-is-month-end"] and not ds.is_displaced:
        month_to = start_date
        month_from = to_utc(ct_datetime(hh["ct-year"], hh["ct-month"], 1))
        md_kva = 0
        days_in_month = 0
        for dsc in ds.get_data_sources(month_from, month_to):
            for datum in dsc.hh_data:
                md_kva = max(
                    md_kva,
                    (
                        datum["msp-kw"] ** 2
                        + max(datum["imp-msp-kvar"], datum["exp-msp-kvar"]) ** 2
                    )
                    ** 0.5,
                )
                if datum["ct-decimal-hour"] == 0:
                    days_in_month += 1

        excess_kva = max(md_kva - ds.sc, 0)

        if "excess-gbp-per-kva-per-day" in tariff and excess_kva != 0:
            rate = float(tariff["excess-gbp-per-kva-per-day"])
            hh["duos-excess-availability-kva"] = excess_kva
            rate = float(tariff["excess-gbp-per-kva-per-day"])
            hh["duos-excess-availability-rate"] = rate
            hh["duos-excess-availability-days"] = days_in_month
            hh["duos-excess-availability-gbp"] = rate * excess_kva * days_in_month
Example #19
def get_date_ct(row, name, datemode):
    val = get_value(row, name)
    if isinstance(val, float):
        return to_utc(Datetime(*xldate_as_tuple(val, datemode)))
Example #20
def test_to_date():
    date_str = "20200430"
    row = [date_str]
    dt = chellow.bill_parser_haven_csv._to_date(row, 0)
    assert dt == to_utc(ct_datetime(2020, 4, 30))
Example #21
    def run(self):
        while not self.stopped.isSet():
            if self.lock.acquire(False):
                sess = None
                try:
                    sess = Session()
                    self.log("Starting to check TLMs.")
                    contract = Contract.get_non_core_by_name(sess, 'tlms')
                    latest_rs = sess.query(RateScript).filter(
                        RateScript.contract_id == contract.id).order_by(
                        RateScript.start_date.desc()).first()
                    latest_rs_id = latest_rs.id
                    next_month_start = latest_rs.start_date + \
                        relativedelta(months=1)
                    next_month_finish = latest_rs.start_date + \
                        relativedelta(months=2) - HH

                    now = utc_datetime_now()
                    if now > next_month_start:
                        self.log(
                            "Checking to see if data is available from " +
                            str(next_month_start) + " to " +
                            str(next_month_finish) + " on Elexon Portal.")
                        config = Contract.get_non_core_by_name(
                            sess, 'configuration')
                        props = config.make_properties()

                        scripting_key = props.get(
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                        if scripting_key is None:
                            raise BadRequest(
                                "The property " +
                                ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                                " cannot be found in the configuration " +
                                "properties.")

                        contract_props = contract.make_properties()
                        url_str = ''.join(
                            (
                                contract_props['url'],
                                'file/download/TLM_FILE?key=',
                                scripting_key))

                        r = requests.get(url_str)
                        parser = csv.reader(
                            (l.decode() for l in r.iter_lines()),
                            delimiter=',', quotechar='"')
                        self.log("Opened " + url_str + ".")

                        next(parser, None)
                        month_tlms = {}
                        for values in parser:
                            hh_date_ct = to_ct(
                                Datetime.strptime(values[0], "%d/%m/%Y"))
                            hh_date = to_utc(hh_date_ct)
                            hh_date += relativedelta(minutes=30*int(values[2]))
                            if next_month_start <= hh_date <= \
                                    next_month_finish:
                                month_tlms[key_format(hh_date)] = {
                                    'off-taking': values[3],
                                    'delivering': values[4]}

                        if key_format(next_month_finish) in month_tlms:
                            self.log("The whole month's data is there.")
                            script = "def tlms():\n    return {\n" + \
                                ',\n'.join(
                                    "'" + k + "': " +
                                    month_tlms[k]['off-taking'] for k in
                                    sorted(month_tlms.keys())) + "}"
                            contract = Contract.get_non_core_by_name(
                                sess, 'tlms')
                            rs = RateScript.get_by_id(sess, latest_rs_id)
                            contract.update_rate_script(
                                sess, rs, rs.start_date,
                                rs.start_date + relativedelta(months=2) - HH,
                                rs.script)
                            sess.flush()
                            contract.insert_rate_script(
                                sess, rs.start_date + relativedelta(months=1),
                                script)
                            sess.commit()
                            self.log("Added new rate script.")
                        else:
                            msg = "There isn't a whole month there yet."
                            if len(month_tlms) > 0:
                                msg += "The last date is " + \
                                    sorted(month_tlms.keys())[-1]
                            self.log(msg)
                except BaseException:
                    self.log("Outer problem " + traceback.format_exc())
                    sess.rollback()
                finally:
                    if sess is not None:
                        sess.close()
                    self.lock.release()
                    self.log("Finished checking TLM rates.")

            self.going.wait(30 * 60)
            self.going.clear()
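
For reference, the rate script assembled at the 'script = ...' step is plain Python source: a tlms() function returning a dict of off-taking values keyed by formatted half-hour start. An illustrative example of the generated text (the exact key format comes from key_format, which isn't shown here, and the figures are made up):

# Illustrative only; keys come from key_format and values from the portal file.
script = (
    "def tlms():\n    return {\n"
    "'2015 01 01 00 30': 1.00123,\n"
    "'2015 01 01 01 00': 1.00119}"
)
print(script)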
Example #22
def get_date(title_row, row, name, datemode):
    val = get_value(title_row, row, name)
    if isinstance(val, float):
        return to_utc(Datetime(*xldate_as_tuple(val, datemode)))
    else:
        return None
Example #23
def _process_line(cache, sess, contract, log_func, values):
    hh_date_ct = to_ct(Datetime.strptime(values[0], "%d/%m/%Y"))
    hh_date = to_utc(hh_date_ct)
    hh_date += relativedelta(minutes=30 * (int(values[2]) - 1))
    run = values[1]
    gsp_group_code = GSP_GROUP_LOOKUP[values[3]]
    off_taking_str = values[4]

    try:
        off_taking = Decimal(off_taking_str)
    except InvalidOperation as e:
        raise BadRequest("Problem parsing 'off-taking' field '" +
                         off_taking_str + "' in the row " + str(values) +
                         ". " + str(e))

    delivering = Decimal(values[5])

    try:
        rs, rates, rts = cache[hh_date.year][hh_date.month]
    except KeyError:
        _save_cache(sess, cache)
        try:
            yr_cache = cache[hh_date.year]
        except KeyError:
            yr_cache = cache[hh_date.year] = {}

        rs = (sess.query(RateScript).filter(
            RateScript.contract == contract,
            RateScript.start_date <= hh_date,
            or_(RateScript.finish_date == null(),
                RateScript.finish_date >= hh_date),
        ).first())
        while rs is None:
            log_func("There's no rate script at " + hh_format(hh_date) + ".")
            latest_rs = (sess.query(RateScript).filter(
                RateScript.contract == contract).order_by(
                    RateScript.start_date.desc()).first())
            contract.update_rate_script(
                sess,
                latest_rs,
                latest_rs.start_date,
                latest_rs.start_date + relativedelta(months=2) - HH,
                loads(latest_rs.script),
            )
            new_rs_start = latest_rs.start_date + relativedelta(months=1)
            contract.insert_rate_script(sess, new_rs_start, {})
            sess.commit()
            log_func("Added a rate script starting at " +
                     hh_format(new_rs_start) + ".")

            rs = (sess.query(RateScript).filter(
                RateScript.contract == contract,
                RateScript.start_date <= hh_date,
                or_(
                    RateScript.finish_date == null(),
                    RateScript.finish_date >= hh_date,
                ),
            ).first())

        rates = loads(rs.script)

        try:
            rts = rates["tlms"]
        except KeyError:
            rts = rates["tlms"] = {}

        yr_cache[hh_date.month] = rs, rates, rts
        sess.rollback()

    key = key_format(hh_date)
    try:
        existing = rts[key]
    except KeyError:
        existing = rts[key] = {}

    try:
        group = existing[gsp_group_code]
    except KeyError:
        group = existing[gsp_group_code] = {}

    if run not in group:
        group[run] = {"off_taking": off_taking, "delivering": delivering}

        log_func("Found rate at " + hh_format(hh_date) + " for GSP Group " +
                 gsp_group_code + " and run " + run + ".")
Example #24
def test_to_utc():
    dt_utc = to_utc(ct_datetime(2014, 9, 6, 1))
    assert dt_utc == Datetime(2014, 9, 6, 0, 0, tzinfo=utc)
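
The test above uses a BST timestamp, where local time is an hour ahead of UTC; for a winter timestamp the two coincide. A complementary sketch, assuming the same helpers imported by the test module above:

def test_to_utc_winter():
    # 6 January is outside BST, so the wall-clock time carries over unchanged.
    dt_utc = to_utc(ct_datetime(2014, 1, 6, 1))
    assert dt_utc == Datetime(2014, 1, 6, 1, 0, tzinfo=utc)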
Example #25
def to_finish_date(component):
    d = to_ct_date(component)
    return to_utc(ct_datetime(d.year, d.month, d.day, 23, 30))
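
A usage note for the helper above; the component format handled by to_ct_date isn't shown in this excerpt, so the input is assumed:

# Illustrative only: suppose `component` is resolved by to_ct_date to 30 April 2020.
finish = to_finish_date(component)
# finish == to_utc(ct_datetime(2020, 4, 30, 23, 30)), i.e. 2020-04-30 22:30 UTC,
# the last half-hour of that civil day (BST runs an hour ahead of UTC).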
Example #26
def _process_hh(ds, rate_period, est_kw, hh):
    month_start, month_finish = next(
        c_months_u(start_year=hh["ct-year"], start_month=hh["ct-month"]))

    month_start_ct = to_ct(month_start)
    if month_start_ct.month > 3:
        year = month_start_ct.year
    else:
        year = month_start_ct.year - 1
    financial_year_start = to_utc(ct_datetime(year, 4, 1))
    last_financial_year_start = to_utc(ct_datetime(year - 1, 4, 1))
    financial_year_finish = to_utc(ct_datetime(year + 1, 3, 31, 23, 30))

    est_triad_kws = []
    earliest_triad = None
    for dt in get_file_rates(ds.caches, "triad_dates",
                             last_financial_year_start)["triad_dates"]:
        triad_hh = None
        earliest_triad = hh_min(earliest_triad, dt)
        try:
            d = next(ds.get_data_sources(dt, dt, financial_year_start))
            chellow.duos.duos_vb(d)
            triad_hh = d.hh_data[0]

            while dt < financial_year_start:
                dt += relativedelta(years=1)

            for d in ds.get_data_sources(dt, dt, financial_year_start):
                chellow.duos.duos_vb(d)
                datum = d.hh_data[0]
                triad_hh["laf"] = datum["laf"]
                triad_hh["gsp-kw"] = datum["laf"] * triad_hh["msp-kw"]
        except StopIteration:
            triad_hh = {
                "hist-start": dt,
                "msp-kw": 0,
                "start-date": dt,
                "status": "before start of MPAN",
                "laf": 1,
                "gsp-kw": 0,
            }
        est_triad_kws.append(triad_hh)

    if ds.site is None:
        era = ds.supply.find_era_at(ds.sess, earliest_triad)
        if (
            era is None
            or era.get_channel(ds.sess, ds.is_import, "ACTIVE") is None
        ) and est_kw is None:
            est_kw = 0.85 * max(datum["msp-kwh"] for datum in ds.hh_data) * 2
        if est_kw is not None:
            for est_datum in est_triad_kws:
                est_datum["msp-kw"] = est_kw
                est_datum["gsp-kw"] = est_datum["msp-kw"] * est_datum["laf"]

    gsp_kw = 0
    for i, triad_hh in enumerate(est_triad_kws):
        triad_prefix = "triad-estimate-" + str(i + 1)
        hh[triad_prefix + "-date"] = triad_hh["hist-start"]
        hh[triad_prefix + "-msp-kw"] = triad_hh["msp-kw"]
        hh[triad_prefix + "-status"] = triad_hh["status"]
        hh[triad_prefix + "-laf"] = triad_hh["laf"]
        hh[triad_prefix + "-gsp-kw"] = triad_hh["gsp-kw"]
        gsp_kw += triad_hh["gsp-kw"]

    hh["triad-estimate-gsp-kw"] = gsp_kw / 3
    polarity = "import" if ds.llfc.is_import else "export"
    gsp_group_code = ds.gsp_group_code
    rate = float(
        get_file_rates(
            ds.caches, "triad_rates",
            month_start)["triad_gbp_per_gsp_kw"][polarity][gsp_group_code])

    hh["triad-estimate-rate"] = rate

    est_triad_gbp = hh["triad-estimate-rate"] * hh["triad-estimate-gsp-kw"]

    if rate_period == "monthly":
        total_intervals = 12

        est_intervals = 1
        hh["triad-estimate-months"] = est_intervals
    else:
        dt = financial_year_start
        total_intervals = 0
        while dt <= financial_year_finish:
            total_intervals += 1
            dt += relativedelta(days=1)

        est_intervals = 0
        for d in ds.get_data_sources(month_start, month_finish):
            for h in d.hh_data:
                if h["ct-decimal-hour"] == 0:
                    est_intervals += 1

        hh["triad-estimate-days"] = est_intervals

    hh["triad-estimate-gbp"] = est_triad_gbp / total_intervals * est_intervals

    if hh["ct-month"] == 3:
        triad_kws = []
        for t_date in get_file_rates(ds.caches, "triad_dates",
                                     month_start)["triad_dates"]:
            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (ds.supplier_contract is None
                        or d.supplier_contract == ds.supplier_contract):
                    chellow.duos.duos_vb(d)
                    thh = d.hh_data[0]
                else:
                    thh = {
                        "hist-start": t_date,
                        "msp-kw": 0,
                        "start-date": t_date,
                        "status": "before contract",
                        "laf": "before contract",
                        "gsp-kw": 0,
                    }
            except StopIteration:
                thh = {
                    "hist-start": t_date,
                    "msp-kw": 0,
                    "start-date": t_date,
                    "status": "before start of supply",
                    "laf": "before start of supply",
                    "gsp-kw": 0,
                }

            while t_date < financial_year_start:
                t_date += relativedelta(years=1)

            try:
                d = next(ds.get_data_sources(t_date, t_date))
                if (ds.supplier_contract is None
                        or d.supplier_contract == ds.supplier_contract):
                    chellow.duos.duos_vb(d)
                    thh["laf"] = d.hh_data[0]["laf"]
                    thh["gsp-kw"] = thh["laf"] * thh["msp-kw"]
            except StopIteration:
                pass

            triad_kws.append(thh)
        gsp_kw = 0

        for i, triad_hh in enumerate(triad_kws):
            pref = "triad-actual-" + str(i + 1)
            hh[pref + "-date"] = triad_hh["start-date"]
            hh[pref + "-msp-kw"] = triad_hh["msp-kw"]
            hh[pref + "-status"] = triad_hh["status"]
            hh[pref + "-laf"] = triad_hh["laf"]
            hh[pref + "-gsp-kw"] = triad_hh["gsp-kw"]
            gsp_kw += triad_hh["gsp-kw"]

        hh["triad-actual-gsp-kw"] = gsp_kw / 3
        polarity = "import" if ds.llfc.is_import else "export"
        gsp_group_code = ds.gsp_group_code
        tot_rate = 0
        for start_date, finish_date, script in get_file_scripts("triad_rates"):
            if start_date <= financial_year_finish and not hh_before(
                    finish_date, financial_year_start):
                start_month = to_ct(start_date).month
                if start_month < 4:
                    start_month += 12

                if finish_date is None:
                    finish_month = 3
                else:
                    finish_month = to_ct(finish_date).month

                if finish_month < 4:
                    finish_month += 12

                rt = get_file_rates(
                    ds.caches, "triad_rates", start_date
                )["triad_gbp_per_gsp_kw"][polarity][gsp_group_code]
                tot_rate += (finish_month - start_month + 1) * float(rt)

        rate = tot_rate / 12
        hh["triad-actual-rate"] = rate

        hh["triad-actual-gbp"] = hh["triad-actual-rate"] * hh[
            "triad-actual-gsp-kw"]

        era = ds.supply.find_era_at(ds.sess, month_finish)
        est_intervals = 0

        interval = (relativedelta(
            months=1) if rate_period == "monthly" else relativedelta(days=1))

        dt = month_finish
        while era is not None and dt > financial_year_start:
            est_intervals += 1
            dt -= interval
            if hh_after(dt, era.finish_date):
                era = ds.supply.find_era_at(ds.sess, dt)

        if rate_period == "monthly":
            hh["triad-all-estimates-months"] = est_intervals
        else:
            hh["triad-all-estimates-days"] = est_intervals
        hh["triad-all-estimates-gbp"] = (est_triad_gbp / total_intervals *
                                         est_intervals * -1)
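
# Worked sketch (illustrative numbers, not real rates) of the weighting above:
# months are shifted so the financial year runs Apr(4)..Mar(15), each rate
# script contributes (finish_month - start_month + 1) months, and the total is
# averaged over the 12-month year.
scripts = [
    (4, 9, 50.0),    # Apr-Sep at 50 GBP per GSP kW
    (10, 15, 56.0),  # Oct-Mar at 56 GBP per GSP kW
]
tot_rate = sum((fin - start + 1) * rate for start, fin, rate in scripts)
print(tot_rate / 12)  # 53.0, the time-weighted annual triad rate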
Пример #27
0
def content(
    batch_id,
    bill_id,
    contract_id,
    start_date,
    finish_date,
    user,
    mpan_cores,
    fname_additional,
):
    caches = {}
    tmp_file = sess = supply_id = report_run = None
    forecast_date = to_utc(Datetime.max)

    try:
        running_name, finished_name = chellow.dloads.make_names(
            f"bill_check_{fname_additional}.csv", user)
        tmp_file = open(running_name, mode="w", newline="")
        writer = csv.writer(tmp_file, lineterminator="\n")

        sess = Session()
        report_run = ReportRun.insert(sess, "bill_check", user,
                                      fname_additional, {})

        bills = (sess.query(Bill).order_by(
            Bill.supply_id, Bill.reference).options(
                joinedload(Bill.supply),
                subqueryload(Bill.reads).joinedload(RegisterRead.present_type),
                subqueryload(Bill.reads).joinedload(
                    RegisterRead.previous_type),
                joinedload(Bill.batch),
            ))

        if len(mpan_cores) > 0:
            mpan_cores = list(map(parse_mpan_core, mpan_cores))
            supply_ids = [
                i[0] for i in sess.query(Era.supply_id).filter(
                    or_(
                        Era.imp_mpan_core.in_(mpan_cores),
                        Era.exp_mpan_core.in_(mpan_cores),
                    )).distinct()
            ]
            bills = bills.join(Supply).filter(Supply.id.in_(supply_ids))

        if batch_id is not None:
            batch = Batch.get_by_id(sess, batch_id)
            bills = bills.filter(Bill.batch == batch)
            contract = batch.contract
        elif bill_id is not None:
            bill = Bill.get_by_id(sess, bill_id)
            bills = bills.filter(Bill.id == bill.id)
            contract = bill.batch.contract
        elif contract_id is not None:
            contract = Contract.get_by_id(sess, contract_id)
            bills = bills.join(Batch).filter(
                Batch.contract == contract,
                Bill.start_date <= finish_date,
                Bill.finish_date >= start_date,
            )

        vbf = chellow.computer.contract_func(caches, contract, "virtual_bill")
        if vbf is None:
            raise BadRequest(
                f"The contract {contract.name} doesn't have a function virtual_bill."
            )

        virtual_bill_titles_func = chellow.computer.contract_func(
            caches, contract, "virtual_bill_titles")
        if virtual_bill_titles_func is None:
            raise BadRequest(
                f"The contract {contract.name} doesn't have a function "
                f"virtual_bill_titles.")
        virtual_bill_titles = virtual_bill_titles_func()

        titles = [
            "batch",
            "bill-reference",
            "bill-type",
            "bill-kwh",
            "bill-net-gbp",
            "bill-vat-gbp",
            "bill-start-date",
            "bill-finish-date",
            "imp-mpan-core",
            "exp-mpan-core",
            "site-code",
            "site-name",
            "covered-from",
            "covered-to",
            "covered-bills",
            "metered-kwh",
        ]
        for t in virtual_bill_titles:
            titles.append("covered-" + t)
            titles.append("virtual-" + t)
            if t.endswith("-gbp"):
                titles.append("difference-" + t)

        writer.writerow(titles)

        bill_map = defaultdict(set, {})
        for bill in bills:
            bill_map[bill.supply.id].add(bill.id)

        for supply_id in bill_map.keys():
            _process_supply(
                sess,
                caches,
                supply_id,
                bill_map,
                forecast_date,
                contract,
                vbf,
                virtual_bill_titles,
                writer,
                titles,
                report_run,
            )

        report_run.update("finished")
        sess.commit()

    except BadRequest as e:
        if report_run is not None:
            report_run.update("problem")
        if supply_id is None:
            prefix = "Problem: "
        else:
            prefix = f"Problem with supply {supply_id}: "
        tmp_file.write(prefix + e.description)
    except BaseException:
        if report_run is not None:
            report_run.update("interrupted")
        if supply_id is None:
            prefix = "Problem: "
        else:
            prefix = f"Problem with supply {supply_id}: "
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        tmp_file.write(prefix + msg)
    finally:
        if sess is not None:
            sess.close()
        if tmp_file is not None:
            tmp_file.close()
            os.rename(running_name, finished_name)
Пример #28
0
def parse_date(date_str):
    if len(date_str) == 0:
        return None
    else:
        return to_utc(to_ct(Datetime.strptime(date_str, "%d/%m/%Y")))
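
# A minimal self-contained sketch of parse_date above. The real to_ct/to_utc
# helpers come from the surrounding codebase; the zoneinfo stand-ins here are
# assumptions so the example runs on its own.
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

def _to_ct(dt):
    # Assumption: interpret a naive datetime as UK civil (clock) time.
    return dt.replace(tzinfo=ZoneInfo("Europe/London"))

def _to_utc(dt):
    # Assumption: convert an aware datetime to UTC.
    return dt.astimezone(timezone.utc)

def parse_date_sketch(date_str):
    if len(date_str) == 0:
        return None
    return _to_utc(_to_ct(datetime.strptime(date_str, "%d/%m/%Y")))

print(parse_date_sketch("01/07/2021"))  # 2021-06-30 23:00:00+00:00 (BST day start)
print(parse_date_sketch(""))  # None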
Пример #29
0
    def run(self):
        while not self.stopped.isSet():
            if self.lock.acquire(False):
                sess = book = sheet = None
                try:
                    sess = Session()
                    self.log("Starting to check BSUoS rates.")
                    contract = Contract.get_non_core_by_name(sess, 'bsuos')
                    latest_rs = sess.query(RateScript).filter(
                        RateScript.contract == contract).order_by(
                        RateScript.start_date.desc()).first()
                    latest_rs_id = latest_rs.id
                    this_month_start = latest_rs.start_date + \
                        relativedelta(months=1)
                    next_month_start = this_month_start + \
                        relativedelta(months=1)
                    now = utc_datetime_now()
                    props = contract.make_properties()
                    if props.get('enabled', False):

                        if now > next_month_start:
                            url = props['url']
                            self.log(
                                "Checking to see if data is available from " +
                                str(this_month_start) + " to " +
                                str(next_month_start - HH) +
                                " at " + url)
                            res = requests.get(url)
                            self.log(
                                "Received " + str(res.status_code) + " " +
                                res.reason)
                            book = xlrd.open_workbook(
                                file_contents=res.content)
                            sheet = book.sheet_by_index(0)

                            month_bsuos = {}
                            for row_index in range(1, sheet.nrows):
                                row = sheet.row(row_index)
                                raw_date = Datetime(
                                    *xlrd.xldate_as_tuple(
                                        row[0].value, book.datemode))
                                hh_date_ct = to_ct(raw_date)
                                hh_date = to_utc(hh_date_ct)
                                hh_date += relativedelta(
                                    minutes=30*int(row[1].value))
                                if not hh_date < this_month_start and \
                                        hh_date < next_month_start:
                                    month_bsuos[key_format(hh_date)] = \
                                        row[2].value

                            if key_format(next_month_start - HH) in \
                                    month_bsuos:
                                self.log("The whole month's data is there.")
                                script = "def rates_gbp_per_mwh():\n    " \
                                    "return {\n" + ',\n'.join(
                                        "'" + k + "': " + str(month_bsuos[k])
                                        for k in sorted(
                                            month_bsuos.keys())) + "}"
                                contract = Contract.get_non_core_by_name(
                                    sess, 'bsuos')
                                rs = RateScript.get_by_id(sess, latest_rs_id)
                                contract.update_rate_script(
                                    sess, rs, rs.start_date,
                                    rs.start_date + relativedelta(months=2) -
                                    HH, rs.script)
                                sess.flush()
                                contract.insert_rate_script(
                                    sess,
                                    rs.start_date + relativedelta(months=1),
                                    script)
                                sess.commit()
                                self.log("Added new rate script.")
                            else:
                                self.log(
                                    "There isn't a whole month there yet. The "
                                    "last date is " +
                                    sorted(month_bsuos.keys())[-1])
                    else:
                        self.log(
                            "The automatic importer is disabled. To "
                            "enable it, edit the contract properties to "
                            "set 'enabled' to True.")
                except BaseException:
                    self.log("Outer problem " + traceback.format_exc())
                    sess.rollback()
                finally:
                    book = sheet = None
                    if sess is not None:
                        sess.close()
                    self.lock.release()
                    self.log("Finished checking BSUoS rates.")

            self.going.wait(30 * 60)
            self.going.clear()
Пример #30
0
    def run_inner(self, sess):
        self.log("Starting to check GCv rates.")
        contract = Contract.get_non_core_by_name(sess, 'g_cv')
        latest_rs = sess.query(RateScript).filter(
            RateScript.contract == contract).order_by(
            RateScript.start_date.desc()).first()
        latest_rs_id = latest_rs.id
        this_month_start = latest_rs.start_date + relativedelta(months=1)
        next_month_start = this_month_start + relativedelta(months=1)
        now = utc_datetime_now()
        props = contract.make_properties()
        if props.get('enabled', False):
            search_finish = next_month_start + relativedelta(days=1)
            if now > search_finish:
                url = props['url']
                self.log(
                    "Checking to see if data is available from " +
                    hh_format(this_month_start) + " to " +
                    hh_format(search_finish) + " at " + url)

                res = requests.post(
                    url, data={
                        'LatestValue': 'true',
                        'PublicationObjectIds':
                            '408:12265,+408:4636,+408:4637,+408:4639,'
                            '+408:4638,+408:4640,+408:4641,+408:4642,'
                            '+408:4643,+408:4644,+408:4645,+408:4646,'
                            '+408:4647,+408:4648,+408:12269,+408:12268,'
                            '+408:12270,+408:12266,+408:12267',
                        'Applicable': 'applicableFor',
                        'PublicationObjectCount': '19',
                        'FromUtcDatetime': param_format(this_month_start),
                        'ToUtcDateTime': param_format(search_finish),
                        'FileType': 'Csv'})
                self.log("Received " + str(res.status_code) + " " + res.reason)

                month_cv = defaultdict(dict)
                cf = csv.reader(res.text.splitlines())
                row = next(cf)  # Skip title row
                for row in cf:
                    applicable_for_str = row[1]
                    data_item = row[2]
                    value_str = row[3]

                    if 'LDZ' in data_item:
                        applicable_for = to_utc(
                            Datetime.strptime(applicable_for_str, "%d/%m/%Y"))
                        cvs = month_cv[applicable_for]
                        ldz = data_item[-3:-1]
                        cvs[ldz] = Decimal(value_str)

                all_equal = len(set(map(len, month_cv.values()))) <= 1
                last_date = max(month_cv.keys())
                if last_date > next_month_start and all_equal:
                    for dt in tuple(month_cv.keys()):
                        if (dt.year, dt.month) != (
                                this_month_start.year, this_month_start.month):
                            del month_cv[dt]
                    self.log("The whole month's data is there.")
                    month_ion = [v for k, v in sorted(month_cv.items())]
                    script = dumps(month_ion)
                    contract = Contract.get_non_core_by_name(sess, 'g_cv')
                    rs = RateScript.get_by_id(sess, latest_rs_id)
                    contract.update_rate_script(
                        sess, rs, rs.start_date,
                        rs.start_date + relativedelta(months=2) - HH,
                        rs.script)
                    sess.flush()
                    contract.insert_rate_script(
                        sess,
                        rs.start_date + relativedelta(months=1),
                        script)
                    sess.commit()
                    self.log("Added new rate script.")
                else:
                    self.log(
                        "There isn't a whole month there yet. The "
                        "last date is " + hh_format(last_date) + ".")
        else:
            self.log(
                "The automatic importer is disabled. To "
                "enable it, edit the contract properties to "
                "set 'enabled' to True.")
Пример #31
0
def parse_to_date(date_str):
    # Header reconstructed: the original snippet starts mid-function. The name
    # and the empty-string check are assumptions added for readability.
    if len(date_str) == 0:
        return None
    else:
        dt = to_ct(Datetime.strptime(date_str, "%d/%m/%Y"))
        dt += Timedelta(hours=23, minutes=30)
        return to_utc(dt)


def is_common_mtc(code):
    return 499 < code < 510 or 799 < code < 1000
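
# Quick illustration of the ranges accepted above (assuming the is_common_mtc
# defined just above is in scope): codes 500-509 and 800-999 are common.
for code in (499, 500, 509, 510, 799, 800, 999, 1000):
    print(code, is_common_mtc(code))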


def do_get(sess):
    return render_template("report_163.html")


VOLTAGE_MAP = {"24": {"602": {to_utc(ct_datetime(2010, 4, 1)): "LV"}}}


def _parse_Line_Loss_Factor_Class(sess, csv_reader):
    VOLTAGE_LEVEL_CODES = set([v.code for v in sess.query(VoltageLevel)])
    DNO_MAP = dict(
        (dno.participant.code, dno)
        for dno in sess.query(Party).join(MarketRole).filter(
            MarketRole.code == "R").options(joinedload(Party.participant)))

    rows = []
    for values in csv_reader:
        participant_code = values[0]
        # market_role_code = values[1]
        llfc_code = values[3].zfill(3)
        valid_from = parse_date(values[4])
Пример #32
0
def content(supply_id, start_date, finish_date, user):
    forecast_date = to_utc(Datetime.max)
    caches = {}
    f = sess = era = None
    try:
        sess = Session()
        running_name, finished_name = chellow.dloads.make_names(
            'supplies_duration.csv', user)
        f = open(running_name, mode='w', newline='')
        w = csv.writer(f, lineterminator='\n')
        w.writerow(
            (
                "Era Start", "Era Finish", "Supply Id", "Supply Name",
                "Source", "Generator Type", "Site Code", "Site Name",
                "Associated Site Codes", "From", "To", "PC", "MTC", "CoP",
                "SSC", "Properties", "MOP Contract", "MOP Account",
                "DC Contract", "DC Account", "Normal Reads", "Type",
                "Supply Start", "Supply Finish", "Import LLFC",
                "Import MPAN Core", "Import Supply Capacity",
                "Import Supplier",
                "Import Total MSP kWh", "Import Non-actual MSP kWh",
                "Import Total GSP kWh", "Import MD / kW", "Import MD Date",
                "Import MD / kVA",
                "Import Bad HHs", "Export LLFC", "Export MPAN Core",
                "Export Supply Capacity", "Export Supplier",
                "Export Total MSP kWh", "Export Non-actual MSP kWh",
                "Export GSP kWh", "Export MD / kW", "Export MD Date",
                "Export MD / kVA", "Export Bad HHs"))

        eras = sess.query(Era).filter(
            or_(Era.finish_date == null(), Era.finish_date >= start_date),
            Era.start_date <= finish_date).order_by(
                Era.supply_id, Era.start_date).options(
            joinedload(Era.supply),
            joinedload(Era.supply).joinedload(Supply.source),
            joinedload(Era.supply).joinedload(Supply.generator_type),
            joinedload(Era.imp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.exp_llfc).joinedload(Llfc.voltage_level),
            joinedload(Era.imp_llfc),
            joinedload(Era.exp_llfc),
            joinedload(Era.mop_contract),
            joinedload(Era.dc_contract),
            joinedload(Era.channels),
            joinedload(Era.site_eras).joinedload(SiteEra.site),
            joinedload(Era.pc), joinedload(Era.cop),
            joinedload(Era.mtc).joinedload(Mtc.meter_type),
            joinedload(Era.imp_supplier_contract),
            joinedload(Era.exp_supplier_contract),
            joinedload(Era.ssc),
            joinedload(Era.site_eras))

        if supply_id is not None:
            eras = eras.filter(Era.supply == Supply.get_by_id(sess, supply_id))

        for era in eras:
            supply = era.supply
            site_codes = set()
            site = None
            for site_era in era.site_eras:
                if site_era.is_physical:
                    site = site_era.site
                else:
                    site_codes.add(site_era.site.code)

            sup_eras = sess.query(Era).filter(
                Era.supply == supply).order_by(Era.start_date).all()
            supply_start = sup_eras[0].start_date
            supply_finish = sup_eras[-1].finish_date

            if supply.generator_type is None:
                generator_type = ''
            else:
                generator_type = supply.generator_type.code

            ssc = era.ssc
            ssc_code = '' if ssc is None else ssc.code

            prime_reads = set()
            for read, rdate in chain(
                    sess.query(
                        RegisterRead, RegisterRead.previous_date).join(
                        RegisterRead.previous_type).join(Bill).join(
                        BillType).filter(
                    Bill.supply == supply, BillType.code != 'W',
                    RegisterRead.previous_date >= start_date,
                    RegisterRead.previous_date <= finish_date,
                    ReadType.code.in_(NORMAL_READ_TYPES)).options(
                        joinedload(RegisterRead.bill)),

                    sess.query(
                        RegisterRead, RegisterRead.present_date).join(
                        RegisterRead.present_type).join(Bill).join(
                        BillType).filter(
                    Bill.supply == supply, BillType.code != 'W',
                    RegisterRead.present_date >= start_date,
                    RegisterRead.present_date <= finish_date,
                    ReadType.code.in_(NORMAL_READ_TYPES)).options(
                        joinedload(RegisterRead.bill))):
                prime_bill = sess.query(Bill).join(BillType).filter(
                    Bill.supply == supply,
                    Bill.start_date <= read.bill.finish_date,
                    Bill.finish_date >= read.bill.start_date,
                    Bill.reads.any()).order_by(
                    Bill.issue_date.desc(), BillType.code).first()
                if prime_bill.id == read.bill.id:
                    prime_reads.add(str(rdate) + "_" + read.msn)

            supply_type = era.meter_category

            chunk_start = hh_max(era.start_date, start_date)
            chunk_finish = hh_min(era.finish_date, finish_date)
            num_hh = int(
                (chunk_finish + HH - chunk_start).total_seconds() / (30 * 60))

            w.writerow(
                [
                    hh_format(era.start_date),
                    hh_format(era.finish_date, ongoing_str=''),
                    supply.id, supply.name, supply.source.code, generator_type,
                    site.code, site.name, '| '.join(sorted(site_codes)),
                    hh_format(start_date), hh_format(finish_date), era.pc.code,
                    era.mtc.code, era.cop.code, ssc_code, era.properties,
                    era.mop_contract.name, era.mop_account,
                    era.dc_contract.name, era.dc_account, len(prime_reads),
                    supply_type, hh_format(supply_start),
                    hh_format(supply_finish, ongoing_str='')] + mpan_bit(
                    sess, supply, True, num_hh, era, chunk_start,
                    chunk_finish, forecast_date, caches) + mpan_bit(
                    sess, supply, False, num_hh, era, chunk_start,
                    chunk_finish, forecast_date, caches))

            # Avoid a long-running transaction
            sess.rollback()
    except BadRequest as e:
        if era is None:
            pref = "Problem: "
        else:
            pref = "Problem with era " + chellow.utils.url_root + "eras/" + \
                str(era.id) + "/edit : "
        f.write(pref + e.description)
    except BaseException as e:
        if era is None:
            pref = "Problem: "
        else:
            pref = "Problem with era " + str(era.id) + ": "
        f.write(pref + str(e))
        f.write(traceback.format_exc())
    finally:
        if sess is not None:
            sess.close()
        if f is not None:
            f.close()
            os.rename(running_name, finished_name)
Пример #33
0
def _to_finish_date(date_str):
    return to_utc(
        to_ct(
            Datetime.strptime(date_str, "%y%m%d") + relativedelta(days=1) -
            HH))
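
# Minimal sketch of the finish-date convention above, with stand-ins assumed
# for to_ct/to_utc and HH (one half-hour): a "%y%m%d" date is read as running
# to the last half-hour of that day in UK clock time, then converted to UTC.
from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo

HH = timedelta(minutes=30)

def _to_finish_date_sketch(date_str):
    day_start = datetime.strptime(date_str, "%y%m%d").replace(
        tzinfo=ZoneInfo("Europe/London"))
    return (day_start + timedelta(days=1) - HH).astimezone(timezone.utc)

print(_to_finish_date_sketch("210630"))  # 2021-06-30 22:30:00+00:00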
Пример #34
0
def _make_site_deltas(sess, report_context, site, scenario_hh, forecast_from,
                      supply_id):
    site_scenario_hh = scenario_hh.get(site.code, {})

    site_deltas = {"hhs": {}}
    delts = site_deltas["supply_deltas"] = {}
    for is_import in (True, False):
        delts[is_import] = {}
        for src in ("gen", "net", "gen-net", "3rd-party", "3rd-party-reverse",
                    "sub"):
            delts[is_import][src] = {"site": {}}

    earliest_delta = to_utc(Datetime.max)
    latest_delta = to_utc(Datetime.min)

    found_hh = False
    for typ in ("used", "generated", "parasitic", "gen_net"):
        hh_str = site_scenario_hh.get(typ, "")
        hh_data = site_scenario_hh[typ] = {}
        for row in csv.reader(StringIO(hh_str)):
            cells = [cell.strip() for cell in row]
            if len("".join(cells)) == 0:
                continue

            if len(cells) != 2:
                raise BadRequest(
                    f"Can't interpret the row {cells}. It should be of the "
                    f"form 'timestamp, kWh'.")

            date_str, kwh_str = cells
            ts = parse_hh_start(date_str)
            earliest_delta = min(ts, earliest_delta)
            latest_delta = max(ts, latest_delta)
            try:
                hh_data[ts] = float(kwh_str)
            except ValueError as e:
                raise BadRequest(
                    f"When looking at {typ} hh data, can't parse the kWh at "
                    f"{date_str}: {e}")
            found_hh = True

    if not found_hh:
        return site_deltas

    scenario_used = site_scenario_hh["used"]
    scenario_generated = site_scenario_hh["generated"]
    scenario_parasitic = site_scenario_hh["parasitic"]
    scenario_gen_net = site_scenario_hh["gen_net"]

    earliest_delta_ct = to_ct(earliest_delta)
    for month_start, month_finish in c_months_u(earliest_delta_ct.year,
                                                earliest_delta_ct.month,
                                                months=None):
        if month_start > latest_delta:
            break
        chunk_start = hh_max(month_start, earliest_delta)
        chunk_finish = hh_min(month_finish, latest_delta)

        site_ds = chellow.computer.SiteSource(sess, site, chunk_start,
                                              chunk_finish, forecast_from,
                                              report_context)
        hh_map = dict((h["start-date"], h) for h in site_ds.hh_data)

        for era in (sess.query(Era).join(SiteEra).join(Pc).filter(
                SiteEra.site == site,
                SiteEra.is_physical == true(),
                Era.imp_mpan_core != null(),
                Pc.code != "00",
                Era.start_date <= chunk_finish,
                or_(Era.finish_date == null(), Era.finish_date >= chunk_start),
        )):

            if supply_id is not None and era.supply_id != supply_id:
                continue

            ss_start = hh_max(era.start_date, chunk_start)
            ss_finish = hh_min(era.finish_date, chunk_finish)

            ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era,
                              True, report_context)

            for hh in ss.hh_data:
                sdatum = hh_map[hh["start-date"]]
                sdatum["import-net-kwh"] += hh["msp-kwh"]
                sdatum["used-kwh"] += hh["msp-kwh"]

        for era in (sess.query(Era).join(SiteEra).join(Pc).join(Supply).join(
                Source).filter(
                    SiteEra.site == site,
                    SiteEra.is_physical == true(),
                    Era.imp_mpan_core != null(),
                    Era.start_date <= chunk_finish,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= chunk_start),
                    Source.code == "gen-net",
                )):

            if supply_id is not None and era.supply_id != supply_id:
                continue

            ss_start = hh_max(era.start_date, chunk_start)
            ss_finish = hh_min(era.finish_date, chunk_finish)

            ss = SupplySource(sess, ss_start, ss_finish, forecast_from, era,
                              False, report_context)

            for hh in ss.hh_data:
                sdatum = hh_map[hh["start-date"]]
                try:
                    sdatum["gen-net-kwh"] += hh["msp-kwh"]
                except KeyError:
                    sdatum["gen-net-kwh"] = hh["msp-kwh"]

        for hh_start, hh in hh_map.items():
            if hh_start in scenario_used:
                used_delt = scenario_used[hh_start] - hh["used-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if used_delt < 0:
                    diff = hh["import-net-kwh"] + used_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt += used_delt
                else:
                    diff = hh["export-net-kwh"] - used_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt -= used_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["used-kwh"] += used_delt
                hh["msp-kwh"] -= exp_net_delt

            if hh_start in scenario_generated:
                imp_gen_delt = scenario_generated[hh_start] - hh[
                    "import-gen-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if imp_gen_delt < 0:
                    diff = hh["export-net-kwh"] + imp_gen_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt += imp_gen_delt
                else:
                    diff = hh["import-net-kwh"] - imp_gen_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt -= imp_gen_delt

                try:
                    delts[True]["gen"]["site"][hh_start] += imp_gen_delt
                except KeyError:
                    delts[True]["gen"]["site"][hh_start] = imp_gen_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["import-gen-kwh"] += imp_gen_delt
                hh["msp-kwh"] -= imp_net_delt

            if hh_start in scenario_parasitic:
                exp_gen_delt = scenario_parasitic[hh_start] - hh[
                    "export-gen-kwh"]
                imp_net_delt = 0
                exp_net_delt = 0

                if exp_gen_delt < 0:
                    diff = hh["import-net-kwh"] + exp_gen_delt
                    if diff < 0:
                        imp_net_delt -= hh["import-net-kwh"]
                        exp_net_delt -= diff
                    else:
                        imp_net_delt += exp_gen_delt
                else:
                    diff = hh["export-net-kwh"] - exp_gen_delt
                    if diff < 0:
                        exp_net_delt -= hh["export-net-kwh"]
                        imp_net_delt -= diff
                    else:
                        exp_net_delt -= exp_gen_delt

                try:
                    delts[False]["gen"]["site"][hh_start] += exp_gen_delt
                except KeyError:
                    delts[False]["gen"]["site"][hh_start] = exp_gen_delt

                try:
                    delts[False]["net"]["site"][hh_start] += exp_net_delt
                except KeyError:
                    delts[False]["net"]["site"][hh_start] = exp_net_delt

                try:
                    delts[True]["net"]["site"][hh_start] += imp_net_delt
                except KeyError:
                    delts[True]["net"]["site"][hh_start] = imp_net_delt

                hh["import-net-kwh"] += imp_net_delt
                hh["export-net-kwh"] += exp_net_delt
                hh["export-gen-kwh"] += exp_gen_delt
                hh["msp-kwh"] -= imp_net_delt

            if hh_start in scenario_gen_net:
                gen_net_delt = scenario_gen_net[hh_start] - hh["gen-net-kwh"]

                try:
                    delts[False]["gen-net"]["site"][hh_start] += gen_net_delt
                except KeyError:
                    delts[False]["gen-net"]["site"][hh_start] = gen_net_delt

                hh["import-gen-kwh"] += gen_net_delt
                hh["export-net-kwh"] += gen_net_delt

            site_deltas["hhs"][hh_start] = hh

    sup_deltas = site_deltas["supply_deltas"][False]["net"]["site"]
    if all(v == 0 for v in sup_deltas.values()):
        sup_deltas.clear()

    return site_deltas
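
# Sketch of the netting rule in the "used" branch above: a change in site
# usage is absorbed by import first, with any remainder spilling into export.
# Names and numbers here are illustrative only.
def split_used_delta(used_delt, imp_net_kwh, exp_net_kwh):
    imp_net_delt = exp_net_delt = 0
    if used_delt < 0:
        diff = imp_net_kwh + used_delt
        if diff < 0:
            imp_net_delt -= imp_net_kwh  # import can only fall to zero
            exp_net_delt -= diff         # the rest shows up as extra export
        else:
            imp_net_delt += used_delt
    else:
        diff = exp_net_kwh - used_delt
        if diff < 0:
            exp_net_delt -= exp_net_kwh
            imp_net_delt -= diff
        else:
            exp_net_delt -= used_delt
    return imp_net_delt, exp_net_delt

print(split_used_delta(-10, 6, 0))  # (-6, 4): import drops to zero, export rises by 4
print(split_used_delta(3, 0, 5))    # (0, -3): extra usage eats into export first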
Пример #35
0
def content(g_batch_id, g_bill_id, user):
    forecast_date = to_utc(Datetime.max)
    report_context = {}
    sess = tmp_file = None
    try:
        sess = Session()

        running_name, finished_name = chellow.dloads.make_names(
            'g_bill_check.csv', user)
        tmp_file = open(running_name, "w")
        csv_writer = csv.writer(tmp_file)
        if g_batch_id is not None:
            g_batch = GBatch.get_by_id(sess, g_batch_id)
            g_bills = sess.query(GBill).filter(
                GBill.g_batch == g_batch).order_by(GBill.reference)
        elif g_bill_id is not None:
            g_bill = GBill.get_by_id(sess, g_bill_id)
            g_bills = sess.query(GBill).filter(GBill.id == g_bill.id)
            g_batch = g_bill.g_batch

        g_contract = g_batch.g_contract

        vbf = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill')
        if vbf is None:
            raise BadRequest(
                'The contract ' + g_contract.name +
                " doesn't have a function virtual_bill.")

        header_titles = [
            'batch', 'bill_reference', 'bill_type', 'bill_start_date',
            'bill_finish_date', 'mprn', 'supply_name', 'site_code',
            'site_name', 'covered_start', 'covered_finish', 'covered_bill_ids']
        bill_titles = chellow.g_engine.g_contract_func(
            report_context, g_contract, 'virtual_bill_titles')()

        titles = header_titles[:]
        for title in bill_titles:
            for prefix in ('covered_', 'virtual_'):
                titles.append(prefix + title)
            if title.endswith('_gbp'):
                titles.append('difference_' + title)

        csv_writer.writerow(titles)
        for g_bill in g_bills:
            problem = ''
            g_supply = g_bill.g_supply
            read_dict = collections.defaultdict(set)
            for g_read in g_bill.g_reads:
                if not all(
                        g_read.msn == era.msn for era in g_supply.find_g_eras(
                            sess, g_read.prev_date, g_read.pres_date)):
                    problem += "The MSN " + g_read.msn + \
                        " of the register read " + str(g_read.id) + \
                        " doesn't match the MSN of all the relevant eras."

                for dt, typ in [
                        (g_read.pres_date, g_read.pres_type),
                        (g_read.prev_date, g_read.prev_type)]:
                    typ_set = read_dict[str(dt) + "-" + g_read.msn]
                    typ_set.add(typ)
                    if len(typ_set) > 1:
                        problem += " Reads taken on " + str(dt) + \
                            " have differing read types."

            g_era = g_supply.find_g_era_at(sess, g_bill.finish_date)
            if g_era is None:
                csv_writer.writerow(
                    ["Extraordinary! There isn't an era for this bill!"])
                continue

            vals = {
                    'batch': g_batch.reference,
                    'bill_reference': g_bill.reference,
                    'bill_type': g_bill.bill_type.code,
                    'bill_start_date': g_bill.start_date,
                    'bill_finish_date': g_bill.finish_date,
                    'mprn': g_supply.mprn, 'covered_vat_gbp': Decimal('0.00'),
                    'covered_net_gbp': Decimal('0.00'),
                    'covered_gross_gbp': Decimal('0.00'),
                    'covered_kwh': Decimal(0),
                    'covered_start': g_bill.start_date,
                    'covered_finish': g_bill.finish_date,
                    'covered_bill_ids': []}

            covered_primary_bill = None
            enlarged = True

            while enlarged:
                enlarged = False
                for covered_bill in sess.query(GBill).filter(
                        GBill.g_supply_id == g_supply.id,
                        GBill.start_date <= vals['covered_finish'],
                        GBill.finish_date >= vals['covered_start']).order_by(
                        GBill.issue_date.desc(), GBill.start_date):

                    if covered_primary_bill is None and \
                            len(covered_bill.g_reads) > 0:
                        covered_primary_bill = covered_bill
                    if covered_bill.start_date < vals['covered_start']:
                        vals['covered_start'] = covered_bill.start_date
                        enlarged = True
                        break
                    if covered_bill.finish_date > vals['covered_finish']:
                        vals['covered_finish'] = covered_bill.finish_date
                        enlarged = True
                        break

            for covered_bill in sess.query(GBill).filter(
                    GBill.g_supply == g_supply,
                    GBill.start_date <= vals['covered_finish'],
                    GBill.finish_date >= vals['covered_start']).order_by(
                    GBill.issue_date.desc(), GBill.start_date):
                vals['covered_bill_ids'].append(covered_bill.id)
                bdown = covered_bill.make_breakdown()
                vals['covered_kwh'] += covered_bill.kwh
                vals['covered_net_gbp'] += covered_bill.net
                vals['covered_vat_gbp'] += covered_bill.vat
                vals['covered_gross_gbp'] += covered_bill.gross
                for title in bill_titles:
                    k = 'covered_' + title
                    v = bdown.get(title)

                    if v is not None:
                        if title.endswith('_rate') or title in (
                                'correction_factor', 'calorific_value',
                                'units'):
                            if k not in vals:
                                vals[k] = set()
                            vals[k].add(v)
                        else:
                            try:
                                vals[k] += v
                            except KeyError:
                                vals[k] = v
                            except TypeError:
                                raise BadRequest(
                                    "Problem with key " + str(k) +
                                    " and value " + str(v) + " for existing " +
                                    str(vals[k]))

                    if title in (
                            'correction_factor', 'calorific_value',
                            'units_code', 'units_factor'):
                        if k not in vals:
                            vals[k] = set()
                        for g_read in covered_bill.g_reads:
                            if title in ('units_code', 'units_factor'):
                                g_units = g_read.g_units
                                if title == 'units_code':
                                    v = g_units.code
                                else:
                                    v = g_units.factor
                            else:
                                v = getattr(g_read, title)
                            vals[k].add(v)

            for g_era in sess.query(GEra).filter(
                    GEra.g_supply == g_supply,
                    GEra.start_date <= vals['covered_finish'],
                    or_(
                        GEra.finish_date == null(),
                        GEra.finish_date >= vals['covered_start'])).distinct():
                site = sess.query(Site).join(SiteGEra).filter(
                    SiteGEra.is_physical == true(),
                    SiteGEra.g_era == g_era).one()

                if vals['covered_start'] > g_era.start_date:
                    chunk_start = vals['covered_start']
                else:
                    chunk_start = g_era.start_date

                if hh_before(vals['covered_finish'], g_era.finish_date):
                    chunk_finish = vals['covered_finish']
                else:
                    chunk_finish = g_era.finish_date

                data_source = chellow.g_engine.GDataSource(
                    sess, chunk_start, chunk_finish, forecast_date, g_era,
                    report_context, covered_primary_bill)

                vbf(data_source)

                for k, v in data_source.bill.items():
                    vk = 'virtual_' + k
                    try:
                        if isinstance(v, set):
                            vals[vk].update(v)
                        else:
                            vals[vk] += v
                    except KeyError:
                        vals[vk] = v
                    except TypeError as detail:
                        raise BadRequest(
                            "For key " + str(vk) + " and value " + str(v) +
                            ". " + str(detail))

            vals['supply_name'] = g_supply.name
            vals['site_code'] = site.code
            vals['site_name'] = site.name

            for k, v in vals.items():
                vals[k] = csv_make_val(v)

            for i, title in enumerate(titles):
                if title.startswith('difference_'):
                    try:
                        covered_val = float(vals[titles[i - 2]])
                        virtual_val = float(vals[titles[i - 1]])
                        vals[title] = covered_val - virtual_val
                    except KeyError:
                        vals[title] = None

            csv_writer.writerow(
                [
                    (vals.get(k) if vals.get(k) is not None else '')
                    for k in titles])

    except BadRequest as e:
        tmp_file.write("Problem: " + e.description)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        tmp_file.write("Problem " + msg)
    finally:
        try:
            if sess is not None:
                sess.close()
        except BaseException:
            tmp_file.write("\nProblem closing session.")
        finally:
            if tmp_file is not None:
                tmp_file.close()
                os.rename(running_name, finished_name)
Пример #36
0
def content(scenario_props, base_name, site_id, supply_id, user, compression,
            site_codes, now):
    report_context = {}

    try:
        comp = report_context["computer"]
    except KeyError:
        comp = report_context["computer"] = {}

    try:
        rate_cache = comp["rates"]
    except KeyError:
        rate_cache = comp["rates"] = {}

    try:
        ind_cont = report_context["contract_names"]
    except KeyError:
        ind_cont = report_context["contract_names"] = {}

    sess = None
    try:
        sess = Session()

        start_year = scenario_props["scenario_start_year"]
        start_month = scenario_props["scenario_start_month"]
        months = scenario_props["scenario_duration"]

        month_pairs = list(
            c_months_u(start_year=start_year,
                       start_month=start_month,
                       months=months))
        start_date_utc = month_pairs[0][0]
        finish_date_utc = month_pairs[-1][-1]

        base_name.append(
            hh_format(start_date_utc)
            .replace(" ", "_").replace(":", "").replace("-", ""))

        base_name.append("for")
        base_name.append(str(months))
        base_name.append("months")

        if "forecast_from" in scenario_props:
            forecast_from = scenario_props["forecast_from"]
        else:
            forecast_from = None

        if forecast_from is None:
            forecast_from = chellow.computer.forecast_date()
        else:
            forecast_from = to_utc(forecast_from)

        sites = sess.query(Site).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append("site")
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append("supply")
            base_name.append(str(supply.id))
            sites = sites.join(SiteEra).join(Era).filter(Era.supply == supply)
        if len(site_codes) > 0:
            base_name.append("sitecodes")
            sites = sites.filter(Site.code.in_(site_codes))

        running_name, finished_name = chellow.dloads.make_names(
            "_".join(base_name) + ".ods", user)

        rf = open(running_name, "wb")
        site_rows = []
        era_rows = []
        normal_read_rows = []

        for rate_script in scenario_props.get("local_rates", []):
            contract_id = rate_script["contract_id"]
            try:
                cont_cache = rate_cache[contract_id]
            except KeyError:
                cont_cache = rate_cache[contract_id] = {}

            try:
                rate_script_start = rate_script["start_date"]
            except KeyError:
                raise BadRequest(
                    f"Problem in the scenario properties. Can't find the 'start_date' "
                    f"key of the contract {contract_id} in the 'local_rates' map."
                )

            props = PropDict("scenario properties", rate_script["script"])
            for dt in hh_range(report_context, rate_script_start,
                               rate_script["finish_date"]):
                cont_cache[dt] = props

        for rate_script in scenario_props.get("industry_rates", []):
            contract_name = rate_script["contract_name"]
            try:
                cont_cache = ind_cont[contract_name]
            except KeyError:
                cont_cache = ind_cont[contract_name] = {}

            rfinish = rate_script["finish_date"]
            if rfinish is None:
                raise BadRequest(
                    f"For the industry rate {contract_name} the finish_date can't be "
                    f"null.")
            for dt in hh_range(report_context, rate_script["start_date"],
                               rfinish):
                cont_cache[dt] = PropDict("scenario properties",
                                          rate_script["script"])

        era_maps = scenario_props.get("era_maps", {})
        by_hh = scenario_props.get("by_hh", False)

        scenario_hh = scenario_props.get("hh_data", {})

        era_header_titles = [
            "creation-date",
            "imp-mpan-core",
            "imp-supplier-contract",
            "exp-mpan-core",
            "exp-supplier-contract",
            "metering-type",
            "source",
            "generator-type",
            "supply-name",
            "msn",
            "pc",
            "site-id",
            "site-name",
            "associated-site-ids",
            "month",
        ]
        site_header_titles = [
            "creation-date",
            "site-id",
            "site-name",
            "associated-site-ids",
            "month",
            "metering-type",
            "sources",
            "generator-types",
        ]
        summary_titles = [
            "import-net-kwh",
            "export-net-kwh",
            "import-gen-kwh",
            "export-gen-kwh",
            "import-3rd-party-kwh",
            "export-3rd-party-kwh",
            "displaced-kwh",
            "used-kwh",
            "used-3rd-party-kwh",
            "import-net-gbp",
            "export-net-gbp",
            "import-gen-gbp",
            "export-gen-gbp",
            "import-3rd-party-gbp",
            "export-3rd-party-gbp",
            "displaced-gbp",
            "used-gbp",
            "used-3rd-party-gbp",
            "billed-import-net-kwh",
            "billed-import-net-gbp",
            "billed-supplier-import-net-gbp",
            "billed-dc-import-net-gbp",
            "billed-mop-import-net-gbp",
        ]

        title_dict = {}
        for cont_type, con_attr in (
            ("mop", Era.mop_contract),
            ("dc", Era.dc_contract),
            ("imp-supplier", Era.imp_supplier_contract),
            ("exp-supplier", Era.exp_supplier_contract),
        ):
            titles = []
            title_dict[cont_type] = titles
            conts = (sess.query(Contract).join(con_attr).join(
                Era.supply).join(Source).filter(
                    Era.start_date <= finish_date_utc,
                    or_(Era.finish_date == null(),
                        Era.finish_date >= start_date_utc),
                ).distinct().order_by(Contract.id))
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, "virtual_bill_titles")
                if title_func is None:
                    raise Exception(
                        f"For the contract {cont.name} there doesn't seem to be a "
                        f"'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        tpr_query = (sess.query(Tpr).join(MeasurementRequirement).join(
            Ssc).join(Era).filter(
                Era.start_date <= finish_date_utc,
                or_(Era.finish_date == null(),
                    Era.finish_date >= start_date_utc),
            ).order_by(Tpr.code).distinct())
        for tpr in tpr_query.filter(Era.imp_supplier_contract != null()):
            for suffix in ("-kwh", "-rate", "-gbp"):
                title_dict["imp-supplier"].append(tpr.code + suffix)
        for tpr in tpr_query.filter(Era.exp_supplier_contract != null()):
            for suffix in ("-kwh", "-rate", "-gbp"):
                title_dict["exp-supplier"].append(tpr.code + suffix)

        era_rows.append(
            era_header_titles + summary_titles + [None] +
            ["mop-" + t for t in title_dict["mop"]] + [None] +
            ["dc-" + t for t in title_dict["dc"]] + [None] +
            ["imp-supplier-" + t for t in title_dict["imp-supplier"]] +
            [None] + ["exp-supplier-" + t for t in title_dict["exp-supplier"]])
        site_rows.append(site_header_titles + summary_titles)

        sites = sites.all()
        deltas = {}
        normal_reads = set()

        for site in sites:
            deltas[site.id] = _make_site_deltas(sess, report_context, site,
                                                scenario_hh, forecast_from,
                                                supply_id)

        for month_start, month_finish in month_pairs:
            for site in sites:
                if by_hh:
                    sf = [(d, d) for d in hh_range(report_context, month_start,
                                                   month_finish)]
                else:
                    sf = [(month_start, month_finish)]

                for start, finish in sf:
                    try:
                        normal_reads = normal_reads | _process_site(
                            sess,
                            report_context,
                            forecast_from,
                            start,
                            finish,
                            site,
                            deltas[site.id],
                            supply_id,
                            era_maps,
                            now,
                            summary_titles,
                            title_dict,
                            era_rows,
                            site_rows,
                        )
                    except BadRequest as e:
                        raise BadRequest(
                            f"Site Code {site.code}: {e.description}")

            normal_read_rows = [[
                "mpan_core", "date", "msn", "type", "registers"
            ]]
            for mpan_core, r in sorted(list(normal_reads)):
                row = [mpan_core, r.date, r.msn, r.type] + list(r.reads)
                normal_read_rows.append(row)

            write_spreadsheet(rf, compression, site_rows, era_rows,
                              normal_read_rows)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows,
                          normal_read_rows)
    except BaseException:
        msg = traceback.format_exc()
        sys.stderr.write(msg + "\n")
        site_rows.append(["Problem " + msg])
        write_spreadsheet(rf, compression, site_rows, era_rows,
                          normal_read_rows)
    finally:
        if sess is not None:
            sess.close()
        try:
            rf.close()
            os.rename(running_name, finished_name)
        except BaseException:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names("error.txt", user)
            ef = open(r_name, "w")
            ef.write(msg + "\n")
            ef.close()
Пример #37
0
def to_vt(dmy):
    dt = to_date(dmy)
    if dt is None:
        return ''
    else:
        return to_utc(dt + Timedelta(hours=23, minutes=30)).strftime(FMT)
Пример #38
0
def get_date(row, name, datemode):
    dt = get_date_naive(row, name, datemode)
    return dt if dt is None else to_utc(to_ct(dt))
Пример #39
0
def get_start_date(row, name, datemode):
    return to_utc(get_date_ct(row, name, datemode))
Пример #40
0
def _parse_row(row, row_index, datemode, title_row):
    val = get_value(row, "Meter Point")
    try:
        mpan_core = parse_mpan_core(str(int(val)))
    except ValueError as e:
        raise BadRequest(
            "Can't parse the MPAN core in column 'Meter Point' with value '"
            + str(val)
            + "' : "
            + str(e)
        )

    bill_period = get_value(row, "Bill Period")
    if "-" in bill_period:
        period_start_naive, period_finish_naive = [
            Datetime.strptime(v, "%Y-%m-%d") for v in bill_period.split(" - ")
        ]
        period_start = to_utc(to_ct(period_start_naive))
        period_finish = to_utc(to_ct(period_finish_naive + relativedelta(days=1) - HH))
    else:
        period_start, period_finish = None, None

    from_date = get_date(row, "From Date", datemode)
    if from_date is None:
        if period_start is None:
            raise BadRequest("Can't find a bill start date.")
        else:
            from_date = period_start

    to_date_naive = get_date_naive(row, "To Date", datemode)
    if to_date_naive is None:
        if period_finish is None:
            raise BadRequest("Can't find a bill finish date.")
        else:
            to_date = period_finish

    else:
        to_date = to_utc(to_ct(to_date_naive + relativedelta(days=1) - HH))

    issue_date = get_date(row, "Bill Date", datemode)
    bill_number = get_value(row, "Bill Number")
    bill = {
        "bill_type_code": "N",
        "kwh": Decimal(0),
        "vat": Decimal("0.00"),
        "net": Decimal("0.00"),
        "reads": [],
        "breakdown": {"raw_lines": [str(title_row)]},
        "account": mpan_core,
        "issue_date": issue_date,
        "start_date": from_date,
        "finish_date": to_date,
        "mpan_core": mpan_core,
    }
    bd = bill["breakdown"]

    usage = get_dec(row, "Usage")
    # usage_units = get_value(row, 'Usage Unit')
    price = get_dec(row, "Price")
    amount = get_dec(row, "Amount")
    product_item_name = get_value(row, "Product Item Name")
    rate_name = get_value(row, "Rate Name")
    if product_item_name == "Renewables Obligation (RO)":
        bill["kwh"] += round(usage, 2)
    description = get_value(row, "Description")
    product_class = get_value(row, "Product Item Class")
    if description in ("Standard VAT@20%", "Reduced VAT@5%"):
        bill["vat"] += round(amount, 2)
    else:
        bill["net"] += round(amount, 2)

        path = [product_class, description, rate_name]
        names = _find_names(ELEM_MAP, path)

        duos_avail_prefix = "DUoS Availability ("
        duos_excess_avail_prefix = "DUoS Excess Availability ("

        if description.startswith("DUoS Availability Adjustment "):
            _bd_add(bd, "duos-availability-gbp", amount)
        elif description.startswith("DUoS Availability"):
            if description.startswith(duos_avail_prefix):
                # e.g. "DUoS Availability (100 kVA)": keep the figure between
                # the prefix and the trailing " kVA)"
                _bd_add(
                    bd,
                    "duos-availability-kva",
                    int(description[len(duos_avail_prefix) : -5]),
                )
            _bd_add(bd, "duos-availability-days", usage)
            _bd_add(bd, "duos-availability-rate", price)
            _bd_add(bd, "duos-availability-gbp", amount)
        elif description.startswith("DUoS Excess Availability"):
            if description.startswith(duos_excess_avail_prefix):
                kva = int(description[len(duos_excess_avail_prefix) : -5])
                _bd_add(bd, "duos-excess-availability-kva", kva)
            _bd_add(bd, "duos-excess-availability-days", usage)
            _bd_add(bd, "duos-excess-availability-rate", price)
            _bd_add(bd, "duos-excess-availability-gbp", amount)
        elif description.startswith("BSUoS Black Start "):
            _bd_add(bd, "black-start-gbp", amount)
        elif description.startswith("BSUoS Reconciliation - "):
            if usage is not None:
                _bd_add(bd, "bsuos-nbp-kwh", usage)
            if price is not None:
                _bd_add(bd, "bsuos-rate", price)
            _bd_add(bd, "bsuos-gbp", amount)
        elif description.startswith("FiT Rec - "):
            _bd_add(bd, "fit-gbp", amount)
        elif description.startswith("FiT Reconciliation "):
            _bd_add(bd, "fit-gbp", amount)
        elif description.startswith("CfD FiT Rec - ") or description.startswith(
            "CfD FiT Reconciliation"
        ):
            _bd_add(bd, "cfd-fit-gbp", amount)
        elif description.startswith("Flex"):
            _bd_add(bd, "reconciliation-gbp", amount)
        elif description.startswith("Legacy TNUoS Reversal "):
            _bd_add(bd, "triad-gbp", amount)
        elif description.startswith("Hand Held Read -"):
            _bd_add(bd, "meter-rental-gbp", amount)
        elif description.startswith("RO Mutualisation "):
            _bd_add(bd, "ro-gbp", amount)
        elif description.startswith("OOC MOP - "):
            _bd_add(bd, "meter-rental-gbp", amount)
        elif description.startswith("KVa Adjustment "):
            _bd_add(bd, "duos-availability-gbp", amount)
        elif names is not None:
            for elem_k, elem_v in zip(names, (amount, price, usage)):
                if elem_k is not None:
                    _bd_add(bd, elem_k, elem_v)
        else:
            raise BadRequest(
                f"For the path {path} the parser can't work out the element."
            )

    reference = str(bill_number) + "_" + str(row_index + 1)
    for k, v in tuple(bd.items()):
        if isinstance(v, set):
            # turn collected sets into lists for the stored breakdown
            bd[k] = list(v)
        elif k.endswith("-gbp"):
            # fold each charge element name into the bill reference
            reference += "_" + k[:-4]

    bill["reference"] = reference
    bill["gross"] = bill["net"] + bill["vat"]
    return bill
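The breakdown dict is populated through _bd_add, which isn't shown in this example. A plausible sketch, assuming that -rate and -kva elements collect distinct values into a set (which would explain the set-to-list conversion above) while every other element is summed:

def _bd_add(bd, el_name, val):
    # assumed behaviour: keep rates and kVA figures as sets of distinct
    # values, and accumulate everything else as a running total
    if el_name.split("-")[-1] in ("rate", "kva"):
        bd.setdefault(el_name, set()).add(val)
    else:
        bd[el_name] = bd.get(el_name, 0) + val


bd = {}
_bd_add(bd, "duos-availability-kva", 100)
_bd_add(bd, "duos-availability-gbp", 12.5)
_bd_add(bd, "duos-availability-gbp", 12.5)
print(bd)  # {'duos-availability-kva': {100}, 'duos-availability-gbp': 25.0}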
Example #41
 def to_date(self, component):
     return to_utc(Datetime.strptime(component, "%y%m%d"))
Example #42
    def run(self):
        while not self.stopped.isSet():
            if self.lock.acquire(False):
                sess = book = sbp_sheet = ssp_sheet = None
                try:
                    sess = Session()
                    self.log("Starting to check System Prices.")
                    # ct_tz = pytz.timezone('Europe/London')
                    contract = Contract.get_non_core_by_name(
                        sess, 'system_price')
                    contract_props = contract.make_properties()

                    if contract_props.get('enabled', False):
                        for rscript in sess.query(RateScript).filter(
                                RateScript.contract == contract).order_by(
                                    RateScript.start_date.desc()):
                            ns = loads(rscript.script)
                            rates = ns['gbp_per_nbp_mwh']
                            if len(rates) == 0:
                                # empty rate script: fill from its start date
                                fill_start = rscript.start_date
                                break
                            elif rates[key_format(
                                    rscript.finish_date)]['run'] == 'DF':
                                # the final half-hour already has a 'DF' run,
                                # so fill from the following half-hour
                                fill_start = rscript.finish_date + HH
                                break

                        config = Contract.get_non_core_by_name(
                            sess, 'configuration')
                        config_props = config.make_properties()

                        scripting_key = config_props.get(
                            ELEXON_PORTAL_SCRIPTING_KEY_KEY)
                        if scripting_key is None:
                            raise BadRequest(
                                "The property " +
                                ELEXON_PORTAL_SCRIPTING_KEY_KEY +
                                " cannot be found in the configuration "
                                "properties.")
                        url_str = contract_props['url'] + \
                            'file/download/BESTVIEWPRICES_FILE?key=' + \
                            scripting_key

                        self.log("Downloading from " + url_str +
                                 " and extracting data from " +
                                 hh_format(fill_start))

                        url = urllib.parse.urlparse(url_str)
                        if url.scheme == 'https':
                            conn = http.client.HTTPSConnection(
                                url.hostname, url.port)
                        else:
                            conn = http.client.HTTPConnection(
                                url.hostname, url.port)
                        conn.request("GET", url.path + '?' + url.query)

                        res = conn.getresponse()
                        self.log("Received " + str(res.status) + " " +
                                 res.reason)
                        data = res.read()
                        book = xlrd.open_workbook(file_contents=data)
                        sbp_sheet = book.sheet_by_index(1)
                        ssp_sheet = book.sheet_by_index(2)

                        sp_months = []
                        sp_month = None
                        for row_index in range(1, sbp_sheet.nrows):
                            sbp_row = sbp_sheet.row(row_index)
                            ssp_row = ssp_sheet.row(row_index)
                            raw_date = datetime.datetime(*xlrd.xldate_as_tuple(
                                sbp_row[0].value, book.datemode))
                            hh_date_ct = to_ct(raw_date)
                            hh_date = to_utc(hh_date_ct)
                            run_code = sbp_row[1].value
                            for col_idx in range(2, 52):
                                if hh_date >= fill_start:
                                    sbp_val = sbp_row[col_idx].value
                                    if sbp_val != '':
                                        if hh_date.day == 1 and \
                                                hh_date.hour == 0 and \
                                                hh_date.minute == 0:
                                            sp_month = {}
                                            sp_months.append(sp_month)
                                        ssp_val = ssp_row[col_idx].value
                                        sp_month[hh_date] = {
                                            'run': run_code,
                                            'sbp': sbp_val,
                                            'ssp': ssp_val
                                        }
                                hh_date += HH
                        self.log("Successfully extracted data.")
                        last_date = sorted(sp_months[-1].keys())[-1]
                        # drop the final month if it doesn't reach the last
                        # half-hour of the month, i.e. it's incomplete
                        if last_date.month == (last_date + HH).month:
                            del sp_months[-1]
                        if 'limit' in contract_props:
                            sp_months = sp_months[0:1]
                        for sp_month in sp_months:
                            sorted_keys = sorted(sp_month.keys())
                            month_start = sorted_keys[0]
                            month_finish = sorted_keys[-1]
                            rs = sess.query(RateScript).filter(
                                RateScript.contract == contract,
                                RateScript.start_date == month_start).first()
                            if rs is None:
                                self.log(
                                    "Adding a new rate script starting at " +
                                    hh_format(month_start) + ".")

                                latest_rs = sess.query(RateScript).filter(
                                    RateScript.contract == contract).\
                                    order_by(RateScript.start_date.desc()). \
                                    first()

                                contract.update_rate_script(
                                    sess, latest_rs, latest_rs.start_date,
                                    month_finish, loads(latest_rs.script))
                                rs = contract.insert_rate_script(
                                    sess, month_start, {})
                                sess.flush()
                            script = {
                                'gbp_per_nbp_mwh':
                                dict((key_format(k), v)
                                     for k, v in sp_month.items())
                            }
                            self.log("Updating rate script starting at " +
                                     hh_format(month_start) + ".")
                            contract.update_rate_script(
                                sess, rs, rs.start_date, rs.finish_date,
                                script)
                            sess.commit()
                    else:
                        self.log("The automatic importer is disabled. To "
                                 "enable it, edit the contract properties to "
                                 "set 'enabled' to True.")

                except BaseException:
                    self.log("Outer problem " + traceback.format_exc())
                    sess.rollback()
                finally:
                    book = sbp_sheet = ssp_sheet = None
                    self.lock.release()
                    self.log("Finished checking System Price rates.")
                    if sess is not None:
                        sess.close()

            self.going.wait(24 * 60 * 60)
            self.going.clear()
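In the sheet-reading loop above, each Excel serial date becomes the UTC instant of that day's midnight clock time before the columns step through the half-hours. A compact sketch of just that conversion, with stand-in zone handling (an assumption, as in the sketch after Example #38):

import datetime

import xlrd
from zoneinfo import ZoneInfo

CT = ZoneInfo("Europe/London")  # assumed clock-time zone


def excel_cell_to_utc(cell_value, datemode):
    # xldate_as_tuple unpacks the Excel serial number into
    # (year, month, day, hour, minute, second)
    raw = datetime.datetime(*xlrd.xldate_as_tuple(cell_value, datemode))
    # treat it as Europe/London clock time, then convert to UTC
    return raw.replace(tzinfo=CT).astimezone(datetime.timezone.utc)


# serial 41888.0 in a 1900-based workbook (datemode 0) is 2014-09-06
print(excel_cell_to_utc(41888.0, 0))  # 2014-09-05 23:00:00+00:00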
Example #43
def test_to_utc():
    dt_utc = to_utc(ct_datetime(2014, 9, 6, 1))
    assert dt_utc == Datetime(2014, 9, 6, 0, 0, tzinfo=utc)
Example #44
def _to_date(component):
    return to_utc(to_ct(Datetime.strptime(component, "%y%m%d")))
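Examples #41 and #44 parse the same "yymmdd" component but differ in zone handling: assuming to_utc treats a naive datetime as UTC (as in the earlier sketches), #41 pins the date to UTC midnight while #44 localises it to clock time first, which shifts the instant by an hour during BST:

from datetime import datetime as Datetime, timezone
from zoneinfo import ZoneInfo

CT = ZoneInfo("Europe/London")  # assumed clock-time zone

naive = Datetime.strptime("140906", "%y%m%d")
print(naive.replace(tzinfo=timezone.utc))                 # Example #41: 2014-09-06 00:00:00+00:00
print(naive.replace(tzinfo=CT).astimezone(timezone.utc))  # Example #44: 2014-09-05 23:00:00+00:00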