Example #1
def export_flows() -> None:
    date_range = get_scada_range(network=NetworkNEM)

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("7d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if stat_set:
        write_output(
            f"v3/stats/au/{interchange_stat.network.code}/flows/7d.json",
            stat_set)
Example #2
def __trading_energy_generator_hour(
        df: pd.DataFrame,
        hour: datetime,
        duid_id: str,
        power_field: str = "generated") -> list:
    # builds a list of record dicts, one per half-hour trading interval
    return_cols = []

    t_start = hour.replace(minute=5)

    for TI in range(2):
        t_i = t_start + timedelta(minutes=30 * TI)
        t_f = t_start + timedelta(minutes=30 * (TI + 1))

        _query = f"'{t_i}' <= trading_interval <= '{t_f}' and facility_code == '{duid_id}'"

        d_ti = df.query(_query)

        energy_value = None
        trading_interval = None

        # rooftop 30m intervals - AEMO rooftop is going to go in a separate network
        # so this won't be required
        if (d_ti.fueltech_id == "solar_rooftop").all() and (
                d_ti[power_field].count() == 1):
            energy_value = d_ti[power_field].sum() / 2
            # ooofff - this delta comes back off as part of NEM offset
            trading_interval = d_ti.index[0] + timedelta(minutes=5)
        # interpolate if it isn't padded out
        elif d_ti[power_field].count() != 7:
            index_interpolated = pd.date_range(start=t_i,
                                               end=t_f,
                                               freq="5min",
                                               tz=NetworkNEM.get_timezone())

            d_ti = d_ti.reset_index()
            d_ti = d_ti.set_index("trading_interval")
            d_ti = d_ti.reindex(index_interpolated)
            d_ti["facility_code"] = duid_id
            d_ti[power_field] = d_ti[power_field].fillna(0)

            if d_ti[power_field].count() != 7:
                logger.warning("Interpolated frame didn't match generated count")

        try:
            if not (d_ti.fueltech_id == "solar_rooftop").all():
                energy_value = __trapezium_integration(d_ti, power_field)
                trading_interval = d_ti.index[-2]
        except ValueError as e:
            logger.error("Error with {} at {} {}: {}".format(
                duid_id, t_i, t_f, e))

        if not d_ti.index.empty:
            return_cols.append({
                "trading_interval": trading_interval,
                "network_id": "NEM",
                "facility_code": duid_id,
                "eoi_quantity": energy_value,
            })

    return return_cols
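
The half-hour energy figure above comes from __trapezium_integration, which isn't shown in this example. A minimal sketch of what such a helper might look like, assuming seven 5-minute power readings in MW are integrated to a single MWh value with the trapezoidal rule (the name and exact behaviour here are assumptions, not the opennem implementation):

import numpy as np
import pandas as pd


def trapezium_integration(d_ti: pd.DataFrame, power_field: str = "generated") -> float:
    """Integrate 5-minute power readings (MW) across a half hour
    into a single energy value (MWh) via the trapezoidal rule."""
    values = d_ti[power_field].values

    if len(values) < 2:
        raise ValueError("need at least two readings to integrate")

    # sample spacing of 5 minutes, expressed in hours
    return float(np.trapz(values, dx=5 / 60))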
Example #3
def get_today_nem() -> datetime:
    """Gets today in NEM time"""
    now = datetime.now()

    now_no_microseconds = now.replace(microsecond=0)

    # NEM is fixed offset at +10
    nem_tz = NetworkNEM.get_fixed_offset()

    nem_dt = now_no_microseconds.astimezone(nem_tz)

    return nem_dt
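
NEM market time doesn't observe daylight saving, so get_fixed_offset() presumably returns a plain UTC+10 tzinfo. A minimal sketch of the equivalent using only the standard library (the zone name "AEST" is illustrative):

from datetime import datetime, timedelta, timezone

# a fixed +10:00 offset with no daylight saving transitions
nem_tz = timezone(timedelta(hours=10), "AEST")

print(datetime.now(tz=timezone.utc).astimezone(nem_tz))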
Example #4
    def _validate_trading_interval(cls, value: Any) -> datetime:
        interval_time = parse_date(
            value,
            dayfirst=False,
            yearfirst=True,
        )

        if not interval_time:
            raise Exception(f"Invalid APVI forecast interval: {value}")

        # All APVI data is in NEM time
        interval_time = interval_time.astimezone(
            NetworkNEM.get_timezone())  # type: ignore

        return interval_time
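
parse_date here takes the same dayfirst/yearfirst flags as dateutil's parser, so assuming it wraps dateutil, the flags disambiguate all-numeric dates like this:

from dateutil.parser import parse

# yearfirst=True reads "21/01/15" as 2021-01-15 rather than 2015-01-21
parse("21/01/15", dayfirst=False, yearfirst=True)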
Example #5
from opennem.schema.network import NetworkNEM


@pytest.mark.parametrize(
    [
        "ts", "start_expected", "end_expected", "interval_expected",
        "length_expected"
    ],
    [
        # Test 1 hour inclusive
        (
            TimeSeries(
                start=datetime.fromisoformat("2021-01-15 12:00:00+00:00"),
                end=datetime.fromisoformat("2021-01-15 13:00:00+00:00"),
                network=NetworkNEM,
                interval=NetworkNEM.get_interval(),
                period=human_to_period("1h"),
            ),
            # Also testing timezone shift from UTC to NEM time
            datetime.fromisoformat("2021-01-15 22:00:00+10:00"),
            datetime.fromisoformat("2021-01-15 23:00:00+10:00"),
            "5m",
            13,  # number of 5 minute intervals in an hour _inclusive_
        ),
        # Test 1 week inclusive
        (
            TimeSeries(
                start=datetime.fromisoformat("1997-05-05 12:45:00+00:00"),
                end=datetime.fromisoformat("2021-01-15 12:45:00+00:00"),
                network=NetworkNEM,
                interval=NetworkNEM.get_interval(),
Example #6
def _trading_interval_timezone(dt: datetime) -> datetime:
    return dt.replace(tzinfo=NetworkNEM.get_timezone())
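
Note the contrast with get_today_nem() in Example #3: replace(tzinfo=...) attaches the zone to a naive timestamp without shifting the wall-clock value, while astimezone() converts it. A quick illustration:

from datetime import datetime, timedelta, timezone

nem_tz = timezone(timedelta(hours=10))
dt = datetime(2021, 1, 15, 12, 0)  # naive, already NEM wall-clock time

dt.replace(tzinfo=nem_tz)  # 2021-01-15 12:00:00+10:00 - same wall clock
dt.astimezone(nem_tz)      # converts from the system's local zone instead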
Example #7
    def process_item(self, item, spider=None):
        if "records" not in item:
            logger.error("Invalid return response")

        records = item["records"]

        is_latest = False
        record_date = None

        if "meta" in item:
            if "is_latest" in item["meta"]:
                is_latest = item["meta"]["is_latest"]

            if "record_date" in item["meta"]:
                record_date = item["meta"]["record_date"]

        if "postcode" not in records:
            logger.error("No postcode data")

        if "installations" not in records:
            logger.error("No postcode data")

        if "postcodeCapacity" not in records:
            logger.error("No postcode capacity data")

        postcode_gen = records["postcode"]
        postcode_capacity = records["postcodeCapacity"]
        installations = records["installations"]

        engine = get_database_engine()
        session = SessionLocal()

        records_to_store = []

        created_at = datetime.now()
        created_by = ""

        if spider and hasattr(spider, "name"):
            created_by = spider.name

        for record in postcode_gen:
            for state, prefix in STATE_POSTCODE_PREFIXES.items():
                facility_code = "{}_{}_{}".format(ROOFTOP_CODE, "apvi".upper(),
                                                  state.upper())

                interval_time = parse_date(
                    record["ts"],
                    dayfirst=False,
                    yearfirst=True,
                )

                interval_time = interval_time.astimezone(
                    NetworkNEM.get_timezone())

                generated = sum([
                    float(v) / 100 * postcode_capacity[k]
                    for k, v in record.items() if k.startswith(prefix) and v
                    and k in postcode_capacity and k[:2] not in WA_NON_SWIS
                ])

                if not generated:
                    continue

                __record = {
                    "created_by": created_by,
                    "created_at": created_at,
                    "network_id": "APVI",
                    "trading_interval": interval_time,
                    "facility_code": facility_code,
                    "generated": generated,
                }

                records_to_store.append(__record)

        STATE_CAPACITIES = {}

        if is_latest:
            # temporarily only run getting capacities on latest
            logger.info("Updating capacities on %s", record_date)

            for postcode_prefix, capacity_val in postcode_capacity.items():
                for state, prefix in STATE_POSTCODE_PREFIXES.items():
                    if state not in STATE_CAPACITIES:
                        STATE_CAPACITIES[state] = 0

                    if postcode_prefix.startswith(prefix):
                        STATE_CAPACITIES[state] += capacity_val

            for state, state_capacity in STATE_CAPACITIES.items():
                facility_code = "{}_{}_{}".format(ROOFTOP_CODE, "apvi".upper(),
                                                  state.upper())

                state_facility: Facility = (session.query(Facility).filter_by(
                    code=facility_code).one_or_none())

                if not state_facility:
                    raise Exception(
                        "Could not find rooftop facility for {}".format(facility_code))

                state_facility.capacity_registered = state_capacity

                if state.lower() in installations:
                    state_number_units = installations[state.lower()]
                    state_facility.unit_number = state_number_units

                session.add(state_facility)
                session.commit()

        if len(records_to_store) < 1:
            return 0

        stmt = insert(FacilityScada).values(records_to_store)
        stmt.bind = engine
        stmt = stmt.on_conflict_do_update(
            index_elements=[
                "trading_interval", "network_id", "facility_code",
                "is_forecast"
            ],
            set_={
                "generated": stmt.excluded.generated,
                "created_by": stmt.excluded.created_by,
            },
        )

        try:
            session.execute(stmt)
            session.commit()
        except Exception as e:
            logger.error("Error: {}".format(e))
        finally:
            session.close()

        return len(records_to_store)
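
The generated comprehension above converts APVI's per-postcode readings, which are reported as a percentage of installed capacity, into output units. A worked example with made-up values:

postcode_capacity = {"2000": 1.2}  # installed capacity for postcode 2000
record = {"ts": "2021-01-15 12:30", "2000": 45.0}  # percent of capacity

generated = sum(
    float(v) / 100 * postcode_capacity[k]
    for k, v in record.items()
    if k.startswith("20") and v and k in postcode_capacity
)

print(generated)  # 0.54 - 45% of the 1.2 installed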
Example #8
def export_electricitymap() -> None:
    date_range = get_scada_range(network=NetworkNEM)

    if not date_range.start:
        raise Exception("Could not get a scada range in EM export")

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("1d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        networks=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if not stat_set:
        raise Exception("No flow results for electricitymap export")

    em_set = OpennemDataSet(type="custom",
                            version=get_version(),
                            created_at=datetime.now(),
                            data=[])

    INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"]

    for ds in stat_set.data:
        if ds.code in INVERT_SETS:
            ds_inverted = invert_flow_set(ds)
            em_set.data.append(ds_inverted)
            logger.info("Inverted {}".format(ds.code))
        else:
            em_set.data.append(ds)

    for region in ["NSW1", "QLD1", "VIC1", "TAS1", "SA1"]:
        power_set = power_week(
            time_series,
            region,
            include_capacities=True,
            include_code=False,
            networks_query=[
                NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill
            ],
        )

        if power_set:
            em_set.append_set(power_set)

    date_range = get_scada_range(network=NetworkWEM)

    if not date_range.start:
        raise Exception("Could not get a scada range for WEM in EM export")

    # WEM custom
    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=NetworkWEM,
        networks=[NetworkWEM, NetworkAPVI],
        interval=NetworkWEM.get_interval(),
        period=interchange_stat.period,
    )

    power_set = power_week(
        time_series,
        "WEM",
        include_capacities=True,
        networks_query=[NetworkWEM, NetworkAPVI],
        include_code=False,
    )

    if power_set:
        em_set.append_set(power_set)

    write_output("v3/clients/em/latest.json", em_set)
Example #9
def main():

    json_data_ungrouped = []

    for req_date in date_series(TODAY, length=3, reverse=True):
        records = requests.post(
            APVI_DATA_URI,
            data={
                "day": req_date.strftime(APVI_DATE_QUERY_FORMAT)
            },
        ).json()

        postcode_gen = records["postcode"]
        postcode_capacity = records["postcodeCapacity"]

        for record in postcode_gen:
            for state, prefix in STATE_POSTCODE_PREFIXES.items():

                if state not in ["WA"]:
                    continue

                interval_time = parse_date(
                    record["ts"],
                    dayfirst=False,
                    yearfirst=True,
                )

                interval_time = interval_time.astimezone(
                    NetworkNEM.get_timezone())

                generated_state = sum([
                    float(v) / 100 * postcode_capacity[k]
                    for k, v in record.items()
                    if k.startswith(prefix) and v and k in postcode_capacity
                ])

                generated_swis = sum([
                    float(v) / 100 * postcode_capacity[k]
                    for k, v in record.items()
                    if k[:2] in SWIS_CODES and v and k in postcode_capacity
                ])

                json_data_ungrouped.append({
                    "trading_interval": interval_time,
                    "swis": generated_swis,
                    state: generated_state,
                })

    json_grouped_date = {}

    for date_grouped_str, v in groupby(
            json_data_ungrouped, lambda k: str(k["trading_interval"].date())):
        if date_grouped_str not in json_grouped_date:
            json_grouped_date[date_grouped_str] = []

        json_grouped_date[date_grouped_str] += list(v)

    json_grouped_summed = {}

    for grouped_date, trading_day in json_grouped_date.items():
        if grouped_date not in json_grouped_summed:
            json_grouped_summed[grouped_date] = {}

        for trading_interval in trading_day:
            for k, v in trading_interval.items():
                if k in ["trading_interval"]:
                    continue

                if k not in json_grouped_summed[grouped_date]:
                    json_grouped_summed[grouped_date][k] = 0

                json_grouped_summed[grouped_date][k] += v
                json_grouped_summed[grouped_date][k] = round(
                    json_grouped_summed[grouped_date][k], 2)

    json_serialized = json.dumps(json_grouped_summed,
                                 indent=4,
                                 cls=OpenNEMJSONEncoder)

    print(json_serialized)
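
One caveat in the grouping step above: itertools.groupby only merges consecutive items, so it relies on json_data_ungrouped already arriving in date order from date_series. Sorting by the same key first makes that explicit:

from itertools import groupby


def day_key(record: dict) -> str:
    return str(record["trading_interval"].date())


for day, items in groupby(sorted(json_data_ungrouped, key=day_key), key=day_key):
    print(day, len(list(items)))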