Example #1
def import_nemweb_scada() -> None:
    engine_mysql = get_mysql_engine()
    logger.info("Connected to database.")

    # walk month-by-month from the newest dispatch month back to the oldest
    for dt in date_series(
            date_trunc(NEMWEB_DISPATCH_OLD_MAX_DATE, "month"),
            date_trunc(NEMWEB_DISPATCH_OLD_MIN_DATE, "month"),
            interval=relativedelta(months=1),
            reverse=True,
    ):
        query = get_scada_old_query(year=dt.year, month=dt.month)

        with engine_mysql.connect() as c:
            logger.debug(query)

            results_raw = list(c.execute(query))

            logger.info("Got {} rows for year {} and month {}".format(
                len(results_raw), year, month))

        # map the raw rows into schema records
        results_schema = [
            DispatchUnitSolutionOld(
                trading_interval=i[0],
                facility_code=i[1],
                generated=i[2],
            ) for i in results_raw
        ]

        insert_scada_records(results_schema)

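Note: date_trunc and date_series are OpenNEM helpers not shown in this excerpt. Assuming date_trunc(d, "month") snaps a date to the first of its month, the loop above walks one month at a time from the newest dispatch month back to the oldest, roughly like this sketch (the NEMWEB_* bounds are placeholders):

# Rough equivalent of the monthly walk above; dates are placeholders and
# date_trunc's behaviour is an assumption.
from datetime import date
from dateutil.relativedelta import relativedelta

newest = date(2009, 7, 1)   # stand-in for the truncated MAX date
oldest = date(1998, 12, 1)  # stand-in for the truncated MIN date

current = newest
while current >= oldest:
    print(current.year, current.month)  # one backfill query per month
    current -= relativedelta(months=1)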
Example #2
def crawl_apvi_forecasts(crawler: CrawlerDefinition,
                         last_crawled: bool = True,
                         limit: bool = False,
                         latest: bool = False) -> ControllerReturn:
    """Runs the APVI crawl definition"""
    apvi_return = ControllerReturn()

    # serve only the latest result
    if crawler.latest:
        return run_apvi_crawl()

    # otherwise run the entire date range back from today
    for crawl_date in date_series(get_today_nem().date(),
                                  length=crawler.limit,
                                  reverse=True):
        apvi_forecast_return = run_apvi_crawl(crawl_date)
        apvi_return.processed_records += apvi_forecast_return.processed_records
        apvi_return.total_records += apvi_forecast_return.total_records
        apvi_return.inserted_records += apvi_forecast_return.inserted_records
        apvi_return.errors += apvi_forecast_return.errors

        # track the most recent server timestamp seen across crawls
        if not apvi_return.server_latest or apvi_return.server_latest < apvi_forecast_return.server_latest:
            apvi_return.server_latest = apvi_forecast_return.server_latest

    return apvi_return
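ControllerReturn is an OpenNEM result container whose definition is not part of this example. A minimal stand-in consistent with the attributes accumulated above might look like this (field types are inferred, not confirmed):

# Hypothetical stand-in for ControllerReturn, inferred from the fields
# used above; the real OpenNEM schema may differ.
from dataclasses import dataclass
from datetime import datetime
from typing import Optional

@dataclass
class ControllerReturn:
    processed_records: int = 0
    total_records: int = 0
    inserted_records: int = 0
    errors: int = 0
    server_latest: Optional[datetime] = None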
Example #3
def get_aemo_raw_values(persist: bool = True) -> AEMOTableSet:
    """Get raw aemo values from nemweb"""

    # DispatchIS archive URLs for the comparison window. Note these are
    # currently unused below - only trading_is_urls is parsed.
    urls: list[str] = []

    for date_series_date in date_series(start=COMPARISON_START_DATE,
                                        end=COMPARISON_END_DATE +
                                        timedelta(days=1)):
        dfmt = date_series_date.strftime("%Y%m%d")
        urls.append(
            f"https://nemweb.com.au/Reports/Archive/DispatchIS_Reports/PUBLIC_DISPATCHIS_{dfmt}.zip"
        )

    trading_is_urls = [
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20211219_20211225.zip",
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20211226_20220101.zip",
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220102_20220108.zip",
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220109_20220115.zip",
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220116_20220122.zip",
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220123_20220129.zip",
        "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220130_20220205.zip",
        # older
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220206_20220212.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220213_20220219.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220220_20220226.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220227_20220305.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220306_20220312.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220213_20220219.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220220_20220226.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220227_20220305.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220306_20220312.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220313_20220319.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220320_20220326.zip",
        # "https://nemweb.com.au/Reports/Archive/TradingIS_Reports/PUBLIC_TRADINGIS_20220327_20220402.zip",
    ]

    # urls += trading_is_urls

    ts = AEMOTableSet()

    # parse each trading archive URL and accumulate it into the AEMOTableSet
    for u in trading_is_urls:
        ts = parse_aemo_url_optimized(u,
                                      table_set=ts,
                                      persist_to_db=False,
                                      values_only=True)

    if not persist:
        return ts

    with open(TABLESET_PATH, "w") as fh:
        fh.write(ts.json(indent=4))

    logger.info(f"Wrote table_set to file {TABLESET_PATH}")

    return ts
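The ts.json(indent=4) call suggests AEMOTableSet is a pydantic model, in which case the persisted file can presumably be read back. A sketch assuming pydantic v1 and the same TABLESET_PATH:

# Hypothetical round-trip read; assumes AEMOTableSet is a pydantic v1
# model and TABLESET_PATH is the JSON file written above.
restored = AEMOTableSet.parse_file(TABLESET_PATH)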
Example #4
    def test_date_series(self):
        # defaults to now going back 30 days
        series = list(date_series(reverse=True))

        date_today = datetime.now().date()
        date_29_days_ago = date_today - timedelta(days=29)

        assert len(series) == 30, "There are 30 dates"
        assert series[0] == date_today, "First entry is today"
        assert series[29] == date_29_days_ago, "Last entry is 29 days ago"
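This test pins down date_series's defaults: thirty daily steps starting today, newest first with reverse=True. A minimal sketch that satisfies the test and the call patterns in the other examples (the real OpenNEM utility is richer, e.g. it also accepts a relativedelta interval for monthly steps):

# Minimal sketch of date_series, inferred from its usage across these
# examples; the actual OpenNEM implementation may differ.
from datetime import datetime, timedelta

def date_series(start=None, end=None, length=30, interval=timedelta(days=1), reverse=False):
    if start is None:
        start = datetime.now().date()
    if end is not None:
        # derive the step count from the span (daily intervals assumed)
        length = abs((end - start).days)
    step = -interval if reverse else interval
    current = start
    for _ in range(length):
        yield current
        current += step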
Example #5
def main() -> None:

    json_data_ungrouped = []

    for req_date in date_series(TODAY, length=1, reverse=True):

        logger.info("Getting data for {}".format(req_date))

        records = requests.post(
            APVI_DATA_URI,
            data={
                "day": req_date.strftime(APVI_DATE_QUERY_FORMAT)
            },
        ).json()

        postcode_gen = records["postcode"]
        postcode_capacity = records["postcodeCapacity"]

        for record in postcode_gen:
            for state, prefix in STATE_POSTCODE_PREFIXES.items():

                if state not in ["WA"]:
                    continue

                # timestamps arrive suffixed with Z but represent NEM time
                # (UTC+10), so rewrite the offset before parsing
                interval_time = datetime.fromisoformat(
                    record["ts"].replace("Z", "+10:00"))

                # generated output for this state's postcode prefix
                generated_state = sum(
                    float(v) / 100 * postcode_capacity[k]
                    for k, v in record.items()
                    if k.startswith(prefix) and v and k in postcode_capacity
                )

                # generated output across all SWIS postcode ranges
                generated_swis = sum(
                    float(v) / 100 * postcode_capacity[k]
                    for k, v in record.items()
                    if k[:2] in SWIS_CODES and v and k in postcode_capacity
                )

                json_data_ungrouped.append({
                    "trading_interval": interval_time,
                    "swis": generated_swis,
                    state: generated_state,
                })

    json_grouped_date = {}

    # itertools.groupby only groups consecutive items; this holds here
    # because json_data_ungrouped was appended in date order
    for date_grouped_str, v in groupby(
            json_data_ungrouped, lambda k: str(k["trading_interval"].date())):
        if date_grouped_str not in json_grouped_date:
            json_grouped_date[date_grouped_str] = []

        json_grouped_date[date_grouped_str] += list(v)

    json_grouped_summed = {}

    for grouped_date, trading_day in json_grouped_date.items():
        if grouped_date not in json_grouped_summed:
            json_grouped_summed[grouped_date] = {}

        for trading_interval in trading_day:
            for k, v in trading_interval.items():
                if k == "trading_interval":
                    continue

                if k not in json_grouped_summed[grouped_date]:
                    json_grouped_summed[grouped_date][k] = 0

                json_grouped_summed[grouped_date][k] += v

        # round once per key after accumulating the day's values
        for k in json_grouped_summed[grouped_date]:
            json_grouped_summed[grouped_date][k] = round(
                json_grouped_summed[grouped_date][k], 2)

    json_serialized = json.dumps(json_grouped_summed,
                                 indent=4,
                                 cls=OpenNEMJSONEncoder)
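Each postcode reading v in the APVI response appears to be a percentage of that postcode's installed capacity, so float(v) / 100 * postcode_capacity[k] converts it into generated output. A worked illustration with made-up numbers:

# Illustration only - hypothetical capacities and readings.
postcode_capacity = {"6000": 200.0, "6100": 150.0}  # installed capacity per postcode
record = {"6000": "12.5", "6100": "40"}             # readings as percent of capacity

generated = sum(
    float(v) / 100 * postcode_capacity[k]
    for k, v in record.items()
    if k in postcode_capacity and v
)
print(generated)  # 12.5% of 200 + 40% of 150 = 25.0 + 60.0 = 85.0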