Example #1
def power_flows_week(
    time_series: TimeSeries,
    network_region_code: str,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = interconnector_power_flow(time_series=time_series,
                                      network_region=network_region_code)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise Exception("No results from query: {}".format(query))

    imports = [
        DataQueryResult(interval=i[0],
                        result=i[2],
                        group_by="imports" if len(i) > 1 else None)
        for i in row
    ]

    exports = [
        DataQueryResult(interval=i[0],
                        result=i[3],
                        group_by="exports" if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        imports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("No results")

    result_exports = stats_factory(
        exports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    result.append_set(result_exports)

    return result
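
The imports and exports comprehensions above read different columns (i[2] and i[3]) from the same query rows. A minimal sketch of that column selection, assuming a hypothetical row shape of (trading_interval, network_region, imports, exports); the actual columns returned by interconnector_power_flow are not shown in this excerpt:

from datetime import datetime

# hypothetical rows in the assumed shape (trading_interval, region, imports, exports)
row = [
    (datetime(2021, 1, 15, 10, 0), "NSW1", 110.0, -85.0),
    (datetime(2021, 1, 15, 10, 5), "NSW1", 120.0, -90.0),
]

imports = [(r[0], r[2]) for r in row]  # column 2 feeds the "imports" series
exports = [(r[0], r[3]) for r in row]  # column 3 feeds the "exports" series
print(imports)
print(exports)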
Example #2
def gov_stats_cpi() -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = country_stats_query(StatTypes.CPI)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error("No results for gov_stats_cpi returing blank set")
        return None

    result = stats_factory(
        stats,
        code="au.cpi",
        network=NetworkNEM,
        interval=human_to_interval("1Q"),
        period=human_to_period("all"),
        units=get_unit("cpi"),
        group_field="gov",
    )

    return result
Example #3
    def values(self) -> List[Tuple[datetime, float]]:
        interval_obj = get_human_interval(self.interval)
        interval_def = human_to_interval(self.interval)
        inclusive = False
        dt = self.start

        if interval_def.interval < 1440:
            inclusive = True

        # return as list rather than generate
        timeseries_data = []

        # rewind back one interval
        if inclusive:
            # dt -= interval_obj
            pass

        for v in self.data:
            timeseries_data.append((dt, v))
            dt = dt + interval_obj

        # @TODO do some sanity checking here
        # if dt != self.last:
        #     raise Exception(
        #         "Mismatch between start, last and interval size. Got {} and {}".format(
        #             dt, self.last
        #         )
        #     )

        return timeseries_data
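
values() pairs each stored data point with a timestamp by stepping the start datetime forward one interval per point. A standalone sketch of that stepping pattern, using a plain timedelta in place of the object returned by get_human_interval:

from datetime import datetime, timedelta

def values_sketch(start, data, step=timedelta(minutes=5)):
    # pair each value with a timestamp, advancing one interval per data point
    out = []
    dt = start
    for v in data:
        out.append((dt, v))
        dt = dt + step
    return out

print(values_sketch(datetime(2021, 1, 15, 10, 0), [1.0, 2.0, 3.0]))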
Example #4
def power_unit(
    unit_code: str = Query(..., description="Unit code"),
    network_code: str = Query(..., description="Network code"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    stats = []

    facility_codes = [normalize_duid(unit_code)]

    query = power_facility_query(facility_codes, network.code, interval=interval, period=period)

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Unit stats not found",
        )

    output = stats_factory(
        stats,
        code=unit_code,
        interval=interval,
        period=period,
        units=units,
        network=network,
    )

    if not output:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No stats found",
        )

    return output
Example #5
def energy_network_api(
    engine=Depends(get_database_engine),
    network_code: str = Query(..., description="Network code"),
    interval_human: str = Query("1d", description="Interval"),
    period_human: str = Query("1Y", description="Period"),
) -> OpennemDataSet:

    results = []

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("energy_giga")

    query = energy_network(network=network, interval=interval, period=period)

    with engine.connect() as c:
        results = list(c.execute(query))

    if len(results) < 1:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No results")

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    result = stats_factory(
        stats,
        code=network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
Example #6
File: energy.py Project: opennem/opennem
def get_date_range(network: NetworkSchema) -> DatetimeRange:
    date_range = get_scada_range(network=NetworkNEM)

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        interval=human_to_interval("1d"),
        period=human_to_period("all"),
        network=network,
    )

    return time_series.get_range()
Example #7
def demand_week(
    time_series: TimeSeries,
    network_region_code: Optional[str],
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = network_demand_query(
        time_series=time_series,
        network_region=network_region_code,
        networks_query=networks_query,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        logger.error(
            "No results from network_demand_query with {}".format(time_series))
        return None

    demand = [
        DataQueryResult(interval=i[0],
                        result=i[2],
                        group_by="demand" if len(i) > 1 else None) for i in row
    ]

    result = stats_factory(
        demand,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("demand"),
        region=network_region_code,
    )

    if not result:
        logger.error(
            "No results from network_demand_query with {}".format(time_series))
        return None

    return result
Example #8
def get_power_example() -> OpennemDataSet:
    network = network_from_network_code("NEM")
    interval = human_to_interval("5m")
    units = get_unit("power")
    period = human_to_period("7d")
    network_region_code = "NSW1"

    test_rows = []

    dt = datetime.fromisoformat("2021-01-15 10:00:00")

    for ft in ["coal_black", "coal_brown"]:
        for v in range(0, 3):
            test_rows.append([dt, ft, v])
            dt = dt + timedelta(minutes=5)

    stats = [
        DataQueryResult(interval=i[0],
                        result=i[2],
                        group_by=i[1] if len(i) > 1 else None)
        for i in test_rows
    ]

    assert len(stats) == 6, "Should have 6 stats"

    result = stats_factory(
        stats,
        code=network_region_code or network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("Bad unit test data")

    with open("power-nsw1.json", "w") as fh:
        fh.write(result.json(indent=4))

    return result
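
Following the fueltech id construction in stats_factory (Example #26), the series ids for this fixture would plausibly come out as below. This is a sketch: the "au" country code and the "power" unit_type are assumptions about NetworkNEM and get_unit("power"), not values shown in the excerpt.

series_ids = []

for fuel_tech in ["coal_black", "coal_brown"]:
    # country . network . region . "fuel_tech" . fueltech code . unit type
    parts = ["au", "nem", "nsw1", "fuel_tech", fuel_tech, "power"]
    series_ids.append(".".join(parts))

print(series_ids)
# ['au.nem.nsw1.fuel_tech.coal_black.power', 'au.nem.nsw1.fuel_tech.coal_brown.power']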
Example #9
def test_nem_nsw1_201512() -> None:
    query_result = get_test_fixture("emissions_nem_nsw1_201512.csv")

    region_flows = [
        RegionFlowEmissionsResult(
            interval=i["trading_interval"],
            flow_from=i["flow_from"],
            flow_to=i["flow_to"],
            energy=i["energy"],
            flow_from_emissions=i["flow_from_emissions"],
            flow_to_emissions=i["flow_to_emissions"],
        ) for i in query_result
    ]

    interval = human_to_interval("1M")

    flows = net_flows_emissions("NSW1", region_flows, interval)

    print(flows)
Example #10
def energy_station(
    engine=Depends(get_database_engine),
    session: Session = Depends(get_database_session),
    network_code: str = Query(..., description="Network code"),
    station_code: str = Query(..., description="Station Code"),
    interval: str = Query(None, description="Interval"),
    period: str = Query("7d", description="Period"),
) -> OpennemDataSet:
    """
    Get energy output for a station (list of facilities)
    over a period
    """

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval:
        # @NOTE rooftop data is 15m
        if station_code.startswith("ROOFTOP"):
            interval = "15m"
        else:
            interval = "{}m".format(network.interval_size)

    interval_obj = human_to_interval(interval)
    period_obj = human_to_period(period)
    units = get_unit("energy")

    station = (
        session.query(Station)
        .join(Station.facilities)
        .filter(Station.code == station_code)
        .filter(Facility.network_id == network.code)
        .one_or_none()
    )

    if not station:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Station not found")

    if len(station.facilities) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station has no facilities",
        )

    facility_codes = list(set([f.code for f in station.facilities]))

    query = energy_facility_query(
        facility_codes,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    logger.debug(query)

    with engine.connect() as c:
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    results_energy = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[2] if len(i) > 1 else None)
        for i in row
    ]

    results_market_value = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[3] if len(i) > 1 else None)
        for i in row
    ]

    results_emissions = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[4] if len(i) > 1 else None)
        for i in row
    ]

    if len(results_energy) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    stats = stats_factory(
        stats=results_energy,
        units=units,
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=station_code,
        include_group_code=True,
    )

    if not stats:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    stats_market_value = stats_factory(
        stats=results_market_value,
        units=get_unit("market_value"),
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=station_code,
        include_group_code=True,
    )

    stats.append_set(stats_market_value)

    stats_emissions = stats_factory(
        stats=results_emissions,
        units=get_unit("emissions"),
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=network.code.lower(),
        include_group_code=True,
    )

    stats.append_set(stats_emissions)

    return stats
Example #11
def power_network_fueltech_api(
    network_code: str = Query(..., description="Network code"),
    network_region: str = Query(None, description="Network region"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    scada_range = get_scada_range(network=network)

    query = power_network_fueltech(
        network=network,
        interval=interval,
        period=period,
        network_region=network_region,
        scada_range=scada_range,
    )

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats,
        code=network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
        region=network_region,
        fueltech_group=True,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
Example #12
def power_station(
    station_code: str = Query(..., description="Station code"),
    network_code: str = Query(..., description="Network code"),
    since: datetime = Query(None, description="Since time"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    session: Session = Depends(get_database_session),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    if not since:
        since = datetime.now() - human_to_timedelta("7d")

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        # @NOTE rooftop data is 15m
        if station_code.startswith("ROOFTOP"):
            interval_human = "15m"
        else:
            interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    station = (
        session.query(Station)
        .join(Facility)
        .filter(Station.code == station_code)
        .filter(Facility.network_id == network.code)
        .filter(Station.approved.is_(True))
        .one_or_none()
    )

    if not station:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Station not found")

    facility_codes = list(set([f.code for f in station.facilities]))

    stats = []

    query = power_facility_query(facility_codes, network=network, interval=interval, period=period)

    logger.debug(query)

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats,
        code=station_code,
        network=network,
        interval=interval,
        period=period,
        include_group_code=True,
        units=units,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
Example #13
def power_week(
    time_series: TimeSeries,
    network_region_code: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
    include_capacities: bool = False,
    include_code: Optional[bool] = True,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = power_network_fueltech_query(
        time_series=time_series,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error("No results from power week query with {}".format(time_series))
        return None

    result = stats_factory(
        stats,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=time_series.interval,
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
        include_code=include_code,
    )

    if not result:
        logger.error("No results from power week status factory with {}".format(time_series))
        return None

    if include_capacities and network_region_code:
        region_fueltech_capacities = get_facility_capacities(
            time_series.network, network_region_code
        )

        for ft in result.data:
            if ft.fuel_tech in region_fueltech_capacities:
                ft.x_capacity_at_present = region_fueltech_capacities[ft.fuel_tech]

    # price

    time_series_price = time_series.copy()
    time_series_price.interval = human_to_interval("30m")

    query = price_network_query(
        time_series=time_series_price,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats_price = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    stats_market_value = stats_factory(
        stats=stats_price,
        code=network_region_code or time_series.network.code.lower(),
        units=get_unit("price_energy_mega"),
        network=time_series.network,
        interval=human_to_interval("30m"),
        region=network_region_code,
        period=time_series.period,
        include_code=include_code,
    )

    result.append_set(stats_market_value)

    # rooftop solar

    time_series_rooftop = time_series.copy()
    time_series_rooftop.interval = human_to_interval("30m")

    query = power_network_rooftop_query(
        time_series=time_series_rooftop,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    rooftop_power = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    rooftop = stats_factory(
        rooftop_power,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=human_to_interval("30m"),
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
        include_code=include_code,
        cast_nulls=False,
    )

    # rooftop forecast
    rooftop_forecast = None

    if rooftop and rooftop.data and len(rooftop.data) > 0:
        time_series_rooftop_forecast = time_series_rooftop.copy()
        time_series_rooftop_forecast.start = rooftop.data[0].history.last
        time_series_rooftop_forecast.forecast = True

        query = power_network_rooftop_query(
            time_series=time_series_rooftop_forecast,
            networks_query=networks_query,
            network_region=network_region_code,
            forecast=True,
        )

        with engine.connect() as c:
            logger.debug(query)
            row = list(c.execute(query))

        rooftop_forecast_power = [
            DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
            for i in row
        ]

        rooftop_forecast = stats_factory(
            rooftop_forecast_power,
            # code=network_region_code or network.code,
            network=time_series.network,
            interval=human_to_interval("30m"),
            period=time_series.period,
            units=get_unit("power"),
            region=network_region_code,
            fueltech_group=True,
            include_code=include_code,
            cast_nulls=False,
        )

    if rooftop and rooftop_forecast:
        if (
            hasattr(rooftop, "data")
            and len(rooftop.data) > 0
            and rooftop_forecast.data
            and len(rooftop_forecast.data) > 0
        ):
            rooftop.data[0].forecast = rooftop_forecast.data[0].history

    result.append_set(rooftop)

    return result
Example #14
File: map.py Project: opennem/opennem
def generate_weekly_export_map() -> StatMetadata:
    """
    Generate export map for weekly power series

    @TODO deconstruct this into separate methods and schema
    ex. network.get_scada_range(), network_region.get_bom_station() etc.
    """
    session = get_scoped_session()

    networks = session.query(Network).filter(
        Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    # Loop countries
    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU,
                                      networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range for NetworkAU")

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=country,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                week=week,
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval("30m"),
                period=human_to_period("7d"),
            )
            _exmap.append(export)

    # Loop networks
    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)

        if not scada_range:
            raise Exception("Require a scada range for network: {}".format(
                network.code))

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=network.country,
                network=network_schema,
                year=year,
                week=week,
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval(f"{network.interval_size}m"),
                period=human_to_period("7d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        # Skip cases like wem/wem where region is superfluous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema,
                                          network_region=region.code)

            if not scada_range:
                logger.error(
                    "Require a scada range for network {} and region {}".
                    format(network_schema.code, region.code))
                continue

            for year, week in week_series(scada_range.end, scada_range.start):
                export = StatExport(
                    stat_type=StatType.power,
                    priority=PriorityType.history,
                    country=network.country,
                    network=network_schema,
                    year=year,
                    week=week,
                    date_range=date_range_from_week(
                        year, week, network_from_network_code(network.code)),
                    interval=human_to_interval(f"{network.interval_size}m"),
                    period=human_to_period("7d"),
                )

                if network.code == "WEM":
                    export.networks = [NetworkWEM, NetworkAPVI]
                    export.network_region_query = "WEM"

                _exmap.append(export)

    export_meta = StatMetadata(date_created=datetime.now(),
                               version=get_version(),
                               resources=_exmap)

    return export_meta
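
week_series(scada_range.end, scada_range.start) is consumed above as (year, week) pairs walking back from the end of the scada range. A hedged sketch of a helper with that behaviour, built on isocalendar; the real week_series may differ in ordering and boundary handling:

from datetime import datetime, timedelta

def week_series_sketch(end, start):
    # yield (iso_year, iso_week) pairs, walking backwards one week at a time
    dt = end
    while dt >= start:
        iso_year, iso_week, _ = dt.isocalendar()
        yield iso_year, iso_week
        dt -= timedelta(days=7)

for year, week in week_series_sketch(datetime(2021, 2, 1), datetime(2021, 1, 4)):
    print(year, week)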
Example #15
         period=human_to_period("7d"),
     ),
     # Also testing timezone shift from UTC to NEM time
     datetime.fromisoformat("2021-01-08 22:45:00+10:00"),
     datetime.fromisoformat("2021-01-15 22:45:00+10:00"),
     "5m",
     2017,  # number of 5 minute intervals in 7 days (inclusive)
 ),
 # Years
 (
     TimeSeries(
         start=datetime.fromisoformat("1997-05-05 12:45:00+10:00"),
         end=datetime.fromisoformat("2021-01-15 12:45:00+10:00"),
         network=NetworkNEM,
         year=2021,
         interval=human_to_interval("1d"),
         period=human_to_period("1Y"),
     ),
     # Expected
     datetime.fromisoformat("2021-01-01 00:00:00+10:00"),
     datetime.fromisoformat("2021-01-15 00:00:00+10:00"),
     "1d",
     15,
 ),
 (
     TimeSeries(
         start=datetime.fromisoformat("1997-05-05 12:45:00+00:00"),
         end=datetime.fromisoformat("2021-02-15 02:45:00+00:00"),
         network=NetworkNEM,
         year=2021,
         interval=human_to_interval("1d"),
Example #16
def energy_network_fueltech_api(
    network_code: str = Query(None, description="Network code"),
    network_region: str = Query(None, description="Network region"),
    interval_human: str = Query("1d", description="Interval"),
    year: int = Query(None, description="Year to query"),
    period_human: str = Query("1Y", description="Period"),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    network = network_from_network_code(network_code)
    interval = human_to_interval(interval_human)

    period_obj: TimePeriod = human_to_period("1Y")

    if period_human:
        period_obj = human_to_period(period_human)

    units = get_unit("energy_giga")

    query = ""

    if year and isinstance(year, int):
        period_obj = human_to_period("1Y")

        if year > datetime.now().year or year < 1996:
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail="Not a valid year",
            )

        scada_range = get_scada_range(network=network)

        query = energy_network_fueltech_year(
            network=network,
            interval=interval,
            year=year,
            network_region=network_region,
            scada_range=scada_range,
        )
    elif period_obj and period_obj.period_human == "all":
        scada_range = get_scada_range(network=network)

        query = energy_network_fueltech_all(
            network=network,
            network_region=network_region,
            scada_range=scada_range,
        )
    else:
        query = energy_network_fueltech(
            network=network,
            interval=interval,
            period=period_obj,
            network_region=network_region,
        )

    # print(query)

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Energy stats not found",
        )

    result = stats_factory(
        stats,
        code=network.code,
        network=network,
        interval=interval,
        period=period_obj,
        units=units,
        region=network_region,
        fueltech_group=True,
    )

    if not result:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No stats")

    return result
Example #17
def emission_factor_per_network(  # type: ignore
    engine=Depends(get_database_engine),  # type: ignore
    network_code: str = Query(..., description="Network code"),
    interval: str = Query("30m", description="Interval size"),
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval(interval)
    period_obj = human_to_period("7d")

    if not interval_obj:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Invalid interval size")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = emission_factor_region_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    emission_factors = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        emission_factors,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("emissions_factor"),
        group_field="emission_factor",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
Example #18
def export_all_monthly() -> None:
    session = get_scoped_session()

    all_monthly = OpennemDataSet(code="au",
                                 data=[],
                                 version=get_version(),
                                 created_at=datetime.now())

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    # Iterate networks and network regions
    networks = [NetworkNEM, NetworkWEM]

    for network in networks:
        network_regions = session.query(NetworkRegion).filter(
            NetworkRegion.network_id == network.code).all()

        for network_region in network_regions:
            networks = []

            logging.info(
                "Exporting monthly for network {} and region {}".format(
                    network.code, network_region.code))

            if network_region.code == "WEM":
                networks = [NetworkWEM, NetworkAPVI]

            if network == NetworkNEM:
                networks = [NetworkNEM, NetworkAEMORooftop]

            logger.debug("Running monthlies for {} and {}".format(
                network.code, network_region.code))

            scada_range: ScadaDateRange = get_scada_range(network=network,
                                                          networks=networks,
                                                          energy=True)

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".
                    format(network, True))
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1M"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=network_region.code,
                networks=networks)
            stat_set.append_set(demand_energy_and_value)

            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            all_monthly.append_set(stat_set)

            bom_station = get_network_region_weather_station(
                network_region.code)

            if bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    all_monthly.append_set(weather_stats)
                except Exception:
                    pass

    write_output("v3/stats/au/all/monthly.json", all_monthly)
Example #19
def export_all_daily(
    networks: List[NetworkSchema] = [NetworkNEM, NetworkWEM],
    network_region_code: Optional[str] = None,
) -> None:
    session = get_scoped_session()

    cpi = gov_stats_cpi()

    for network in networks:
        network_regions = session.query(NetworkRegion).filter_by(
            export_set=True).filter_by(network_id=network.code)

        if network_region_code:
            network_regions = network_regions.filter_by(
                code=network_region_code)

        network_regions = network_regions.all()

        for network_region in network_regions:

            logging.info("Exporting for network {} and region {}".format(
                network.code, network_region.code))

            networks = [
                NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill
            ]

            if network_region.code == "WEM":
                networks = [NetworkWEM, NetworkAPVI]

            scada_range: ScadaDateRange = get_scada_range(network=network,
                                                          networks=networks,
                                                          energy=True)

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".
                    format(network, True))
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1d"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=network_region.code,
                networks=networks)
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            bom_station = get_network_region_weather_station(
                network_region.code)

            if bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    stat_set.append_set(weather_stats)
                except Exception:
                    pass

            if cpi:
                stat_set.append_set(cpi)

            write_output(f"v3/stats/au/{network_region.code}/daily.json",
                         stat_set)
Example #20
File: router.py Project: opennem/opennem
def station_observations_api(
        station_code: str = Query(None, description="Station code"),
        interval_human: str = Query("15m", description="Interval"),
        period_human: str = Query("7d", description="Period"),
        station_codes: List[str] = [],
        timezone: Optional[str] = None,
        offset: Optional[str] = None,
        year: Optional[int] = None,
        engine=Depends(get_database_engine),
) -> OpennemDataSet:
    units = get_unit("temperature")

    if not interval_human:
        interval_human = "15m"

    if not period_human:
        period_human = "7d"

    if station_code:
        station_codes = [station_code]

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)

    if timezone:
        timezone = ZoneInfo(timezone)

    if offset:
        timezone = get_fixed_timezone(offset)

    query = observation_query(
        station_codes=station_codes,
        interval=interval,
        period=period,
        year=year,
    )

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0],
                        result=i[2],
                        group_by=i[1] if len(i) > 1 else None) for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats=stats,
        units=units,
        interval=interval,
        period=period,
        code="bom",
        group_field="temperature",
    )

    return result
Example #21
File: dates.py Project: MarnieShaw/opennem
    def get_range(self) -> DatetimeRange:
        """Return a DatetimeRange from the time series for queries"""
        start = self.start
        end = self.end

        # If its a forward looking forecast
        # jump out early
        if self.forecast:
            start = self.end + timedelta(minutes=self.interval.interval)
            end = self.end + get_human_interval(self.forecast_period)

            start = start.astimezone(self.network.get_fixed_offset())
            end = end.astimezone(self.network.get_fixed_offset())

            return DatetimeRange(start=start, end=end, interval=self.interval)

        # subtract the period (ie. 7d from the end for start if not all)
        if self.period == human_to_period("all"):
            start = date_trunc(start, self.interval.trunc)
            start = start.replace(
                hour=0, minute=0, second=0, tzinfo=self.network.get_fixed_offset()
            )

            # If its all per month take the end of the last month
            if self.interval == human_to_interval("1M"):
                end = date_trunc(get_end_of_last_month(end), "day")
                end = end.replace(
                    hour=23, minute=59, second=59, tzinfo=self.network.get_fixed_offset()
                )

            self.year = None

        else:
            start = self.end - get_human_interval(self.period.period_human)

        if self.year:
            if self.year > end.year:
                raise Exception("Specified year is great than end year")

            start = start.replace(
                year=self.year,
                month=1,
                day=1,
                hour=0,
                minute=0,
                second=0,
                tzinfo=self.network.get_fixed_offset(),
            )

            end = datetime(
                year=self.year,
                month=12,
                day=31,
                hour=23,
                minute=59,
                second=59,
                tzinfo=self.network.get_fixed_offset(),
            )

            if self.year == CUR_YEAR:
                today = datetime.now(tz=self.network.get_fixed_offset())

                end = datetime(
                    year=CUR_YEAR, month=today.month, day=today.day, hour=23, minute=59, second=59
                )

                end = end - timedelta(days=1)

                end = end.replace(tzinfo=self.network.get_fixed_offset())

                if self.end.date() < today.date():
                    end = self.end

        if self.month:
            start = datetime(
                year=self.month.year,
                month=self.month.month,
                day=1,
                hour=0,
                minute=0,
                second=0,
                tzinfo=self.network.get_fixed_offset(),
            )

            end = start + get_human_interval("1M") - timedelta(days=1)

            end = end.replace(
                hour=23,
                minute=59,
                second=59,
            )

        # localize times
        if not start.tzinfo or start.tzinfo != self.network.get_fixed_offset():
            start = start.astimezone(self.network.get_fixed_offset())

        if not end.tzinfo or end.tzinfo != self.network.get_fixed_offset():
            end = end.astimezone(self.network.get_fixed_offset())

        dtr = DatetimeRange(start=start, end=end, interval=self.interval)

        return dtr
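
For periods other than "all", get_range derives the start by subtracting the parsed period from the end (start = self.end - get_human_interval(self.period.period_human)). A minimal sketch of that subtraction, assuming a "7d" period parses to relativedelta(days=7), which is what get_human_interval presumably returns:

from datetime import datetime
from dateutil.relativedelta import relativedelta

end = datetime.fromisoformat("2021-01-15 12:45:00+10:00")
start = end - relativedelta(days=7)  # assumption: "7d" parses to relativedelta(days=7)
print(start.isoformat(), end.isoformat())
# 2021-01-08T12:45:00+10:00 2021-01-15T12:45:00+10:00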
Example #22
def price_network_endpoint(
    engine: Engine = Depends(get_database_engine),
    network_code: str = Path(..., description="Network code"),
    network_region: Optional[str] = Query(None, description="Network region code"),
    forecasts: bool = Query(False, description="Include price forecasts"),
) -> OpennemDataSet:
    """Returns network and network region price info for interval which defaults to network
    interval size

    Args:
        engine ([type], optional): Database engine. Defaults to Depends(get_database_engine).

    Raises:
        HTTPException: No results

    Returns:
        OpennemData: data set
    """
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval("5m")
    period_obj = human_to_period("1d")

    scada_range = get_balancing_range(network=network, include_forecasts=forecasts)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    if network_region:
        time_series.network.regions = [NetworkNetworkRegion(code=network_region)]

    query = network_region_price_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    result_set = [
        DataQueryResult(interval=i[0], result=i[3], group_by=i[2] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        result_set,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("price"),
        group_field="price",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
Example #23
def export_all_daily() -> None:
    session = SessionLocal()
    network_regions = session.query(NetworkRegion).all()

    cpi = gov_stats_cpi()

    for network_region in network_regions:
        network = network_from_network_code(network_region.network.code)
        networks = None

        if network_region.code == "WEM":
            networks = [NetworkWEM, NetworkAPVI]

        scada_range: ScadaDateRange = get_scada_range(network=network,
                                                      networks=networks)

        time_series = TimeSeries(
            start=scada_range.start,
            end=scada_range.end,
            network=network,
            interval=human_to_interval("1d"),
            period=human_to_period("all"),
        )

        stat_set = energy_fueltech_daily(
            time_series=time_series,
            networks_query=networks,
            network_region_code=network_region.code,
        )

        if not stat_set:
            continue

        # Hard coded to NEM only atm but we'll put has_interconnectors
        # in the metadata to automate all this
        if network == NetworkNEM:
            interconnector_flows = energy_interconnector_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_flows)

            interconnector_emissions = energy_interconnector_emissions_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_emissions)

        bom_station = get_network_region_weather_station(network_region.code)

        if bom_station:
            weather_stats = weather_daily(
                time_series=time_series,
                station_code=bom_station,
                network_region=network_region.code,
            )
            stat_set.append_set(weather_stats)

        if cpi:
            stat_set.append_set(cpi)

        write_output(f"v3/stats/au/{network_region.code}/daily.json", stat_set)
Example #24
def fueltech_demand_mix(
    engine=Depends(get_database_engine),  # type: ignore
    network_code: str = Query(..., description="Network code"),
) -> OpennemDataSet:
    """Return fueltech proportion of demand for a network

    Args:
        engine ([type], optional): Database engine. Defaults to Depends(get_database_engine).

    Raises:
        HTTPException: No results

    Returns:
        OpennemData: data set
    """
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval("5m")
    period_obj = human_to_period("1d")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = network_fueltech_demand_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    result_set = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        result_set,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("emissions_factor"),
        group_field="emission_factor",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
Example #25
def export_all_monthly() -> None:
    session = SessionLocal()
    network_regions = session.query(NetworkRegion).all()

    all_monthly = OpennemDataSet(code="au",
                                 data=[],
                                 version=get_version(),
                                 created_at=datetime.now())

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    for network_region in network_regions:
        network = network_from_network_code(network_region.network.code)
        networks = None

        if network_region.code == "WEM":
            networks = [NetworkWEM, NetworkAPVI]

        scada_range: ScadaDateRange = get_scada_range(network=network,
                                                      networks=networks)

        time_series = TimeSeries(
            start=scada_range.start,
            end=scada_range.end,
            network=network,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        stat_set = energy_fueltech_daily(
            time_series=time_series,
            networks_query=networks,
            network_region_code=network_region.code,
        )

        if not stat_set:
            continue

        if network == NetworkNEM:
            interconnector_flows = energy_interconnector_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_flows)

            interconnector_emissions = energy_interconnector_emissions_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_emissions)

        all_monthly.append_set(stat_set)

        bom_station = get_network_region_weather_station(network_region.code)

        if bom_station:
            weather_stats = weather_daily(
                time_series=time_series,
                station_code=bom_station,
                network_region=network_region.code,
            )
            all_monthly.append_set(weather_stats)

    write_output("v3/stats/au/all/monthly.json", all_monthly)
Example #26
def stats_factory(
    stats: List[DataQueryResult],
    units: UnitDefinition,
    interval: TimeInterval,
    period: Optional[TimePeriod] = None,
    network: Optional[NetworkSchema] = None,
    timezone: Optional[Union[timezone, str]] = None,
    code: Optional[str] = None,
    region: Optional[str] = None,
    include_group_code: bool = False,
    fueltech_group: Optional[bool] = False,
    group_field: Optional[str] = None,
    data_id: Optional[str] = None,
    localize: Optional[bool] = True,
    include_code: Optional[bool] = True,
    cast_nulls: Optional[bool] = True,
) -> Optional[OpennemDataSet]:
    """
    Takes a list of data query results and returns OpennemDataSets

    @TODO optional groupby field
    @TODO multiple groupings / slight refactor

    """

    if network:
        timezone = network.get_timezone()

    group_codes = list(set([i.group_by for i in stats if i.group_by]))

    stats_grouped = []

    for group_code in group_codes:

        data_grouped: Dict[datetime, Any] = dict()

        for stat in stats:
            if stat.group_by != group_code:
                continue

            if stat.interval not in data_grouped:
                data_grouped[stat.interval] = None

            # if stat.result:
            data_grouped[stat.interval] = stat.result

        data_sorted = OrderedDict(sorted(data_grouped.items()))

        data_value = list(data_sorted.values())

        # Skip null series
        if len([i for i in data_value if i]) == 0:
            continue

        # @TODO possible bring this back
        # Skip zero series
        # if sum([i for i in data_value if i]) == 0:
        # continue

        # Cast trailing nulls
        if (not units.name.startswith("temperature") or
            (units.cast_nulls is True)) and (cast_nulls is True):
            data_value = cast_trailing_nulls(data_value)

        data_trimmed = dict(zip(data_sorted.keys(), data_value))

        data_trimmed = trim_nulls(data_trimmed)

        # Find start/end dates
        dates = list(data_trimmed.keys())

        if not dates:
            return None

        start = min(dates)
        end = max(dates)

        # should probably make sure these are the same TZ
        if localize:
            if timezone and not is_aware(start):
                start = make_aware(start, timezone)

            if timezone and not is_aware(end):
                end = make_aware(end, timezone)

            if timezone and localize and network and network.offset:
                tz = network.get_timezone()

                if tz:
                    start = start.astimezone(tz)
                    end = end.astimezone(tz)

        # Everything needs a timezone even flat dates
        if network and timezone and not is_aware(start):
            start = start.replace(tzinfo=network.get_fixed_offset())

        if network and timezone and not is_aware(end):
            end = end.replace(tzinfo=network.get_fixed_offset())

        # @TODO compose this and make it generic - some intervals
        # get truncated.
        # trunc the date for days and months
        if interval == human_to_interval("1d"):
            start = date_trunc(start, truncate_to="day")
            end = date_trunc(end, truncate_to="day")

        if interval == human_to_interval("1M"):
            start = date_trunc(start, truncate_to="month")
            end = date_trunc(end, truncate_to="month")

        # free
        dates = []

        history = OpennemDataHistory(
            start=start,
            last=end,
            interval=interval.interval_human,
            data=data_trimmed.values(),
        )

        data = OpennemData(
            data_type=units.unit_type,
            units=units.unit,
            # interval=interval,
            # period=period,
            history=history,
        )

        if include_code:
            data.code = group_code

        if network:
            data.network = network.code.lower()

        # *sigh* - not the most flexible model
        # @TODO fix this schema and make it more flexible
        if fueltech_group:
            data.fuel_tech = group_code

            data_comps = [
                # @NOTE disable for now since FE doesn't
                # support it
                network.country if network else None,
                network.code.lower() if network else None,
                region.lower()
                if region and region.lower() != network.code.lower()
                else None,
                "fuel_tech",
                group_code,
                units.unit_type,
            ]

            data.id = ".".join(i for i in data_comps if i)
            # @TODO make this an alias
            data.type = units.unit_type

        if group_field:
            group_fields = []

            # setattr(data, group_field, group_code)

            if network:
                group_fields.append(network.country.lower())
                group_fields.append(network.code.lower())

            if region:
                if region.lower() != network.code.lower():
                    group_fields.append(region.lower())

            if units.name_alias:
                group_fields.append(units.name_alias)

            elif units.unit_type:
                group_fields.append(units.unit_type)

            if group_code and include_group_code:
                group_fields.append(group_code)
                group_fields.append(group_field)

            data.id = ".".join([f for f in group_fields if f])
            data.type = units.unit_type

        if data_id:
            data.id = data_id

        if not data.id:
            _id_list = []

            # @NOTE disable for now since FE doesn't
            # support it
            # network.country if network else None,

            if network:
                _id_list.append(network.code.lower())

            if region and (region.lower() != network.code.lower()):
                _id_list.append(region.lower())

            if group_code:
                _id_list.append(group_code.lower())

            if units and units.name_alias:
                _id_list.append(units.name_alias)
            elif units and units.name:
                _id_list.append(units.name)

            data.id = ".".join([f for f in _id_list if f])
            data.type = units.unit_type

        if region:
            data.region = region

        stats_grouped.append(data)

    dt_now = datetime.now()

    if network:
        dt_now = dt_now.astimezone(network.get_timezone())

    # @NOTE this should probably be
    # country.network.region
    if not code:
        if network:
            code = network.code

        if region:
            code = region

    stat_set = OpennemDataSet(
        type=units.unit_type,
        data=stats_grouped,
        created_at=dt_now,
        version=get_version(),
    )

    if include_code:
        stat_set.code = code

    if network:
        stat_set.network = network.code

    if region:
        stat_set.region = region

    return stat_set
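
A minimal usage sketch for stats_factory under the same imports used throughout these examples; the DataQueryResult rows, the "coal_black" fueltech code and the "NSW1" region are hypothetical values for illustration.

from datetime import datetime

rows = [
    DataQueryResult(interval=datetime(2021, 1, 1, 0, 0), result=1250.5, group_by="coal_black"),
    DataQueryResult(interval=datetime(2021, 1, 1, 0, 5), result=1248.0, group_by="coal_black"),
]

stat_set = stats_factory(
    rows,
    network=NetworkNEM,
    interval=human_to_interval("5m"),
    period=human_to_period("7d"),
    units=get_unit("power"),
    region="NSW1",
    fueltech_group=True,
)

if stat_set:
    # one OpennemData series per distinct group_by value
    print(len(stat_set.data))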
Example #27
def price_network_region_api(
    engine=Depends(get_database_engine),
    network_code: str = Query(..., description="Network code"),
    network_region_code: str = Query(..., description="Region code"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    year: Optional[int] = None,
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)

    period_obj = None

    if period_human:
        period_obj = human_to_period(period_human)

    units = get_unit("price")

    scada_range = get_scada_range(network=network)

    if period_obj and period_obj.period_human == "all" and interval.interval_human == "1M":
        query = price_network_monthly(
            network=network,
            network_region_code=network_region_code,
            scada_range=scada_range,
        )
    else:
        query = price_network_region(
            network=network,
            network_region_code=network_region_code,
            interval=interval,
            period=period_obj,
            scada_range=scada_range,
            year=year,
        )

    with engine.connect() as c:
        results = list(c.execute(query))

    if len(results) < 1:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No data found")

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in results
    ]

    result = stats_factory(
        stats,
        code=network.code,
        region=network_region_code,
        network=network,
        interval=interval,
        period=period_obj,
        units=units,
        group_field="price",
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
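
The query selection above hinges on one rule: an "all" period at a monthly interval is served from the pre-aggregated monthly price query, everything else from the regular region price query. The sketch below lifts that rule into a standalone helper for clarity; _select_price_query is a hypothetical name, not part of the codebase.

def _select_price_query(network, network_region_code, interval, period_obj, scada_range, year=None):
    # "all" period at a monthly interval -> pre-aggregated monthly price query
    if period_obj and period_obj.period_human == "all" and interval.interval_human == "1M":
        return price_network_monthly(
            network=network,
            network_region_code=network_region_code,
            scada_range=scada_range,
        )

    # everything else -> regular region price query
    return price_network_region(
        network=network,
        network_region_code=network_region_code,
        interval=interval,
        period=period_obj,
        scada_range=scada_range,
        year=year,
    )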
Example #28
def export_energy(
    stats: Optional[List[StatExport]] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export energy stats from the export map


    """
    if not stats:
        export_map = get_export_map().get_by_stat_type(StatType.energy)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    CURRENT_YEAR = datetime.now().year

    logger.info(f"Running export_energy with {len(stats)} stats")

    for energy_stat in stats:
        if energy_stat.stat_type != StatType.energy:
            continue

        # @FIX trim to NEM since it's the one with the shortest
        # data time span.
        # @TODO find a better and more flexible way to do this in the
        # range method
        date_range_networks = energy_stat.networks or []

        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=energy_stat.network,
            networks=date_range_networks,
            energy=True)

        if not date_range:
            logger.error(
                "Skipping - Could not get date range for energy {} {}".format(
                    energy_stat.network, date_range_networks))
            continue

        logger.debug("Date range is: {} {} => {}".format(
            energy_stat.network.code, date_range.start, date_range.end))

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=energy_stat.network,
            year=energy_stat.year,
            interval=energy_stat.interval,
            period=human_to_period("1Y"),
        )

        if energy_stat.year:

            if latest and energy_stat.year != CURRENT_YEAR:
                continue

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=energy_stat.network_region,
                networks=energy_stat.networks)
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

            if energy_stat.bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=energy_stat.bom_station,
                        network_region=energy_stat.network_region,
                    )
                    stat_set.append_set(weather_stats)
                except NoResults as e:
                    logger.info("No results for weather result: {}".format(e))
                except Exception as e:
                    logger.error("weather_stat exception: {}".format(e))
            else:
                logger.info("Stat set has no bom station")

            write_output(energy_stat.path, stat_set)

        elif energy_stat.period and energy_stat.period.period_human == "all" and not latest:
            time_series.period = human_to_period("all")
            time_series.interval = human_to_interval("1M")
            time_series.year = None

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=energy_stat.network_region,
                networks=energy_stat.networks)
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

            if energy_stat.bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=energy_stat.bom_station,
                        network_region=energy_stat.network_region,
                    )
                    stat_set.append_set(weather_stats)
                except NoResults as e:
                    logger.info("No weather results: {}".format(e))
                except Exception:
                    # swallow unexpected weather errors so the export itself still completes
                    pass

            write_output(energy_stat.path, stat_set)
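
A minimal sketch of invoking export_energy, assuming the PriorityType and StatType enums used in these examples; calling it with no arguments falls back to the full energy export map, and latest=True restricts yearly exports to the current year.

# Export only the daily-priority energy resources, current year only
export_energy(priority=PriorityType.daily, latest=True)

# Or pass an explicit resource list taken from the export map
energy_resources = get_export_map().get_by_stat_type(StatType.energy).resources
export_energy(stats=energy_resources)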
Example #29
def get_export_map() -> StatMetadata:
    """
    Generates a map of all export JSONs

    """
    session = SessionLocal()

    networks = session.query(Network).filter(Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU, networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range")

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=NetworkAU.get_interval(),
            period=human_to_period("7d"),
        )

        _exmap.append(export)

        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=country,
                date_range=scada_range,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                interval=human_to_interval("1d"),
                period=human_to_period("1Y"),
            )
            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )
        _exmap.append(export)

    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)
        bom_station = get_network_region_weather_station(network.code)

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=network_schema.get_interval(),
            period=human_to_period("7d"),
        )

        if network.code == "WEM":
            export.networks = [NetworkWEM, NetworkAPVI]
            export.network_region_query = "WEM"

        _exmap.append(export)

        if not scada_range:
            raise Exception("Require a scada range")

        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                bom_station=bom_station,
                year=year,
                period=human_to_period("1Y"),
                interval=human_to_interval("1d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        if network.code == "WEM":
            export.networks = [NetworkWEM, NetworkAPVI]
            export.network_region_query = "WEM"

        _exmap.append(export)

        # Skip cases like wem/wem where the region is superfluous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema, network_region=region)
            bom_station = get_network_region_weather_station(region.code)

            if not scada_range:
                raise Exception("Require a scada range")

            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.live,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("7d"),
                interval=network_schema.get_interval(),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

            for year in range(
                datetime.now().year,
                scada_range.start.year - 1,
                -1,
            ):
                export = StatExport(
                    stat_type=StatType.energy,
                    priority=PriorityType.daily,
                    country=network.country,
                    date_range=scada_range,
                    network=network_schema,
                    network_region=region.code,
                    bom_station=bom_station,
                    year=year,
                    period=human_to_period("1Y"),
                    interval=human_to_interval("1d"),
                )
                _exmap.append(export)

            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.monthly,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("all"),
                interval=human_to_interval("1M"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

    export_meta = StatMetadata(
        date_created=datetime.now(), version=get_version(), resources=_exmap
    )

    return export_meta
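
A minimal sketch of consuming the map built above, assuming the StatExport fields shown in these examples; the "NEM" network code filter is only an illustration.

export_map = get_export_map()

# All monthly energy exports for the NEM network
monthly_nem = [
    resource
    for resource in export_map.get_by_stat_type(StatType.energy).resources
    if resource.priority == PriorityType.monthly and resource.network.code == "NEM"
]

for resource in monthly_nem:
    print(resource.path, resource.network_region)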
Example #30
def export_power(
    stats: Optional[List[StatExport]] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export power stats from the export map


    """

    # Not passed a stat map so go and get one
    if not stats:
        export_map = None

        if priority and priority == PriorityType.history:
            export_map = get_weekly_export_map()

        else:
            export_map = get_export_map().get_by_stat_type(StatType.power)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    output_count: int = 0

    logger.info("Running {}export {} with {} stats".format(
        "latest " if latest else "", priority, len(stats)))

    for power_stat in stats:
        if power_stat.stat_type != StatType.power:
            continue

        if output_count >= 1 and latest:
            return None

        date_range_networks = power_stat.networks or []

        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=power_stat.network, networks=date_range_networks)

        logger.debug("Date range for {}: {} => {}".format(
            power_stat.network.code, date_range.start, date_range.end))

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=power_stat.network,
            year=power_stat.year,
            interval=power_stat.interval,
            period=power_stat.period,
        )

        stat_set = power_week(
            time_series=time_series,
            network_region_code=power_stat.network_region_query
            or power_stat.network_region,
            networks_query=power_stat.networks,
        )

        if not stat_set:
            logger.info("No power stat set for {} {} {}".format(
                power_stat.period,
                power_stat.networks,
                power_stat.network_region,
            ))
            continue

        demand_set = demand_week(
            time_series=time_series,
            networks_query=power_stat.networks,
            network_region_code=power_stat.network_region_query
            or power_stat.network_region,
        )

        stat_set.append_set(demand_set)

        if power_stat.network_region:
            flow_set = power_flows_region_week(
                time_series=time_series,
                network_region_code=power_stat.network_region,
            )

            if flow_set:
                stat_set.append_set(flow_set)

        time_series_weather = time_series.copy()
        time_series_weather.interval = human_to_interval("30m")

        if power_stat.bom_station:
            try:
                weather_set = weather_daily(
                    time_series=time_series_weather,
                    station_code=power_stat.bom_station,
                    network_region=power_stat.network_region,
                    include_min_max=False,
                    unit_name="temperature",
                )
                stat_set.append_set(weather_set)
            except Exception:
                # swallow weather failures so the power export still completes
                pass

        write_output(power_stat.path, stat_set)
        output_count += 1