def get_date_range(network: NetworkSchema) -> DatetimeRange:
    """Return the datetime range of available scada data for *network*,
    bucketed daily over the full history.

    Args:
        network: the network whose scada range is queried.

    Returns:
        DatetimeRange covering the network's full scada history.
    """
    # BUG FIX: previously hard-coded NetworkNEM regardless of the `network`
    # argument; use the requested network's scada range instead.
    date_range = get_scada_range(network=network)

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        interval=human_to_interval("1d"),
        period=human_to_period("all"),
        network=network,
    )

    return time_series.get_range()
def emission_factor_region_query(
    time_series: TimeSeries, network_region_code: Optional[str] = None
) -> str:
    """Build the SQL returning average emission factors per region.

    Averages ``emissions_per_mw`` from the 45-day materialized view,
    grouped by trading interval (converted to network-local time) and
    network region.

    Args:
        time_series: query window, network and timezone settings.
        network_region_code: optional region filter.

    Returns:
        The formatted SQL string.

    NOTE(review): the ``* 2`` on the average presumably converts 30-minute
    readings to an hourly rate — confirm against the view definition.
    NOTE(review): values are interpolated straight into the SQL text; the
    inputs come from internal schema objects, but parameterized queries
    would be safer.
    """
    __query = """
        select
            f.trading_interval at time zone '{timezone}' as ti,
            f.network_region,
            avg(f.emissions_per_mw) * 2
        from mv_region_emissions_45d f
        where
            f.network_id='{network_id}' and
            {network_region_query}
            f.trading_interval <= '{date_max}' and
            f.trading_interval >= '{date_min}'
        group by 1, 2
        order by 1 asc;
    """

    network_region_query = ""

    if network_region_code:
        network_region_query = f"f.network_region='{network_region_code}' and"

    date_range = time_series.get_range()

    query = dedent(
        __query.format(
            network_region_query=network_region_query,
            network_id=time_series.network.code,
            # removed unused `trunc` argument: the template has no {trunc}
            date_max=date_range.end,
            date_min=date_range.start,
            timezone=time_series.network.timezone_database,
        )
    )

    return query
def power_network_region_fueltech(
    network_code: str = Query(..., description="Network code"),
    network_region_code: str = Query(..., description="Network region code"),
    month: date = Query(datetime.now().date(), description="Month to query"),
) -> OpennemDataSet:
    """API endpoint: power by fueltech for one network region for a month,
    including facility capacities.

    Raises:
        Exception: when the network, scada range or results are missing.

    NOTE(review): the default month is evaluated once at import time, not
    per request — confirm this is intended.
    """
    network = network_from_network_code(network_code)

    # BUG FIX: the None-check previously ran *after* network.get_interval(),
    # so a missing network crashed with AttributeError before being reported.
    if not network:
        raise Exception("Network not found")

    interval_obj = network.get_interval()
    period_obj = human_to_period("1M")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise Exception("Require a scada range")

    time_series = TimeSeries(
        start=scada_range.start,
        month=month,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    stat_set = power_week(time_series, network_region_code, include_capacities=True)

    if not stat_set:
        raise Exception("No results")

    return stat_set
def export_flows() -> None:
    """Export the last 7 days of NEM interconnector flow power data to
    ``v3/stats/au/<network>/flows/7d.json``.
    """
    date_range = get_scada_range(network=NetworkNEM)

    # BUG FIX: get_scada_range can return None (e.g. no scada data yet);
    # previously this crashed below with AttributeError on date_range.start.
    if not date_range:
        raise Exception("export_flows: could not get scada range for NEM")

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("7d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if stat_set:
        write_output(f"v3/stats/au/{interchange_stat.network.code}/flows/7d.json", stat_set)
def energy_facility_query(time_series: TimeSeries, facility_codes: List[str]) -> str:
    """Return the SQL for daily energy, market value and emissions for the
    given facility codes over the time series range.
    """
    sql_template = """
        select
            date_trunc('{trunc}', t.trading_day at time zone '{timezone}') as trading_day,
            t.facility_code,
            sum(t.energy) as fueltech_energy,
            sum(t.market_value) as fueltech_market_value,
            sum(t.emissions) as fueltech_emissions
        from at_facility_daily t
        where
            t.trading_day <= '{date_max}' and
            t.trading_day >= '{date_min}' and
            t.facility_code in ({facility_codes_parsed})
        group by 1, 2
        order by trading_day desc;
    """

    query_range = time_series.get_range()

    # Collect the substitutions up front, then render in one pass.
    substitutions = {
        "facility_codes_parsed": duid_in_case(facility_codes),
        "trunc": time_series.interval.trunc,
        "date_max": query_range.end,
        "date_min": query_range.start,
        "timezone": time_series.network.timezone_database,
    }

    return dedent(sql_template.format(**substitutions))
def network_fueltech_demand_query(time_series: TimeSeries) -> str:
    """Build the SQL for per-fueltech energy and demand totals over the
    trailing 24 hours of the time series range.

    Energy is summed ``eoi_quantity`` scaled by 1/1000; demand is the sum
    of ``balancing_summary.demand_total`` joined on the same intervals.
    Only GENERATOR dispatch types are included.
    """
    __query = """
        select
            f.fueltech_id,
            round(sum(fs.eoi_quantity) / 1000, 2) as energy,
            sum(bs.demand_total) as demand
        from facility_scada fs
        left join balancing_summary bs
            on bs.trading_interval = fs.trading_interval
            and bs.network_id = fs.network_id
        left join facility f on fs.facility_code = f.code
        join fueltech ft on f.fueltech_id = ft.code
        where
            fs.trading_interval >= '{date_min}' and
            fs.trading_interval < '{date_max}' and
            fs.network_id = '{network_id}' and
            f.dispatch_type = 'GENERATOR'
        group by 1;
    """

    date_range = time_series.get_range()

    # Window is the trailing day of the range: [end - 1 day, end)
    date_min: datetime = date_range.end - timedelta(days=1)

    query = dedent(
        __query.format(
            network_id=time_series.network.code,
            # removed unused `trunc` and `timezone` arguments: the template
            # has no matching placeholders
            date_max=date_range.end,
            date_min=date_min,
        )
    )

    return query
def price_network_query(
    time_series: TimeSeries,
    group_field: str = "bs.network_id",
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> str:
    """Build the SQL for average price per interval from balancing_summary,
    gap-filled to the time series interval.

    Grouping: by network id by default; by network region when
    network_region is given; collapsed to the literal 'AU' when more than
    one network is queried. Note the network-count check runs *last*, so
    it overrides the region grouping — the statement order here matters.
    """
    # Ensure the primary network is always part of the filter list.
    # NOTE(review): appends to the caller-supplied list in place.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    __query = """
    select
        time_bucket_gapfill('{trunc}', bs.trading_interval) as trading_interval,
        {group_field},
        avg(bs.price) as price
    from balancing_summary bs
    where
        bs.trading_interval <= '{date_max}' and
        bs.trading_interval >= '{date_min}' and
        {network_query}
        {network_region_query}
        1=1
    group by 1, 2
    order by 1 desc
    """

    timezone = time_series.network.timezone_database

    network_region_query = ""

    if network_region:
        network_region_query = f"bs.network_region='{network_region}' and "
        group_field = "bs.network_region"

    network_query = "bs.network_id IN ({}) and ".format(networks_to_in(networks_query))

    # Multi-network queries are reported as a single country-level series.
    if len(networks_query) > 1:
        group_field = "'AU'"

    date_max = time_series.get_range().end
    date_min = time_series.get_range().start

    query = dedent(
        __query.format(
            network_query=network_query,
            trunc=time_series.interval.interval_sql,
            network_region_query=network_region_query,
            # NOTE(review): template has no {timezone} placeholder; str.format
            # ignores the extra keyword, so this is harmless but unused.
            timezone=timezone,
            date_max=date_max,
            date_min=date_min,
            group_field=group_field,
        )
    )

    return query
def energy_network_fueltech_query(
    time_series: TimeSeries,
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
    coalesce_with: Optional[int] = 0,
) -> str:
    """
    Get Energy for a network or network + region
    based on a year
    """
    # Normalize the network list, always including the primary network.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    sql = """
    select
        date_trunc('{trunc}', t.trading_day),
        t.fueltech_id,
        coalesce(sum(t.energy) / 1000, {coalesce_with}) as fueltech_energy,
        coalesce(sum(t.market_value), {coalesce_with}) as fueltech_market_value,
        coalesce(sum(t.emissions), {coalesce_with}) as fueltech_emissions
    from at_facility_daily t
    left join facility f on t.facility_code = f.code
    where
        t.trading_day <= '{date_max}'::date and
        t.trading_day >= '{date_min}'::date and
        t.fueltech_id not in ('imports', 'exports', 'interconnector') and
        {network_query}
        {network_region_query}
        1=1
    group by 1, 2
    order by 1 desc;
    """

    query_range = time_series.get_range()

    region_clause = ""
    if network_region:
        region_clause = f"f.network_region='{network_region}' and"

    networks_in = networks_to_in(networks_query)

    return dedent(
        sql.format(
            trunc=query_range.interval.trunc,
            date_min=query_range.start.date(),
            date_max=query_range.end.date(),
            network_query="t.network_id IN ({}) and ".format(networks_in),
            network_region_query=region_clause,
            coalesce_with=coalesce_with,
        )
    )
def demand_network_region_query(
    time_series: TimeSeries,
    network_region: str | None,
    networks: list[NetworkSchema] | None = None,
) -> str:
    """Get the network demand energy and market_value

    Args:
        time_series: window, interval and primary network.
        network_region: optional region filter; when set, results are
            additionally grouped by region.
        networks: optional additional networks to include.

    Returns:
        The formatted SQL string.
    """
    # BUG FIX: previously declared a mutable default argument (networks=[])
    # and appended the primary network to the caller's list in place.
    # Copy defensively instead.
    networks = list(networks) if networks else [time_series.network]

    if time_series.network not in networks:
        networks.append(time_series.network)

    ___query = """
    select
        date_trunc('{trunc}', trading_day) as trading_day,
        network_id,
        {network_region_select}
        round(sum(demand_energy), 4),
        round(sum(demand_market_value), 4)
    from at_network_demand
    where
        {network_query}
        {network_region}
        trading_day >= '{date_min}'::date and
        trading_day <= '{date_max}'::date
    group by 1,2 {group_by}
    order by 1 asc
    """

    network_region_query = ""
    # Default: report the network code in the region column and keep the
    # two-column grouping.
    network_region_select = f"'{time_series.network.code}' as network_region,"
    group_by = ""

    if network_region:
        network_region_query = f"network_region='{network_region}' and"
        network_region_select = "network_region,"
        group_by = ",3"

    date_range = time_series.get_range()

    networks_list = networks_to_in(networks)
    network_query = "network_id IN ({}) and ".format(networks_list)

    query = dedent(
        ___query.format(
            trunc=time_series.interval.trunc,
            date_min=date_range.start.date(),
            date_max=date_range.end.date(),
            # removed unused `network_id` argument: no {network_id} in template
            network_region=network_region_query,
            network_region_select=network_region_select,
            group_by=group_by,
            network_query=network_query,
        )
    )

    return query
def energy_network_interconnector_emissions_query(
    time_series: TimeSeries,
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> str:
    """
    Get emissions for a network or network + region
    based on a year
    """
    # Normalize the network list, always including the primary network.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    sql = """
    select
        t.trading_interval at time zone '{timezone}' as trading_interval,
        t.flow_from,
        t.flow_to,
        t.flow_energy as energy,
        t.flow_from_emissions,
        t.flow_to_emissions
    from vw_region_flow_emissions t
    where
        t.trading_interval <= '{date_max}' and
        t.trading_interval >= '{date_min}' and
        {network_region_query}
        1=1
    order by 1 desc
    """

    query_range = time_series.get_range()

    # Match flows in either direction for the requested region.
    region_clause = ""
    if network_region:
        region_clause = f"""
            (t.flow_from='{network_region}' or t.flow_to='{network_region}') and
        """

    return dedent(
        sql.format(
            timezone=time_series.network.timezone_database,
            date_min=query_range.start,
            date_max=query_range.end,
            network_region_query=region_clause,
        )
    )
def network_demand_query(
    time_series: TimeSeries,
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> str:
    """Build the SQL for maximum demand per interval from balancing_summary,
    grouped by network id (and region when a region filter is given).
    """
    # Normalize the network list, always including the primary network.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    sql = """
    select
        trading_interval at time zone '{timezone}',
        network_id,
        max(demand_total) as demand
    from balancing_summary bs
    where
        bs.trading_interval <= '{date_max}' and
        bs.trading_interval >= '{date_min}' and
        {network_query}
        {network_region_query}
        1=1
    group by
        1, {groups_additional}
    order by 1 desc;
    """

    grouping_columns = ["network_id"]
    region_clause = ""

    if network_region:
        grouping_columns.append("network_region")
        region_clause = f"bs.network_region = '{network_region}' and "

    query_range = time_series.get_range()

    rendered = sql.format(
        timezone=time_series.network.timezone_database,
        date_max=query_range.end,
        date_min=query_range.start,
        network_id=time_series.network.code,
        network_query="bs.network_id IN ({}) and ".format(networks_to_in(networks_query)),
        network_region_query=region_clause,
        groups_additional=", ".join(grouping_columns),
    )

    return dedent(rendered)
def energy_network_flow_query(
    time_series: TimeSeries,
    network_region: str,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> str:
    """
    Get emissions for a network or network + region
    based on a year
    """
    # Normalize the network list, always including the primary network.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    sql = """
    select
        date_trunc('{trunc}', t.trading_interval) as trading_interval,
        sum(t.imports_energy),
        sum(t.exports_energy),
        abs(sum(t.imports_market_value_rrp)),
        abs(sum(t.exports_market_value_rrp))
    from
    (
        select
            ei.trading_interval,
            ei.imports_energy,
            ei.exports_energy,
            ei.imports_market_value,
            ei.exports_market_value,
            ei.imports_market_value_rrp,
            ei.exports_market_value_rrp
        from mv_interchange_energy_nem_region ei
        where
            ei.trading_interval <= '{date_max}' and
            ei.trading_interval >= '{date_min}' and
            ei.network_region='{network_region}'
    ) as t
    group by 1
    order by 1 desc
    """

    query_range = time_series.get_range()

    substitutions = {
        "network_region": network_region,
        "trunc": query_range.interval.trunc,
        "date_min": query_range.start,
        "date_max": query_range.end,
    }

    return dedent(sql.format(**substitutions))
def interconnector_flow_network_regions_query(
    time_series: TimeSeries, network_region: Optional[str] = None
) -> str:
    """Build the SQL for interconnector power flows between network regions.

    Returns one row per trading interval (in network-local time) and
    directed region pair (e.g. 'NSW1->QLD1') with generated power summed
    per pair.

    Args:
        time_series: window, network and timezone settings.
        network_region: optional source-region filter.
    """
    __query = """
    select
        fs.trading_interval at time zone '{timezone}' as trading_interval,
        f.network_region || '->' || f.interconnector_region_to as flow_region,
        f.network_region,
        f.interconnector_region_to,
        sum(fs.generated) as flow_power
    from facility_scada fs
    left join facility f on fs.facility_code = f.code
    where
        f.interconnector is True
        and f.network_id='{network_id}'
        and fs.trading_interval <= '{date_end}'
        and fs.trading_interval >= '{date_start}'
        {region_query}
    group by 1, 2, 3, 4
    order by
        1 desc,
        2 asc
    """

    region_query = ""

    if network_region:
        region_query = f"and f.network_region='{network_region}'"

    # Hoisted: get_range() was previously evaluated twice.
    date_range = time_series.get_range()

    query = __query.format(
        timezone=time_series.network.timezone_database,
        network_id=time_series.network.code,
        region_query=region_query,
        date_start=date_range.start,
        date_end=date_range.end,
    )

    return dedent(query)
def power_network_region_fueltech(
    network_code: str = Query(..., description="Network code"),
    network_region_code: str = Query(..., description="Network region code"),
    month: date = Query(datetime.now().date(), description="Month to query"),
) -> OpennemDataSet:
    """API endpoint: power by fueltech for one network region for a month,
    including facility capacities and rooftop companion networks.

    NOTE(review): the default month is evaluated once at import time, not
    per request — confirm this is intended.
    """
    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = network.get_interval()
    period_obj = human_to_period("1M")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise Exception("Require a scada range")

    if not network:
        # NOTE(review): likely unreachable — a failed lookup raises above
        # and network.get_interval() would already have crashed on None.
        raise Exception("Network not found")

    # Rooftop data comes from companion networks: AEMO rooftop (+ backfill)
    # for NEM, APVI for WEM.
    networks = [network]

    if network == NetworkNEM:
        networks.append(NetworkAEMORooftop)
        networks.append(NetworkAEMORooftopBackfill)
    elif network == NetworkWEM:
        networks.append(NetworkAPVI)

    time_series = TimeSeries(
        start=scada_range.start,
        month=month,
        network=network,
        networks=networks,
        interval=interval_obj,
        period=period_obj,
    )

    stat_set = power_week(
        time_series, network_region_code, include_capacities=True, networks_query=networks
    )

    if not stat_set:
        raise Exception("No results")

    return stat_set
def test_schema_timeseries(
    ts: TimeSeries,
    start_expected: Union[datetime, date],
    end_expected: Union[datetime, date],
    interval_expected: str,
    length_expected: int,
) -> None:
    """Parametrized check that TimeSeries.get_range() yields the expected
    start, end and interval truncation.

    NOTE(review): length_expected is accepted but never asserted — confirm
    whether a range-length assertion is missing.
    """
    subject_daterange = ts.get_range()

    # Compare both the string form and the raw value to catch timezone /
    # representation differences as well as equality.
    assert str(subject_daterange.start) == str(start_expected), "Start string matches"
    assert subject_daterange.start == start_expected, "Start matches"
    assert str(subject_daterange.end) == str(end_expected), "End string matches"
    assert subject_daterange.end == end_expected, "End matches"
    assert subject_daterange.trunc == interval_expected, "Interval matches"
def power_facility_query(
    time_series: TimeSeries,
    facility_codes: List[str],
) -> str:
    """Build the SQL for average power per facility, gap-filled to the
    time series interval.

    The inner query buckets facility_scada generation per facility; the
    outer query averages the buckets with intervals converted to
    network-local time.
    """
    __query = """
    select
        t.trading_interval at time zone '{timezone}',
        coalesce(avg(t.facility_power), 0),
        t.facility_code
    from (
        select
            time_bucket_gapfill('{trunc}', fs.trading_interval) AS trading_interval,
            coalesce(
                avg(fs.generated), 0
            ) as facility_power,
            fs.facility_code
        from facility_scada fs
        join facility f on fs.facility_code = f.code
        where
            fs.trading_interval <= '{date_max}' and
            fs.trading_interval > '{date_min}' and
            fs.facility_code in ({facility_codes_parsed})
        group by 1, 3
    ) as t
    group by 1, 3
    order by 1 desc
    """

    date_range = time_series.get_range()

    query = __query.format(
        facility_codes_parsed=duid_in_case(facility_codes),
        trunc=time_series.interval.interval_sql,
        # removed unused `period` argument: the template has no {period}
        timezone=time_series.network.timezone_database,
        date_max=date_range.end,
        date_min=date_range.start,
    )

    return query
def network_region_price_query(time_series: TimeSeries) -> str:
    """Build the SQL for per-region price over the trailing day of the
    time series range, falling back to the dispatch price when the
    trading price is null.
    """
    sql = """
    select
        time_bucket('{trunc}', bs.trading_interval) as trading_interval,
        bs.network_id,
        bs.network_region,
        coalesce(avg(bs.price), avg(bs.price_dispatch)) as price
    from balancing_summary bs
    where
        bs.trading_interval >= '{date_min}'
        and bs.trading_interval <= '{date_max}'
        and bs.network_id = '{network_id}'
        {network_regions_query}
    group by 1, 2, 3;
    """

    query_range = time_series.get_range()

    # Window is the trailing day of the range.
    window_start: datetime = query_range.end - timedelta(days=1)

    regions_clause = ""
    if time_series.network.regions:
        codes = ",".join(f"'{region.code}'" for region in time_series.network.regions)
        regions_clause = f"and bs.network_region IN ({codes})"

    return dedent(
        sql.format(
            network_id=time_series.network.code,
            trunc=time_series.interval.interval_human,
            date_max=query_range.end,
            date_min=window_start,
            network_regions_query=regions_clause,
        )
    )
def emission_factor_per_network(  # type: ignore
    engine=Depends(get_database_engine),  # type: ignore
    network_code: str = Query(..., description="Network code"),
    interval: str = Query("30m", description="Interval size"),
) -> Optional[OpennemDataSet]:
    """API endpoint: emission factors per region for a network over the
    last 7 days, as a stats set.
    """
    # NOTE(review): the injected engine parameter is immediately shadowed
    # here — the Depends() default is effectively unused.
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval(interval)
    period_obj = human_to_period("7d")

    if not interval_obj:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Invalid interval size")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = emission_factor_region_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    # Row shape from the query: (interval, region, factor)
    emission_factors = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        emission_factors,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("emissions_factor"),
        group_field="emission_factor",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
from opennem.schema.dates import TimeSeries from opennem.schema.network import NetworkNEM @pytest.mark.parametrize( [ "ts", "start_expected", "end_expected", "interval_expected", "length_expected" ], [ # Test 1 hour inclusive ( TimeSeries( start=datetime.fromisoformat("2021-01-15 12:00:00+00:00"), end=datetime.fromisoformat("2021-01-15 13:00:00+00:00"), network=NetworkNEM, interval=NetworkNEM.get_interval(), period=human_to_period("1h"), ), # Also testing timezone shift from UTC to NEM time datetime.fromisoformat("2021-01-15 22:00:00+10:00"), datetime.fromisoformat("2021-01-15 23:00:00+10:00"), "5m", 13, # number of 5 minute intervals in an hour _inclusive_ ), # Test 1 week inclusive ( TimeSeries( start=datetime.fromisoformat("1997-05-05 12:45:00+00:00"), end=datetime.fromisoformat("2021-01-15 12:45:00+00:00"), network=NetworkNEM,
def power_week(
    time_series: TimeSeries,
    network_region_code: str = None,
    networks_query: Optional[List[NetworkSchema]] = None,
    include_capacities: bool = False,
    include_code: Optional[bool] = True,
) -> Optional[OpennemDataSet]:
    """Assemble the power stat set for a network (and optional region).

    Combines: per-fueltech power, 30-minute prices, rooftop solar and a
    short rooftop forecast spliced onto the rooftop series. Returns None
    (after logging an error) when the base power query yields no rows or
    the stats factory produces no result.
    """
    engine = get_database_engine()

    query = power_network_fueltech_query(
        time_series=time_series,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    # Row shape: (interval, fueltech_code, power)
    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error("No results from power week query with {}".format(time_series))
        return None

    result = stats_factory(
        stats,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=time_series.interval,
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
        include_code=include_code,
    )

    if not result:
        logger.error("No results from power week status factory with {}".format(time_series))
        return None

    # Attach current installed capacity per fueltech when requested.
    if include_capacities and network_region_code:
        region_fueltech_capacities = get_facility_capacities(
            time_series.network, network_region_code
        )

        for ft in result.data:
            if ft.fuel_tech in region_fueltech_capacities:
                ft.x_capacity_at_present = region_fueltech_capacities[ft.fuel_tech]

    # price: always queried at a 30 minute interval regardless of the
    # requested series interval
    time_series_price = time_series.copy()
    time_series_price.interval = human_to_interval("30m")

    query = price_network_query(
        time_series=time_series_price,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats_price = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    stats_market_value = stats_factory(
        stats=stats_price,
        code=network_region_code or time_series.network.code.lower(),
        units=get_unit("price_energy_mega"),
        network=time_series.network,
        interval=human_to_interval("30m"),
        region=network_region_code,
        period=time_series.period,
        include_code=include_code,
    )

    result.append_set(stats_market_value)

    # rooftop solar, also at 30 minute intervals
    time_series_rooftop = time_series.copy()
    time_series_rooftop.interval = human_to_interval("30m")

    query = power_network_rooftop_query(
        time_series=time_series_rooftop,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    rooftop_power = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    rooftop = stats_factory(
        rooftop_power,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=human_to_interval("30m"),
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
        include_code=include_code,
        cast_nulls=False,
    )

    # rooftop forecast: a forward window starting at the last observed
    # rooftop interval
    rooftop_forecast = None

    if rooftop and rooftop.data and len(rooftop.data) > 0:
        time_series_rooftop_forecast = time_series_rooftop.copy()
        time_series_rooftop_forecast.start = rooftop.data[0].history.last
        time_series_rooftop_forecast.forecast = True

        query = power_network_rooftop_query(
            time_series=time_series_rooftop_forecast,
            networks_query=networks_query,
            network_region=network_region_code,
            forecast=True,
        )

        with engine.connect() as c:
            logger.debug(query)
            row = list(c.execute(query))

        rooftop_forecast_power = [
            DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
            for i in row
        ]

        rooftop_forecast = stats_factory(
            rooftop_forecast_power,
            # code=network_region_code or network.code,
            network=time_series.network,
            interval=human_to_interval("30m"),
            period=time_series.period,
            units=get_unit("power"),
            region=network_region_code,
            fueltech_group=True,
            include_code=include_code,
            cast_nulls=False,
        )

    # Splice the forecast history onto the observed rooftop series.
    if rooftop and rooftop_forecast:
        if (
            hasattr(rooftop, "data")
            and len(rooftop.data) > 0
            and rooftop_forecast.data
            and len(rooftop_forecast.data) > 0
        ):
            rooftop.data[0].forecast = rooftop_forecast.data[0].history

    result.append_set(rooftop)

    return result
def export_electricitymap() -> None:
    """Export the combined NEM + WEM live power dataset consumed by
    electricityMap to ``v3/clients/em/latest.json``.
    """
    date_range = get_scada_range(network=NetworkNEM)

    if not date_range.start:
        raise Exception("Could not get a scada range in EM export")

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("1d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        networks=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if not stat_set:
        raise Exception("No flow results for electricitymap export")

    em_set = OpennemDataSet(type="custom", version=get_version(), created_at=datetime.now(), data=[])

    # These interconnector flows are exported with the opposite sign.
    INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"]

    for ds in stat_set.data:
        if ds.code in INVERT_SETS:
            ds_inverted = invert_flow_set(ds)
            em_set.data.append(ds_inverted)
            logging.info("Inverted {}".format(ds.code))
        else:
            em_set.data.append(ds)

    # Per-region NEM power sets, including rooftop companion networks.
    for region in ["NSW1", "QLD1", "VIC1", "TAS1", "SA1"]:
        power_set = power_week(
            time_series,
            region,
            include_capacities=True,
            include_code=False,
            networks_query=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        )

        if power_set:
            em_set.append_set(power_set)

    date_range = get_scada_range(network=NetworkWEM)

    # WEM custom
    # NOTE(review): unlike the NEM range above, this result is used without
    # a None/start check — confirm WEM scada data is always present.
    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=NetworkWEM,
        networks=[NetworkWEM, NetworkAPVI],
        interval=NetworkWEM.get_interval(),
        period=interchange_stat.period,
    )

    power_set = power_week(
        time_series,
        "WEM",
        include_capacities=True,
        networks_query=[NetworkWEM, NetworkAPVI],
        include_code=False,
    )

    if power_set:
        em_set.append_set(power_set)

    write_output("v3/clients/em/latest.json", em_set)
def power_network_rooftop_query(
    time_series: TimeSeries,
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
    forecast: bool = False,
) -> str:
    """Query power stats

    Builds the SQL for rooftop solar power at fixed 30-minute buckets,
    optionally selecting forecast rows and a short forward window.
    """
    # Normalize the network list, always including the primary network.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    __query = """
    select
        time_bucket_gapfill('30 minutes', fs.trading_interval) AS trading_interval,
        ft.code as fueltech_code,
        {agg_func}(fs.generated) as facility_power
    from facility_scada fs
    join facility f on fs.facility_code = f.code
    join fueltech ft on f.fueltech_id = ft.code
    where
        {forecast_query}
        f.fueltech_id = 'solar_rooftop' and
        {network_query}
        {network_region_query}
        fs.trading_interval <= '{date_max}' and
        fs.trading_interval >= '{date_min}'
    group by 1, 2
    order by 1 desc
    """

    network_region_query: str = ""
    wem_apvi_case: str = ""
    agg_func = "sum"

    # NOTE(review): timezone is passed to format() below but the template
    # has no {timezone} placeholder — unused.
    timezone: str = time_series.network.timezone_database

    # Interpolates the Python bool as SQL `is True` / `is False`.
    forecast_query = f"fs.is_forecast is {forecast} and"

    if network_region:
        network_region_query = f"f.network_region='{network_region}' and "

    if NetworkWEM in networks_query:
        # silly single case we'll refactor out
        # APVI network is used to provide rooftop for WEM so we require it
        # in country-wide totals
        wem_apvi_case = "or (f.network_id='APVI' and f.network_region='WEM')"

        # NOTE(review): max() rather than sum() here — presumably because
        # the WEM and APVI rooftop rows overlap; confirm.
        agg_func = "max"

    network_query = "(f.network_id IN ({}) {}) and ".format(
        networks_to_in(networks_query), wem_apvi_case
    )

    date_max = time_series.get_range().end
    date_min = time_series.get_range().start

    if forecast:
        # @TODO work out what in get_range is mashing this
        date_min = time_series.start + timedelta(minutes=30)
        date_max = date_min + timedelta(hours=3)

    query = dedent(
        __query.format(
            network_query=network_query,
            network_region_query=network_region_query,
            timezone=timezone,
            date_max=date_max,
            date_min=date_min,
            forecast_query=forecast_query,
            agg_func=agg_func,
        )
    )

    return query
def power_network_fueltech_query(
    time_series: TimeSeries,
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> str:
    """Query power stats

    Builds the SQL for generated power per fueltech, gap-filled to the
    time series interval, excluding flow pseudo-fueltechs (and NEM rooftop,
    which is sourced from the dedicated rooftop query).
    """
    # Normalize the network list, always including the primary network.
    if not networks_query:
        networks_query = [time_series.network]

    if time_series.network not in networks_query:
        networks_query.append(time_series.network)

    __query = """
    select
        time_bucket_gapfill('{trunc}', fs.trading_interval) AS trading_interval,
        ft.code as fueltech_code,
        coalesce(sum(fs.generated), 0) as fueltech_power
    from facility_scada fs
    join facility f on fs.facility_code = f.code
    join fueltech ft on f.fueltech_id = ft.code
    where
        fs.is_forecast is False and
        f.fueltech_id is not null and
        f.fueltech_id not in ({fueltechs_exclude}) and
        {network_query}
        {network_region_query}
        fs.trading_interval <= '{date_max}' and
        fs.trading_interval >= '{date_min}'
        {fueltech_filter}
    group by 1, 2
    """

    network_region_query: str = ""
    # NOTE(review): fueltech_filter is never set to anything but "" in this
    # function — the placeholder appears to be reserved for future use.
    fueltech_filter: str = ""
    wem_apvi_case: str = ""

    # NOTE(review): timezone is passed to format() below but the template
    # has no {timezone} placeholder — unused (str.format ignores extras).
    timezone: str = time_series.network.timezone_database

    fueltechs_excluded = ["exports", "imports", "interconnector"]

    # NEM rooftop comes from the dedicated rooftop query; exclude it here.
    if NetworkNEM in networks_query:
        fueltechs_excluded.append("solar_rooftop")

    if network_region:
        network_region_query = f"f.network_region='{network_region}' and "

    if NetworkWEM in networks_query:
        # silly single case we'll refactor out
        # APVI network is used to provide rooftop for WEM so we require it
        # in country-wide totals
        wem_apvi_case = "or (f.network_id='APVI' and f.network_region='WEM')"

    network_query = "(f.network_id IN ({}) {}) and ".format(
        networks_to_in(networks_query), wem_apvi_case
    )

    date_max = time_series.get_range().end
    date_min = time_series.get_range().start

    # Render the exclusion list as quoted SQL literals.
    fueltechs_exclude = ", ".join("'{}'".format(i) for i in fueltechs_excluded)

    query = dedent(
        __query.format(
            network_query=network_query,
            trunc=time_series.interval.interval_sql,
            network_region_query=network_region_query,
            timezone=timezone,
            date_max=date_max,
            date_min=date_min,
            fueltech_filter=fueltech_filter,
            # NOTE(review): wem_apvi_case has no matching placeholder either;
            # it is already embedded via network_query above.
            wem_apvi_case=wem_apvi_case,
            fueltechs_exclude=fueltechs_exclude,
        )
    )

    return query
def price_network_endpoint(
    engine: Engine = Depends(get_database_engine),
    network_code: str = Path(..., description="Network code"),
    network_region: Optional[str] = Query(None, description="Network region code"),
    forecasts: bool = Query(False, description="Include price forecasts"),
) -> OpennemDataSet:
    """Returns network and network region price info for interval which defaults to network
    interval size

    Args:
        engine ([type], optional): Database engine. Defaults to Depends(get_database_engine).

    Raises:
        HTTPException: No results

    Returns:
        OpennemData: data set
    """
    # NOTE(review): the injected engine parameter is immediately shadowed
    # here — the Depends() default is effectively unused.
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval("5m")
    period_obj = human_to_period("1d")

    scada_range = get_balancing_range(network=network, include_forecasts=forecasts)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    # Restrict the region list the price query filters on to the requested
    # region only.
    if network_region:
        time_series.network.regions = [NetworkNetworkRegion(code=network_region)]

    query = network_region_price_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    # Row shape from the query: (interval, network_id, region, price)
    result_set = [
        DataQueryResult(interval=i[0], result=i[3], group_by=i[2] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        result_set,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("price"),
        group_field="price",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
def energy_network_fueltech_query(
    time_series: TimeSeries,
    network_region: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> str:
    """
    Get Energy for a network or network + region based on a year

    Picks a daily gap-filled rollup when the series interval is coarser than
    one day, otherwise a per-interval query against the materialized view.

    Args:
        time_series: series defining network, interval and date range
        network_region: optional region code filter
        networks_query: optional list of additional networks to include

    Returns:
        str: the rendered SQL query
    """
    # Work on a copy: the original appended time_series.network to
    # networks_query in place, mutating the caller's list.
    networks = list(networks_query) if networks_query else []

    if time_series.network not in networks:
        networks.append(time_series.network)

    if time_series.interval.interval > 1440:
        __query = """
            select
                date_trunc('{trunc}', t.trading_day) as trading_month,
                t.fueltech_id,
                coalesce(sum(t.fueltech_energy) / 1000 , 0) as fueltech_energy,
                coalesce(sum(t.fueltech_market_value), 0) as fueltech_market_value,
                coalesce(sum(t.fueltech_emissions), 0) as fueltech_emissions
            from
                (select
                    time_bucket_gapfill('1 day', t.ti_day_aest) as trading_day,
                    t.fueltech_id,
                    sum(t.energy) as fueltech_energy,
                    sum(t.market_value) as fueltech_market_value,
                    sum(t.emissions) as fueltech_emissions
                from mv_facility_all t
                where
                    t.ti_day_aest <= '{date_max}' and
                    t.ti_day_aest >= '{date_min}' and
                    t.fueltech_id not in ('imports', 'exports') and
                    {network_query}
                    {network_region_query}
                    1=1
                group by 1, 2) as t
            group by 1, 2
            order by 1 desc;
        """
    else:
        __query = """
            select
                t.ti_{trunc_name} as trading_day,
                t.fueltech_id,
                coalesce(sum(t.energy) / 1000, 0) as fueltech_energy,
                coalesce(sum(t.market_value), 0) as fueltech_market_value,
                coalesce(sum(t.emissions), 0) as fueltech_emissions
            from mv_facility_all t
            where
                t.trading_interval <= '{date_max}' and
                t.trading_interval >= '{date_min}' and
                t.fueltech_id not in ('imports', 'exports') and
                {network_query}
                {network_region_query}
                1=1
            group by 1, 2
            order by trading_day desc;
        """

    network_region_query = ""
    date_range = time_series.get_range()

    if network_region:
        # @NOTE region code is interpolated directly into the SQL — codes come
        # from internal config, but a parameterized query would be safer here
        network_region_query = f"t.network_region='{network_region}' and"

    networks_list = networks_to_in(networks)

    network_query = "t.network_id IN ({}) and ".format(networks_list)

    trunc_name = "{}_{}".format(
        time_series.interval.trunc, time_series.network.timezone_database
    ).lower()

    query = dedent(
        __query.format(
            # NOTE(review): trunc comes from date_range.interval, while sibling
            # query builders use time_series.interval.trunc — confirm intended
            trunc=date_range.interval.trunc,
            trunc_name=trunc_name,
            date_min=date_range.start,
            date_max=date_range.end,
            network_query=network_query,
            network_region_query=network_region_query,
        )
    )

    return query
def weather_observation_query(time_series: TimeSeries, station_codes: List[str]) -> str: if time_series.interval.interval >= 1440: # @TODO replace with mv __query = """ select date_trunc('{trunc}', t.observation_time at time zone '{tz}') as observation_month, t.station_id, avg(t.temp_avg), min(t.temp_min), max(t.temp_max) from ( select time_bucket_gapfill('1 day', observation_time) as observation_time, fs.station_id, case when avg(fs.temp_air) is not null then avg(fs.temp_air) when max(fs.temp_max) is not null and max(fs.temp_min) is not null then ((max(fs.temp_max) + min(fs.temp_min)) / 2) else NULL end as temp_avg, case when min(fs.temp_min) is not null then min(fs.temp_min) else min(fs.temp_air) end as temp_min, case when max(fs.temp_max) is not null then max(fs.temp_max) else max(fs.temp_air) end as temp_max from bom_observation fs where fs.station_id in ({station_codes}) and fs.observation_time <= '{date_end}' and fs.observation_time >= '{date_start}' group by 1, 2 ) as t group by 1, 2; """.format( trunc=time_series.interval.trunc, tz=time_series.network.timezone_database, station_codes=",".join(["'{}'".format(i) for i in station_codes]), date_start=time_series.get_range().start, date_end=time_series.get_range().end, ) else: __query = """ select fs.observation_time at time zone '{tz}' as ot, fs.station_id as station_id, avg(fs.temp_air) as temp_air, case when min(fs.temp_min) is not null then min(fs.temp_min) else min(fs.temp_air) end as temp_min, case when max(fs.temp_max) is not null then max(fs.temp_max) else max(fs.temp_air) end as temp_max from bom_observation fs where fs.station_id in ({station_codes}) and fs.observation_time <= '{date_end}' and fs.observation_time >= '{date_start}' group by 1, 2; """.format( tz=time_series.network.timezone_database, station_codes=",".join(["'{}'".format(i) for i in station_codes]), date_start=time_series.get_range().start, date_end=time_series.get_range().end, ) return dedent(__query)
def energy_station(
    engine=Depends(get_database_engine),  # type: ignore
    session: Session = Depends(get_database_session),
    network_code: str = Query(..., description="Network code"),
    station_code: str = Query(..., description="Station Code"),
    interval: str = Query(None, description="Interval"),
    period: str = Query("7d", description="Period"),
) -> OpennemDataSet:
    """
    Get energy output for a station (list of facilities) over a period

    Raises:
        HTTPException: 404 when the network, station, facilities, date range
            or results are not found
    """
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval:
        # @NOTE rooftop data is 15m
        if station_code.startswith("ROOFTOP"):
            interval = "15m"
        else:
            interval = "{}m".format(network.interval_size)

    interval_obj = human_to_interval(interval)
    period_obj = human_to_period(period)
    units = get_unit("energy")

    station: Optional[Station] = (
        session.query(Station)
        .join(Station.facilities)
        .filter(Station.code == station_code)
        .filter(Facility.network_id == network.code)
        .one_or_none()
    )

    if not station:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Station not found")

    if not station.facilities:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station has no facilities",
        )

    # Start date: prefer the station's own scada range, falling back to the
    # network-wide range. The original dereferenced station.scada_range
    # unconditionally, which raises AttributeError when it is None, and used
    # get_scada_range() without checking for a None result.
    date_start = None
    date_end = None

    if station.scada_range:
        date_start = station.scada_range.date_min
        date_end = station.scada_range.date_max

    if not date_start or not date_end:
        network_range = get_scada_range(network=network)

        if not network_range:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Could not find a date range",
            )

        date_start = date_start or network_range.start
        date_end = date_end or network_range.end

    time_series = TimeSeries(
        start=date_start,
        end=date_end,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = energy_facility_query(
        time_series,
        station.facility_codes,
    )

    logger.debug(query)

    with engine.connect() as c:
        row = list(c.execute(query))

    if not row:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    # row columns: 0=interval, 1=group, 2=energy, 3=market value, 4=emissions.
    # (The original re-checked len(results_energy) < 1 after this point; that
    # was dead code since results_energy has the same length as row.)
    results_energy = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[2] if len(i) > 1 else None)
        for i in row
    ]

    results_market_value = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[3] if len(i) > 1 else None)
        for i in row
    ]

    results_emissions = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[4] if len(i) > 1 else None)
        for i in row
    ]

    stats = stats_factory(
        stats=results_energy,
        units=units,
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=station_code,
        include_group_code=True,
    )

    if not stats:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    stats_market_value = stats_factory(
        stats=results_market_value,
        units=get_unit("market_value"),
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=station_code,
        include_group_code=True,
    )

    stats.append_set(stats_market_value)

    stats_emissions = stats_factory(
        stats=results_emissions,
        units=get_unit("emissions"),
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=network.code.lower(),
    )

    stats.append_set(stats_emissions)

    return stats
def power_flows_network_week( engine=Depends(get_database_engine), # type: ignore network_code: str = Query(..., description="Network code"), month: date = Query(datetime.now().date(), description="Month to query"), ) -> Optional[OpennemDataSet]: engine = get_database_engine() network = network_from_network_code(network_code) interval_obj = network.get_interval() period_obj = human_to_period("1M") scada_range = get_scada_range(network=network) if not scada_range: raise Exception("Require a scada range") if not network: raise Exception("Network not found") time_series = TimeSeries( start=scada_range.start, month=month, network=network, interval=interval_obj, period=period_obj, ) query = interconnector_flow_network_regions_query(time_series=time_series) with engine.connect() as c: logger.debug(query) row = list(c.execute(query)) if len(row) < 1: raise Exception("No results from query: {}".format(query)) imports = [ DataQueryResult(interval=i[0], result=i[4], group_by=i[1] if len(i) > 1 else None) for i in row ] result = stats_factory( imports, # code=network_region_code or network.code, network=time_series.network, period=time_series.period, interval=time_series.interval, units=get_unit("regional_trade"), # fueltech_group=True, group_field="power", include_group_code=True, include_code=True, ) if not result or not result.data: raise Exception("No results") INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"] inverted_data = [] for ds in result.data: if ds.code in INVERT_SETS: ds_inverted = invert_flow_set(ds) inverted_data.append(ds_inverted) else: inverted_data.append(ds) result.data = inverted_data return result
def fueltech_demand_mix( engine=Depends(get_database_engine), # type: ignore network_code: str = Query(..., description="Network code"), ) -> OpennemDataSet: """Return fueltech proportion of demand for a network Args: engine ([type], optional): Database engine. Defaults to Depends(get_database_engine). Raises: HTTPException: No results Returns: OpennemData: data set """ engine = get_database_engine() network = None try: network = network_from_network_code(network_code) except Exception: raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND) interval_obj = human_to_interval("5m") period_obj = human_to_period("1d") scada_range = get_scada_range(network=network) if not scada_range: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Could not find a date range", ) if not network: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Network not found", ) time_series = TimeSeries( start=scada_range.start, network=network, interval=interval_obj, period=period_obj, ) query = network_fueltech_demand_query(time_series=time_series) with engine.connect() as c: logger.debug(query) row = list(c.execute(query)) if len(row) < 1: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="No results", ) result_set = [ DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None) for i in row ] result = stats_factory( result_set, network=time_series.network, period=time_series.period, interval=time_series.interval, units=get_unit("emissions_factor"), group_field="emission_factor", include_group_code=True, include_code=True, ) if not result or not result.data: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="No results", ) return result
def power_station(
    station_code: str = Query(..., description="Station code"),
    network_code: str = Query(..., description="Network code"),
    since: datetime = Query(None, description="Since time"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    session: Session = Depends(get_database_session),
    engine=Depends(get_database_engine),  # type: ignore
) -> OpennemDataSet:
    """Return power output for a station over a period.

    Raises:
        HTTPException: 404 when the network, station, date range or results
            are not found

    Returns:
        OpennemDataSet: power data set for the station's facilities
    """
    if not since:
        since = datetime.now() - human_to_timedelta("7d")

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        # @NOTE rooftop data is 15m
        if station_code.startswith("ROOFTOP"):
            interval_human = "15m"
        else:
            interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    station: Optional[Station] = (
        session.query(Station)
        .join(Facility)
        .filter(Station.code == station_code)
        .filter(Facility.network_id == network.code)
        .filter(Station.approved.is_(True))
        .one_or_none()
    )

    if not station:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Station not found")

    # Date range: prefer the station's own scada range, falling back to the
    # network-wide range. The original dereferenced station.scada_range
    # unconditionally (AttributeError when None) and used get_scada_range()
    # without checking for a None result. It also assigned a dead stats = []
    # that was immediately overwritten below.
    date_min = None
    date_max = None

    if station.scada_range:
        date_min = station.scada_range.date_min
        date_max = station.scada_range.date_max

    if not date_min or not date_max:
        network_range = get_scada_range(network=network)

        if not network_range:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Could not find a date range",
            )

        date_min = date_min or network_range.start
        date_max = date_max or network_range.end

    time_series = TimeSeries(
        start=date_min,
        end=date_max,
        network=network,
        period=period,
        interval=interval,
    )

    query = power_facility_query(time_series, station.facility_codes)

    logger.debug(query)

    with engine.connect() as c:
        results = list(c.execute(query))

    # result columns: 0=interval, 1=value, 2=group code
    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if not stats:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats,
        code=station_code,
        network=network,
        interval=interval,
        period=period,
        include_group_code=True,
        units=units,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result