def power_flows_week(
    time_series: TimeSeries,
    network_region_code: str,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = interconnector_power_flow(time_series=time_series, network_region=network_region_code)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise Exception("No results from query: {}".format(query))

    imports = [
        DataQueryResult(interval=i[0], result=i[2], group_by="imports" if len(i) > 1 else None)
        for i in row
    ]
    exports = [
        DataQueryResult(interval=i[0], result=i[3], group_by="exports" if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        imports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("No results")

    result_exports = stats_factory(
        exports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    result.append_set(result_exports)

    return result
def power_network_region_fueltech(
    network_code: str = Query(..., description="Network code"),
    network_region_code: str = Query(..., description="Network region code"),
    month: date = Query(datetime.now().date(), description="Month to query"),
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    interval_obj = network.get_interval()
    period_obj = human_to_period("1M")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise Exception("Require a scada range")

    if not network:
        raise Exception("Network not found")

    time_series = TimeSeries(
        start=scada_range.start,
        month=month,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    stat_set = power_week(time_series, network_region_code, include_capacities=True)

    if not stat_set:
        raise Exception("No results")

    return stat_set
def gov_stats_cpi() -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = country_stats_query(StatTypes.CPI)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error("No results for gov_stats_cpi, returning blank set")
        return None

    result = stats_factory(
        stats,
        code="au.cpi",
        network=NetworkNEM,
        interval=human_to_interval("1Q"),
        period=human_to_period("all"),
        units=get_unit("cpi"),
        group_field="gov",
    )

    return result
def export_flows() -> None:
    date_range = get_scada_range(network=NetworkNEM)

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("7d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if stat_set:
        write_output(f"v3/stats/au/{interchange_stat.network.code}/flows/7d.json", stat_set)
def power_unit(
    unit_code: str = Query(..., description="Unit code"),
    network_code: str = Query(..., description="Network code"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    stats = []

    facility_codes = [normalize_duid(unit_code)]

    query = power_facility_query(facility_codes, network.code, interval=interval, period=period)

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Unit stats not found",
        )

    output = stats_factory(
        stats,
        code=unit_code,
        interval=interval,
        period=period,
        units=units,
        network=network,
    )

    if not output:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No stats found",
        )

    return output
def energy_network_api(
    engine=Depends(get_database_engine),
    network_code: str = Query(..., description="Network code"),
    interval_human: str = Query("1d", description="Interval"),
    period_human: str = Query("1Y", description="Period"),
) -> OpennemDataSet:
    results = []

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("energy_giga")

    query = energy_network(network=network, interval=interval, period=period)

    with engine.connect() as c:
        results = list(c.execute(query))

    if len(results) < 1:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No results")

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    result = stats_factory(
        stats,
        code=network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
def get_date_range(network: NetworkSchema) -> DatetimeRange:
    date_range = get_scada_range(network=NetworkNEM)

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        interval=human_to_interval("1d"),
        period=human_to_period("all"),
        network=network,
    )

    return time_series.get_range()
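# Below is a small, hypothetical usage sketch for get_date_range() above (not
# part of the original module). It only relies on DatetimeRange exposing start
# and end, as constructed in TimeSeries.get_range() further down.
def _example_print_nem_date_range() -> None:
    nem_range = get_date_range(network=NetworkNEM)
    print(nem_range.start, nem_range.end)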
def power_network_region_fueltech(
    network_code: str = Query(..., description="Network code"),
    network_region_code: str = Query(..., description="Network region code"),
    month: date = Query(datetime.now().date(), description="Month to query"),
) -> OpennemDataSet:
    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = network.get_interval()
    period_obj = human_to_period("1M")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise Exception("Require a scada range")

    if not network:
        raise Exception("Network not found")

    networks = [network]

    if network == NetworkNEM:
        networks.append(NetworkAEMORooftop)
        networks.append(NetworkAEMORooftopBackfill)
    elif network == NetworkWEM:
        networks.append(NetworkAPVI)

    time_series = TimeSeries(
        start=scada_range.start,
        month=month,
        network=network,
        networks=networks,
        interval=interval_obj,
        period=period_obj,
    )

    stat_set = power_week(
        time_series, network_region_code, include_capacities=True, networks_query=networks
    )

    if not stat_set:
        raise Exception("No results")

    return stat_set
def demand_week(
    time_series: TimeSeries,
    network_region_code: Optional[str],
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = network_demand_query(
        time_series=time_series,
        network_region=network_region_code,
        networks_query=networks_query,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        logger.error("No results from network_demand_query with {}".format(time_series))
        return None

    demand = [
        DataQueryResult(interval=i[0], result=i[2], group_by="demand" if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        demand,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("demand"),
        region=network_region_code,
    )

    if not result:
        logger.error("No results from network_demand_query with {}".format(time_series))
        return None

    return result
def get_power_example() -> OpennemDataSet:
    network = network_from_network_code("NEM")
    interval = human_to_interval("5m")
    units = get_unit("power")
    period = human_to_period("7d")
    network_region_code = "NSW1"

    test_rows = []

    dt = datetime.fromisoformat("2021-01-15 10:00:00")

    for ft in ["coal_black", "coal_brown"]:
        for v in range(0, 3):
            test_rows.append([dt, ft, v])
            dt = dt + timedelta(minutes=5)

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in test_rows
    ]

    assert len(stats) == 6, "Should have 6 stats"

    result = stats_factory(
        stats,
        code=network_region_code or network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("Bad unit test data")

    with open("power-nsw1.json", "w") as fh:
        fh.write(result.json(indent=4))

    return result
def export_all_monthly() -> None:
    session = get_scoped_session()

    all_monthly = OpennemDataSet(
        code="au", data=[], version=get_version(), created_at=datetime.now()
    )

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    # Iterate networks and network regions
    networks = [NetworkNEM, NetworkWEM]

    for network in networks:
        network_regions = (
            session.query(NetworkRegion).filter(NetworkRegion.network_id == network.code).all()
        )

        for network_region in network_regions:
            networks = []

            logging.info(
                "Exporting monthly for network {} and region {}".format(
                    network.code, network_region.code
                )
            )

            if network_region.code == "WEM":
                networks = [NetworkWEM, NetworkAPVI]

            if network == NetworkNEM:
                networks = [NetworkNEM, NetworkAEMORooftop]

            logger.debug(
                "Running monthlies for {} and {}".format(network.code, network_region.code)
            )

            scada_range: ScadaDateRange = get_scada_range(
                network=network, networks=networks, energy=True
            )

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".format(network, True)
                )
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1M"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=network_region.code,
                networks=networks,
            )
            stat_set.append_set(demand_energy_and_value)

            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            all_monthly.append_set(stat_set)

            bom_station = get_network_region_weather_station(network_region.code)

            if bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    all_monthly.append_set(weather_stats)
                except Exception:
                    pass

    write_output("v3/stats/au/all/monthly.json", all_monthly)
def power_network_fueltech_api(
    network_code: str = Query(..., description="Network code"),
    network_region: str = Query(None, description="Network region"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    scada_range = get_scada_range(network=network)

    query = power_network_fueltech(
        network=network,
        interval=interval,
        period=period,
        network_region=network_region,
        scada_range=scada_range,
    )

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats,
        code=network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
        region=network_region,
        fueltech_group=True,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
def energy_station(
    engine=Depends(get_database_engine),
    session: Session = Depends(get_database_session),
    network_code: str = Query(..., description="Network code"),
    station_code: str = Query(..., description="Station Code"),
    interval: str = Query(None, description="Interval"),
    period: str = Query("7d", description="Period"),
) -> OpennemDataSet:
    """
    Get energy output for a station (list of facilities) over a period
    """
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval:
        # @NOTE rooftop data is 15m
        if station_code.startswith("ROOFTOP"):
            interval = "15m"
        else:
            interval = "{}m".format(network.interval_size)

    interval_obj = human_to_interval(interval)
    period_obj = human_to_period(period)
    units = get_unit("energy")

    station = (
        session.query(Station)
        .join(Station.facilities)
        .filter(Station.code == station_code)
        .filter(Facility.network_id == network.code)
        .one_or_none()
    )

    if not station:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Station not found")

    if len(station.facilities) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station has no facilities",
        )

    facility_codes = list(set([f.code for f in station.facilities]))

    query = energy_facility_query(
        facility_codes,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    logger.debug(query)

    with engine.connect() as c:
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    results_energy = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[2] if len(i) > 1 else None)
        for i in row
    ]
    results_market_value = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[3] if len(i) > 1 else None)
        for i in row
    ]
    results_emissions = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[4] if len(i) > 1 else None)
        for i in row
    ]

    if len(results_energy) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    stats = stats_factory(
        stats=results_energy,
        units=units,
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=station_code,
        include_group_code=True,
    )

    if not stats:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    stats_market_value = stats_factory(
        stats=results_market_value,
        units=get_unit("market_value"),
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=station_code,
        include_group_code=True,
    )

    stats.append_set(stats_market_value)

    stats_emissions = stats_factory(
        stats=results_emissions,
        units=get_unit("emissions"),
        network=network,
        interval=interval_obj,
        period=period_obj,
        code=network.code.lower(),
        include_group_code=True,
    )

    stats.append_set(stats_emissions)

    return stats
def energy_interconnector_emissions_region_daily(
    time_series: TimeSeries,
    network_region_code: str,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()
    period: TimePeriod = human_to_period("1Y")
    units = get_unit("emissions")

    query = energy_network_interconnector_emissions_query(
        time_series=time_series,
        network_region=network_region_code,
        networks_query=networks_query,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        return None

    stats = [
        RegionFlowEmissionsResult(
            interval=i[0],
            flow_from=i[1],
            flow_to=i[2],
            energy=i[3],
            flow_from_emissions=i[4],
            flow_to_emissions=i[5],
        )
        for i in row
    ]

    stats_grouped = net_flows_emissions(network_region_code, stats, time_series.interval)

    imports = stats_grouped["imports"]
    exports = stats_grouped["exports"]

    # imports = [DataQueryResult(interval=i[0], group_by="imports", result=i[5]) for i in row]
    # exports = [DataQueryResult(interval=i[0], group_by="exports", result=i[4]) for i in row]

    result = stats_factory(
        imports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=units,
        region=network_region_code,
        fueltech_group=True,
        localize=False,
    )

    # Bail early on no interconnector
    # don't error
    if not result:
        return result

    result_exports = stats_factory(
        exports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=units,
        region=network_region_code,
        fueltech_group=True,
        localize=False,
    )

    result.append_set(result_exports)

    return result
def power_station(
    station_code: str = Query(..., description="Station code"),
    network_code: str = Query(..., description="Network code"),
    since: datetime = Query(None, description="Since time"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    session: Session = Depends(get_database_session),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    if not since:
        since = datetime.now() - human_to_timedelta("7d")

    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        # @NOTE rooftop data is 15m
        if station_code.startswith("ROOFTOP"):
            interval_human = "15m"
        else:
            interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)
    units = get_unit("power")

    station = (
        session.query(Station)
        .join(Facility)
        .filter(Station.code == station_code)
        .filter(Facility.network_id == network.code)
        .filter(Station.approved.is_(True))
        .one_or_none()
    )

    if not station:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Station not found")

    facility_codes = list(set([f.code for f in station.facilities]))

    stats = []

    query = power_facility_query(
        facility_codes, network=network, interval=interval, period=period
    )

    logger.debug(query)

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats,
        code=station_code,
        network=network,
        interval=interval,
        period=period,
        include_group_code=True,
        units=units,
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
def get_range(self) -> DatetimeRange:
    """Return a DatetimeRange from the time series for queries"""
    start = self.start
    end = self.end

    # If it's a forward-looking forecast, jump out early
    if self.forecast:
        start = self.end + timedelta(minutes=self.interval.interval)
        end = self.end + get_human_interval(self.forecast_period)

        start = start.astimezone(self.network.get_fixed_offset())
        end = end.astimezone(self.network.get_fixed_offset())

        return DatetimeRange(start=start, end=end, interval=self.interval)

    # subtract the period (ie. 7d) from the end to get the start, unless the period is "all"
    if self.period == human_to_period("all"):
        start = date_trunc(start, self.interval.trunc)
        start = start.replace(
            hour=0, minute=0, second=0, tzinfo=self.network.get_fixed_offset()
        )

        # If it's all per month take the end of the last month
        if self.interval == human_to_interval("1M"):
            end = date_trunc(get_end_of_last_month(end), "day")
            end = end.replace(
                hour=23, minute=59, second=59, tzinfo=self.network.get_fixed_offset()
            )

        self.year = None
    else:
        start = self.end - get_human_interval(self.period.period_human)

    if self.year:
        if self.year > end.year:
            raise Exception("Specified year is greater than end year")

        start = start.replace(
            year=self.year,
            month=1,
            day=1,
            hour=0,
            minute=0,
            second=0,
            tzinfo=self.network.get_fixed_offset(),
        )

        end = datetime(
            year=self.year,
            month=12,
            day=31,
            hour=23,
            minute=59,
            second=59,
            tzinfo=self.network.get_fixed_offset(),
        )

        if self.year == CUR_YEAR:
            today = datetime.now(tz=self.network.get_fixed_offset())
            end = datetime(
                year=CUR_YEAR, month=today.month, day=today.day, hour=23, minute=59, second=59
            )
            end = end - timedelta(days=1)
            end = end.replace(tzinfo=self.network.get_fixed_offset())

            if self.end.date() < today.date():
                end = self.end

    if self.month:
        start = datetime(
            year=self.month.year,
            month=self.month.month,
            day=1,
            hour=0,
            minute=0,
            second=0,
            tzinfo=self.network.get_fixed_offset(),
        )

        end = start + get_human_interval("1M") - timedelta(days=1)
        end = end.replace(
            hour=23,
            minute=59,
            second=59,
        )

    # localize times
    if not start.tzinfo or start.tzinfo != self.network.get_fixed_offset():
        start = start.astimezone(self.network.get_fixed_offset())

    if not end.tzinfo or end.tzinfo != self.network.get_fixed_offset():
        end = end.astimezone(self.network.get_fixed_offset())

    dtr = DatetimeRange(start=start, end=end, interval=self.interval)

    return dtr
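# Below is a small, hypothetical usage sketch for get_range() (not part of the
# original module). It mirrors the first parametrized test case further down:
# a one-hour NEM series resolved at the network's 5m interval, with times
# localized from UTC to the network's +10:00 fixed offset and an inclusive range.
def _example_get_range_one_hour_nem() -> None:
    ts = TimeSeries(
        start=datetime.fromisoformat("2021-01-15 12:00:00+00:00"),
        end=datetime.fromisoformat("2021-01-15 13:00:00+00:00"),
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("1h"),
    )
    dtr = ts.get_range()
    # Expected per the test fixture below: bounds shifted to +10:00
    assert dtr.start == datetime.fromisoformat("2021-01-15 22:00:00+10:00")
    assert dtr.end == datetime.fromisoformat("2021-01-15 23:00:00+10:00")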
def energy_interconnector_region_daily(
    time_series: TimeSeries,
    network_region_code: str,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()
    period: TimePeriod = human_to_period("1Y")
    units = get_unit("energy_giga")

    query = energy_network_flow_query(
        time_series=time_series,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        return None

    imports = [DataQueryResult(interval=i[0], group_by="imports", result=i[1]) for i in row]
    exports = [DataQueryResult(interval=i[0], group_by="exports", result=i[2]) for i in row]
    imports_mv = [DataQueryResult(interval=i[0], group_by="imports", result=i[3]) for i in row]
    exports_mv = [DataQueryResult(interval=i[0], group_by="exports", result=i[4]) for i in row]

    result = stats_factory(
        imports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=units,
        region=network_region_code,
        fueltech_group=True,
        # localize=False,
    )

    # Bail early on no interconnector
    # don't error
    if not result:
        logger.warning("No interconnector energy result")
        return result

    result_exports = stats_factory(
        exports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=units,
        region=network_region_code,
        fueltech_group=True,
    )

    result.append_set(result_exports)

    result_imports_mv = stats_factory(
        imports_mv,
        units=get_unit("market_value"),
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=False,
    )
    result.append_set(result_imports_mv)

    result_export_mv = stats_factory(
        exports_mv,
        units=get_unit("market_value"),
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=False,
    )
    result.append_set(result_export_mv)

    return result
def emission_factor_per_network(  # type: ignore
    engine=Depends(get_database_engine),  # type: ignore
    network_code: str = Query(..., description="Network code"),
    interval: str = Query("30m", description="Interval size"),
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval(interval)
    period_obj = human_to_period("7d")

    if not interval_obj:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Invalid interval size")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = emission_factor_region_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    emission_factors = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        emission_factors,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("emissions_factor"),
        group_field="emission_factor",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
def fueltech_demand_mix(
    engine=Depends(get_database_engine),  # type: ignore
    network_code: str = Query(..., description="Network code"),
) -> OpennemDataSet:
    """Return fueltech proportion of demand for a network

    Args:
        engine ([type], optional): Database engine. Defaults to Depends(get_database_engine).

    Raises:
        HTTPException: No results

    Returns:
        OpennemDataSet: data set
    """
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval("5m")
    period_obj = human_to_period("1d")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = network_fueltech_demand_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    result_set = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        result_set,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("emissions_factor"),
        group_field="emission_factor",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
def export_electricitymap() -> None:
    date_range = get_scada_range(network=NetworkNEM)

    if not date_range.start:
        raise Exception("Could not get a scada range in EM export")

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("1d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        networks=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if not stat_set:
        raise Exception("No flow results for electricitymap export")

    em_set = OpennemDataSet(
        type="custom", version=get_version(), created_at=datetime.now(), data=[]
    )

    INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"]

    for ds in stat_set.data:
        if ds.code in INVERT_SETS:
            ds_inverted = invert_flow_set(ds)
            em_set.data.append(ds_inverted)
            logging.info("Inverted {}".format(ds.code))
        else:
            em_set.data.append(ds)

    for region in ["NSW1", "QLD1", "VIC1", "TAS1", "SA1"]:
        power_set = power_week(
            time_series,
            region,
            include_capacities=True,
            include_code=False,
            networks_query=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        )

        if power_set:
            em_set.append_set(power_set)

    date_range = get_scada_range(network=NetworkWEM)

    # WEM custom
    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=NetworkWEM,
        networks=[NetworkWEM, NetworkAPVI],
        interval=NetworkWEM.get_interval(),
        period=interchange_stat.period,
    )

    power_set = power_week(
        time_series,
        "WEM",
        include_capacities=True,
        networks_query=[NetworkWEM, NetworkAPVI],
        include_code=False,
    )

    if power_set:
        em_set.append_set(power_set)

    write_output("v3/clients/em/latest.json", em_set)
def power_flows_network_week(
    engine=Depends(get_database_engine),  # type: ignore
    network_code: str = Query(..., description="Network code"),
    month: date = Query(datetime.now().date(), description="Month to query"),
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    network = network_from_network_code(network_code)

    interval_obj = network.get_interval()
    period_obj = human_to_period("1M")

    scada_range = get_scada_range(network=network)

    if not scada_range:
        raise Exception("Require a scada range")

    if not network:
        raise Exception("Network not found")

    time_series = TimeSeries(
        start=scada_range.start,
        month=month,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    query = interconnector_flow_network_regions_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise Exception("No results from query: {}".format(query))

    imports = [
        DataQueryResult(interval=i[0], result=i[4], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        imports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("regional_trade"),
        # fueltech_group=True,
        group_field="power",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise Exception("No results")

    INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"]

    inverted_data = []

    for ds in result.data:
        if ds.code in INVERT_SETS:
            ds_inverted = invert_flow_set(ds)
            inverted_data.append(ds_inverted)
        else:
            inverted_data.append(ds)

    result.data = inverted_data

    return result
def price_network_region_api(
    engine=Depends(get_database_engine),
    network_code: str = Query(..., description="Network code"),
    network_region_code: str = Query(..., description="Region code"),
    interval_human: str = Query(None, description="Interval"),
    period_human: str = Query("7d", description="Period"),
    year: Optional[int] = None,
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No such network",
        )

    if not interval_human:
        interval_human = "{}m".format(network.interval_size)

    interval = human_to_interval(interval_human)

    period_obj = None

    if period_human:
        period_obj = human_to_period(period_human)

    units = get_unit("price")

    scada_range = get_scada_range(network=network)

    if period_obj and period_obj.period_human == "all" and interval.interval_human == "1M":
        query = price_network_monthly(
            network=network,
            network_region_code=network_region_code,
            scada_range=scada_range,
        )
    else:
        query = price_network_region(
            network=network,
            network_region_code=network_region_code,
            interval=interval,
            period=period_obj,
            scada_range=scada_range,
            year=year,
        )

    with engine.connect() as c:
        results = list(c.execute(query))

    if len(results) < 1:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No data found")

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in results
    ]

    result = stats_factory(
        stats,
        code=network.code,
        region=network_region_code,
        network=network,
        interval=interval,
        period=period_obj,
        units=units,
        group_field="price",
    )

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results found",
        )

    return result
def station_observations_api(
    station_code: str = Query(None, description="Station code"),
    interval_human: str = Query("15m", description="Interval"),
    period_human: str = Query("7d", description="Period"),
    station_codes: List[str] = [],
    timezone: str = None,
    offset: str = None,
    year: int = None,
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    units = get_unit("temperature")

    if not interval_human:
        interval_human = "15m"

    if not period_human:
        period_human = "7d"

    if station_code:
        station_codes = [station_code]

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)

    if timezone:
        timezone = ZoneInfo(timezone)

    if offset:
        timezone = get_fixed_timezone(offset)

    query = observation_query(
        station_codes=station_codes,
        interval=interval,
        period=period,
        year=year,
    )

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats=stats,
        units=units,
        interval=interval,
        period=period,
        code="bom",
        group_field="temperature",
    )

    return result
def generate_weekly_export_map() -> StatMetadata:
    """
    Generate export map for weekly power series

    @TODO deconstruct this into separate methods and schema
    ex. network.get_scada_range(), network_region.get_bom_station() etc.
    """
    session = get_scoped_session()

    networks = session.query(Network).filter(Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    # Loop countries
    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU, networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range for NetworkAU")

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=country,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                week=week,
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval("30m"),
                period=human_to_period("7d"),
            )
            _exmap.append(export)

    # Loop networks
    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)

        if not scada_range:
            raise Exception("Require a scada range for network: {}".format(network.code))

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=network.country,
                network=network_schema,
                year=year,
                week=week,
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval(f"{network.interval_size}m"),
                period=human_to_period("7d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        # Skip cases like wem/wem where the region is superfluous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema, network_region=region.code)

            if not scada_range:
                logger.error(
                    "Require a scada range for network {} and region {}".format(
                        network_schema.code, region.code
                    )
                )
                continue

            for year, week in week_series(scada_range.end, scada_range.start):
                export = StatExport(
                    stat_type=StatType.power,
                    priority=PriorityType.history,
                    country=network.country,
                    network=network_schema,
                    year=year,
                    week=week,
                    date_range=date_range_from_week(
                        year, week, network_from_network_code(network.code)
                    ),
                    interval=human_to_interval(f"{network.interval_size}m"),
                    period=human_to_period("7d"),
                )

                if network.code == "WEM":
                    export.networks = [NetworkWEM, NetworkAPVI]
                    export.network_region_query = "WEM"

                _exmap.append(export)

    export_meta = StatMetadata(
        date_created=datetime.now(), version=get_version(), resources=_exmap
    )

    return export_meta
def export_energy(
    stats: List[StatExport] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export energy stats from the export map
    """
    if not stats:
        export_map = get_export_map().get_by_stat_type(StatType.energy)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    CURRENT_YEAR = datetime.now().year

    logger.info(f"Running export_energy with {len(stats)} stats")

    for energy_stat in stats:
        if energy_stat.stat_type != StatType.energy:
            continue

        # @FIX trim to NEM since it's the one with the shortest data time span.
        # @TODO find a better and more flexible way to do this in the range method
        date_range_networks = energy_stat.networks or []

        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=energy_stat.network, networks=date_range_networks, energy=True
        )

        if not date_range:
            logger.error(
                "Skipping - Could not get date range for energy {} {}".format(
                    energy_stat.network, date_range_networks
                )
            )
            continue

        logger.debug(
            "Date range is: {} {} => {}".format(
                energy_stat.network.code, date_range.start, date_range.end
            )
        )

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=energy_stat.network,
            year=energy_stat.year,
            interval=energy_stat.interval,
            period=human_to_period("1Y"),
        )

        if energy_stat.year:
            if latest and energy_stat.year != CURRENT_YEAR:
                continue

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=energy_stat.network_region,
                networks=energy_stat.networks,
            )
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

            if energy_stat.bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=energy_stat.bom_station,
                        network_region=energy_stat.network_region,
                    )
                    stat_set.append_set(weather_stats)
                except NoResults as e:
                    logger.info("No results for weather result: {}".format(e))
                except Exception as e:
                    logger.error("weather_stat exception: {}".format(e))
            else:
                logger.info("Stat set has no bom station")

            write_output(energy_stat.path, stat_set)

        elif energy_stat.period and energy_stat.period.period_human == "all" and not latest:
            time_series.period = human_to_period("all")
            time_series.interval = human_to_interval("1M")
            time_series.year = None

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=energy_stat.network_region,
                networks=energy_stat.networks,
            )
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

            if energy_stat.bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=energy_stat.bom_station,
                        network_region=energy_stat.network_region,
                    )
                    stat_set.append_set(weather_stats)
                except NoResults as e:
                    logger.info("No weather results: {}".format(e))
                except Exception:
                    pass

            write_output(energy_stat.path, stat_set)
def export_all_daily(
    networks: List[NetworkSchema] = [NetworkNEM, NetworkWEM],
    network_region_code: Optional[str] = None,
) -> None:
    session = get_scoped_session()

    cpi = gov_stats_cpi()

    for network in networks:
        network_regions = (
            session.query(NetworkRegion)
            .filter_by(export_set=True)
            .filter_by(network_id=network.code)
        )

        if network_region_code:
            network_regions = network_regions.filter_by(code=network_region_code)

        network_regions = network_regions.all()

        for network_region in network_regions:
            logging.info(
                "Exporting for network {} and region {}".format(
                    network.code, network_region.code
                )
            )

            networks = [NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill]

            if network_region.code == "WEM":
                networks = [NetworkWEM, NetworkAPVI]

            scada_range: ScadaDateRange = get_scada_range(
                network=network, networks=networks, energy=True
            )

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".format(network, True)
                )
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1d"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=network_region.code,
                networks=networks,
            )
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            bom_station = get_network_region_weather_station(network_region.code)

            if bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    stat_set.append_set(weather_stats)
                except Exception:
                    pass

            if cpi:
                stat_set.append_set(cpi)

            write_output(f"v3/stats/au/{network_region.code}/daily.json", stat_set)
def energy_network_fueltech_api(
    network_code: str = Query(None, description="Network code"),
    network_region: str = Query(None, description="Network region"),
    interval_human: str = Query("1d", description="Interval"),
    year: int = Query(None, description="Year to query"),
    period_human: str = Query("1Y", description="Period"),
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    network = network_from_network_code(network_code)

    interval = human_to_interval(interval_human)

    period_obj: TimePeriod = human_to_period("1Y")

    if period_human:
        period_obj = human_to_period(period_human)

    units = get_unit("energy_giga")

    query = ""

    if year and isinstance(year, int):
        period_obj = human_to_period("1Y")

        if year > datetime.now().year or year < 1996:
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail="Not a valid year",
            )

        scada_range = get_scada_range(network=network)

        query = energy_network_fueltech_year(
            network=network,
            interval=interval,
            year=year,
            network_region=network_region,
            scada_range=scada_range,
        )
    elif period_obj and period_obj.period_human == "all":
        scada_range = get_scada_range(network=network)

        query = energy_network_fueltech_all(
            network=network,
            network_region=network_region,
            scada_range=scada_range,
        )
    else:
        query = energy_network_fueltech(
            network=network,
            interval=interval,
            period=period_obj,
            network_region=network_region,
        )

    # print(query)

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Energy stats not found",
        )

    result = stats_factory(
        stats,
        code=network.code,
        network=network,
        interval=interval,
        period=period_obj,
        units=units,
        region=network_region,
        fueltech_group=True,
    )

    if not result:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No stats")

    return result
def price_network_endpoint(
    engine: Engine = Depends(get_database_engine),
    network_code: str = Path(..., description="Network code"),
    network_region: Optional[str] = Query(None, description="Network region code"),
    forecasts: bool = Query(False, description="Include price forecasts"),
) -> OpennemDataSet:
    """Returns network and network region price info for an interval, which
    defaults to the network interval size

    Args:
        engine ([type], optional): Database engine. Defaults to Depends(get_database_engine).

    Raises:
        HTTPException: No results

    Returns:
        OpennemDataSet: data set
    """
    engine = get_database_engine()

    network = None

    try:
        network = network_from_network_code(network_code)
    except Exception:
        raise HTTPException(detail="Network not found", status_code=status.HTTP_404_NOT_FOUND)

    interval_obj = human_to_interval("5m")
    period_obj = human_to_period("1d")

    scada_range = get_balancing_range(network=network, include_forecasts=forecasts)

    if not scada_range:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Could not find a date range",
        )

    if not network:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Network not found",
        )

    time_series = TimeSeries(
        start=scada_range.start,
        network=network,
        interval=interval_obj,
        period=period_obj,
    )

    if network_region:
        time_series.network.regions = [NetworkNetworkRegion(code=network_region)]

    query = network_region_price_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    result_set = [
        DataQueryResult(interval=i[0], result=i[3], group_by=i[2] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        result_set,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("price"),
        group_field="price",
        include_group_code=True,
        include_code=True,
    )

    if not result or not result.data:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No results",
        )

    return result
@pytest.mark.parametrize(
    ["ts", "start_expected", "end_expected", "interval_expected", "length_expected"],
    [
        # Test 1 hour inclusive
        (
            TimeSeries(
                start=datetime.fromisoformat("2021-01-15 12:00:00+00:00"),
                end=datetime.fromisoformat("2021-01-15 13:00:00+00:00"),
                network=NetworkNEM,
                interval=NetworkNEM.get_interval(),
                period=human_to_period("1h"),
            ),
            # Also testing timezone shift from UTC to NEM time
            datetime.fromisoformat("2021-01-15 22:00:00+10:00"),
            datetime.fromisoformat("2021-01-15 23:00:00+10:00"),
            "5m",
            13,  # number of 5 minute intervals in an hour _inclusive_
        ),
        # Test 1 week inclusive
        (
            TimeSeries(
                start=datetime.fromisoformat("1997-05-05 12:45:00+00:00"),
                end=datetime.fromisoformat("2021-01-15 12:45:00+00:00"),
                network=NetworkNEM,
                interval=NetworkNEM.get_interval(),
                period=human_to_period("7d"),
def get_export_map() -> StatMetadata:
    """
    Generates a map of all export JSONs
    """
    session = SessionLocal()

    networks = session.query(Network).filter(Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU, networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range")

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=NetworkAU.get_interval(),
            period=human_to_period("7d"),
        )
        _exmap.append(export)

        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=country,
                date_range=scada_range,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                interval=human_to_interval("1d"),
                period=human_to_period("1Y"),
            )
            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )
        _exmap.append(export)

    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)
        bom_station = get_network_region_weather_station(network.code)

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=network_schema.get_interval(),
            period=human_to_period("7d"),
        )

        if network.code == "WEM":
            export.networks = [NetworkWEM, NetworkAPVI]
            export.network_region_query = "WEM"

        _exmap.append(export)

        if not scada_range:
            raise Exception("Require a scada range")

        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                bom_station=bom_station,
                year=year,
                period=human_to_period("1Y"),
                interval=human_to_interval("1d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        if network.code == "WEM":
            export.networks = [NetworkWEM, NetworkAPVI]
            export.network_region_query = "WEM"

        _exmap.append(export)

        # Skip cases like wem/wem where the region is superfluous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema, network_region=region)
            bom_station = get_network_region_weather_station(region.code)

            if not scada_range:
                raise Exception("Require a scada range")

            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.live,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("7d"),
                interval=network_schema.get_interval(),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)
            for year in range(
                datetime.now().year,
                scada_range.start.year - 1,
                -1,
            ):
                export = StatExport(
                    stat_type=StatType.energy,
                    priority=PriorityType.daily,
                    country=network.country,
                    date_range=scada_range,
                    network=network_schema,
                    network_region=region.code,
                    bom_station=bom_station,
                    year=year,
                    period=human_to_period("1Y"),
                    interval=human_to_interval("1d"),
                )
                _exmap.append(export)

            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.monthly,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("all"),
                interval=human_to_interval("1M"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

    export_meta = StatMetadata(
        date_created=datetime.now(), version=get_version(), resources=_exmap
    )

    return export_meta
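# Below is a hypothetical usage sketch for get_export_map() (not part of the
# original module). get_by_stat_type(), get_by_priority() and the resources
# attribute are used exactly this way in export_energy() above; nothing further
# about the StatMetadata API is assumed.
def _example_count_daily_energy_exports() -> None:
    export_map = get_export_map().get_by_stat_type(StatType.energy)
    daily = export_map.get_by_priority(PriorityType.daily)
    print(len(daily.resources))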