def power_facility_query(
    facility_codes: List[str],
    network: NetworkSchema,
    period: TimePeriod,
    interval: Optional[TimeInterval] = None,
    date_range: Optional[ScadaDateRange] = None,
) -> str:
    """Build the SQL returning gap-filled average power per facility.

    Buckets ``facility_scada`` rows into ``interval``-sized buckets
    (filling gaps with 0) for each facility code, over the window
    ``(date_min, date_max]`` derived from ``date_range`` and ``period``.

    Args:
        facility_codes: facility DUIDs to include in the query.
        network: network schema providing timezone and default interval.
        period: lookback period; when set, the window start becomes
            ``date_max - period.period`` minutes.
        interval: bucket size; defaults to the network's interval.
        date_range: scada date range; looked up from the database when
            not supplied.

    Returns:
        The formatted SQL query string.

    Raises:
        Exception: when no date range or interval can be resolved.
    """
    __query = """
        select
            t.trading_interval at time zone '{timezone}',
            coalesce(avg(t.facility_power), 0),
            t.facility_code
        from (
            select
                time_bucket_gapfill('{trunc}', fs.trading_interval) AS trading_interval,
                coalesce(
                    avg(fs.generated), 0
                ) as facility_power,
                fs.facility_code
            from facility_scada fs
            join facility f on fs.facility_code = f.code
            where
                fs.trading_interval <= '{date_max}'
                and fs.trading_interval > '{date_min}'
                and fs.facility_code in ({facility_codes_parsed})
            group by 1, 3
        ) as t
        group by 1, 3
        order by 1 desc
    """

    if not date_range:
        date_range = get_scada_range(network=network, facilities=facility_codes)

    # The query interpolates the network's database timezone name.
    # (A previous postgres-format timezone lookup was dead code: it was
    # always overwritten by this assignment before use, so it is removed.)
    timezone = network.timezone_database

    if not interval:
        interval = network.get_interval()

    if not date_range:
        raise Exception("Require a date range for query")

    if not interval:
        raise Exception("Require an interval")

    date_max = date_range.get_end()
    date_min = date_range.get_start()

    # A period narrows the window to the most recent `period.period` minutes.
    if period:
        date_min = date_range.get_end() - timedelta(minutes=period.period)

    # NOTE(review): values are interpolated directly into the SQL string.
    # Inputs come from internal schemas rather than end users, but a
    # parameterized query would be safer — confirm before exposing this
    # to untrusted input.
    query = __query.format(
        facility_codes_parsed=duid_in_case(facility_codes),
        trunc=interval.interval_sql,
        timezone=timezone,
        date_max=date_max,
        date_min=date_min,
    )

    return query
def export_network_intervals_for_week(
    week_start: datetime,
    week_end: datetime,
    network: NetworkSchema,
    network_region: NetworkRegion,
) -> int | None:
    """Export one week of historic interval stats for a network region.

    Assembles power, emissions, demand/pricing, NEM interconnector flows
    and (best-effort) weather stats into a single stat set and writes it
    to the versioned weekly JSON path.

    Args:
        week_start: first day of the week being exported.
        week_end: last day of the week being exported.
        network: network the stats belong to.
        network_region: region within the network.

    Returns:
        Number of bytes written, or ``None`` when no power stats exist
        for the week.
    """
    week_number = get_week_number_from_datetime(week_start)

    # Lazy %-style args so the message is only rendered when INFO is enabled.
    logging.info(
        "Exporting historic intervals for network %s and region %s and year %s and week %s (%s => %s)",
        network.code,
        network_region.code,
        week_start.year,
        week_number,
        week_start,
        week_end,
    )

    time_series = TimeSeries(
        start=week_start,
        end=week_end + timedelta(days=1),
        network=network,
        interval=network.get_interval(),
        period=human_to_period("7d"),
    )

    stat_set = power_week(
        time_series=time_series,
        networks_query=network.get_networks_query(),
        network_region_code=network_region.code,
    )

    # No power data for the week means nothing to export.
    if not stat_set:
        return None

    # emissions for network
    emission_intervals = emissions_for_network_interval(
        time_series=time_series, network_region_code=network_region.code
    )

    stat_set.append_set(emission_intervals)

    # demand and pricing
    demand_energy_and_value = demand_network_region_daily(
        time_series=time_series,
        network_region_code=network_region.code,
        networks=network.get_networks_query(),
    )
    stat_set.append_set(demand_energy_and_value)

    # Interconnector flows only exist for the NEM network.
    if network == NetworkNEM:
        interconnector_flows = energy_interconnector_flows_and_emissions(
            time_series=time_series,
            networks_query=network.get_networks_query(),
            network_region_code=network_region.code,
        )
        stat_set.append_set(interconnector_flows)

    bom_station = get_network_region_weather_station(network_region.code)

    # Weather is best-effort: a failure must not block the export, but it
    # should be visible in the logs rather than silently swallowed.
    if bom_station:
        try:
            weather_stats = weather_daily(
                time_series=time_series,
                station_code=bom_station,
                network_region=network_region.code,
            )
            stat_set.append_set(weather_stats)
        except Exception:
            logging.exception(
                "Could not attach weather stats for station %s", bom_station
            )

    save_path = (
        f"v3/stats/historic/weekly/{network.code}/{network_region.code}/year/{week_start.year}/week/{week_number}.json"
    )

    written_bytes = write_output(save_path, stat_set)

    return written_bytes
def energy_facility_query(
    facility_codes: List[str],
    network: NetworkSchema,
    period: TimePeriod,
    interval: Optional[TimeInterval] = None,
) -> str:
    """Build the SQL returning energy, market value and emissions per facility.

    Aggregates ``mv_facility_all`` rows by ``interval.trunc`` in the
    network's timezone over a window derived from the scada date range
    and ``period``.

    Args:
        facility_codes: facility DUIDs to include in the query.
        network: network schema providing timezone and default interval.
        period: lookback period controlling the window start; "1Y" pins
            the start to January 1 of the current year.
        interval: truncation interval; defaults to the network's interval.

    Returns:
        The formatted SQL query string.

    Raises:
        Exception: when no date range can be resolved or no period given.
    """
    __query = """
    select
        date_trunc('{trunc}', t.trading_interval at time zone '{timezone}') as trading_day,
        t.code,
        sum(t.energy) / 1000 as fueltech_energy,
        sum(t.market_value) as fueltech_market_value,
        sum(t.emissions) as fueltech_emissions
    from mv_facility_all t
    where
        t.trading_interval <= '{date_max}'
        and t.trading_interval >= '{date_min}'
        and t.code in ({facility_codes_parsed})
    group by 1, 2
    order by
        trading_day desc;
    """

    timezone = network.timezone_database
    # Postgres-format UTC offset, used when pinning a yearly window start.
    offset = network.get_timezone(postgres_format=True)

    date_range: ScadaDateRange = get_scada_range(network=network, facilities=facility_codes)

    # Resolve the interval once; the duplicated fallback and the extra
    # "require an interval" raise in the previous version were redundant
    # (get_interval() supplies the default in all reachable paths).
    if not interval:
        interval = network.get_interval()

    if not date_range:
        raise Exception("Require a date range for query")

    if not period:
        raise Exception("Require a period")

    trunc = interval.trunc

    date_max = date_range.get_end()
    date_min = date_range.get_start()

    # Window start depends on the requested period; unlisted periods
    # (e.g. "all") keep the full scada range start.
    if period.period_human == "1M":
        date_min = date_range.get_end() - timedelta(minutes=period.period)
    elif period.period_human == "1Y":
        # might have to do +offset times year
        year = datetime.now().year
        date_min = "{}-01-01 00:00:00{}".format(year, offset)
    elif period.period_human in ["7d", "5Y", "10Y"]:
        date_min = date_range.get_end() - timedelta(minutes=period.period)

    query = dedent(
        __query.format(
            facility_codes_parsed=duid_in_case(facility_codes),
            trunc=trunc,
            date_max=date_max,
            date_min=date_min,
            timezone=timezone,
        )
    )

    return query