def run_energy_update_yesterday(network: NetworkSchema = NetworkNEM, days: int = 1) -> None:
    """Run the daily energy sum update for the most recent day(s).

    This task is scheduled in scheduler/db. Currently NEM-only.
    """
    # Data is published in local (Sydney) time
    sydney_tz = pytz.timezone("Australia/Sydney")

    # Midnight today, re-expressed in the network's fixed-offset timezone
    today_midnight = datetime.now(sydney_tz).replace(
        tzinfo=network.get_fixed_offset(),
        hour=0,
        minute=0,
        second=0,
        microsecond=0,
    )

    date_max = today_midnight
    date_min = date_max - timedelta(days=days)

    region_codes = [region.code for region in get_network_regions(network)]

    for region_code in region_codes:
        run_energy_calc(region_code, date_min, date_max, network=network)

    slack_message("Ran energy dailies for regions: {}".format(",".join(region_codes)))
def update_facility_seen_range(
    include_first_seen: bool = False,
    facility_codes: Optional[List[str]] = None,
) -> bool:
    """Update the first/last seen dates for facilities.

    For each facility, records the earliest and latest time the facility
    appears in the power data from FacilityScada.

    Args:
        include_first_seen (bool, optional): Also update the earliest seen time.
            Defaults to False.
        facility_codes (Optional[List[str]], optional): Restrict the update to
            these facility codes. Defaults to None (all facilities).

    Returns:
        bool: True on success
    """
    engine = get_database_engine()

    seen_query = get_update_seen_query(
        include_first_seen=include_first_seen,
        facility_codes=facility_codes,
    )

    with engine.connect() as conn:
        logger.debug(seen_query)
        conn.execute(seen_query)

    slack_message("Ran facility_seen_range")

    return True
def alert_missing_emission_factors() -> None:
    """Post a slack alert for every facility that has no emission factor."""
    for record in get_facility_no_emission_factor():
        slack_message(
            "{} in {} {} with fueltech {} is missing factor".format(
                record.station_name,
                record.network_id,
                record.network_region,
                record.fueltech_id,
            )
        )
def aemo_wem_live_interval() -> bool:
    """Monitor the delay of the AEMO live WEM scada data on the portal.

    Returns:
        bool: True when the live feed is delayed beyond the alert threshold
    """
    network = NetworkWEM
    now_date = datetime.now().astimezone(network.get_timezone())

    live_most_recent = get_aemo_wem_live_facility_intervals_recent_date()
    live_delta = chop_microseconds(now_date - live_most_recent)

    logger.debug("Live time: {}, delay: {}".format(live_most_recent, live_delta))

    # @TODO move the minutes into settings
    if live_delta > timedelta(minutes=90):
        # fixed typo in the alert message: "curently" -> "currently"
        slack_message(
            "*WARNING*: AEMO Live intervals for WEM on {} currently delayed by {}\n\nAEMO feed most recent: {}".format(
                settings.env, live_delta, live_most_recent
            )
        )
        return True

    return False
def check_generated_gaps(days: int = 3) -> None:
    """Check each network for gaps in generation data.

    Runs the gapfill check for every gap type on every supported network and
    alerts to slack / logs an error when gaps are found.

    Args:
        days (int, optional): Number of days to look back. Defaults to 3.
    """
    for network in [NetworkNEM, NetworkWEM, NetworkAPVI, NetworkAEMORooftop]:
        # iterate the enum directly — the previous enumerate() index was unused
        for gap_type in GapfillType:
            gaps = run_generated_gapfill_for_network(days=days, network=network, gap_type=gap_type)

            if gaps:
                slack_message(f"Found {len(gaps)} generation gaps in {network.code} for {gap_type} @nik")
                logger.error(f"Found {len(gaps)} generation gaps in {network.code} for {gap_type}")
def run_energy_calc(
    date_min: datetime,
    date_max: datetime,
    network: NetworkSchema,
    region: Optional[str] = None,
    fueltech_id: Optional[str] = None,
    facility_codes: Optional[List[str]] = None,
    run_clear: bool = False,
) -> int:
    """Runs the actual energy calc - believe it or not

    Queries generation (or flow) records for the window, shapes them into a
    dataframe and inserts the derived energy values.

    Returns:
        int: number of records inserted (0 when the query returned nothing)
    """
    flow = fueltech_to_flow(fueltech_id) if fueltech_id else None

    # @TODO get rid of the hard-coded networknem part
    if flow and region and network == NetworkNEM:
        generated_results: List[Dict] = get_flows(
            date_min, date_max, network_region=region, network=network, flow=flow
        )
    else:
        generated_results = get_generated(
            date_min,
            date_max,
            network_region=region,
            network=network,
            fueltech_id=fueltech_id,
            run_clear=run_clear,
            facility_codes=facility_codes,
        )

    num_records = 0

    try:
        if len(generated_results) < 1:
            logger.warning(
                "No results from get_generated query for {} {} {}".format(region, date_max, fueltech_id)
            )
            return 0

        generated_frame = shape_energy_dataframe(generated_results, network=network)
        num_records = insert_energies(generated_frame, network=network)

        logger.info("Done {} for {} => {}".format(region, date_min, date_max))
    except Exception as e:
        logger.error(e)
        slack_message("Energy archive error: {}".format(e))

    return num_records
def facility_first_seen_check() -> None:
    """Find new DUIDs and alert on them"""
    recent_facilities = get_facility_first_seen("3 days")

    for facility in ignored_duids(recent_facilities):
        msg = "Found new facility on network {} with DUID: {}".format(
            facility.network_id, facility.code
        )
        slack_message(msg)
        logger.info(msg)
def db_refresh_material_views() -> None:
    """Run the daily energy update, yearly aggregates, materialized view
    refreshes and the latest/monthly energy exports.

    NOTE(review): the call order looks deliberate — energy/aggregate updates
    run before the materialized views are refreshed and exported; confirm
    before reordering.
    """
    run_energy_update_days(days=2)
    run_aggregates_facility_year(DATE_CURRENT_YEAR)
    run_daily_fueltech_summary()
    # refresh the facility / emissions / interchange views in turn
    refresh_material_views("mv_facility_all")
    refresh_material_views("mv_region_emissions")
    refresh_material_views("mv_interchange_energy_nem_region")
    # push out the refreshed data
    export_energy(latest=True)
    export_energy(priority=PriorityType.monthly)
    slack_message("Ran daily energy update and aggregates on {}".format(
        settings.env))
def check_opennem_interval_delays(network_code: str) -> bool:
    """Runs periodically and alerts if there is a current delay in output of power intervals

    Args:
        network_code (str): code of the network to check

    Returns:
        bool: True when an alert was raised, False otherwise
    """
    network = network_from_network_code(network_code)

    env = ""
    if settings.debug:
        env = ".dev"

    url = f"https://data{env}.opennem.org.au/v3/stats/au/{network.code}/power/7d.json"

    resp = http.get(url)

    if resp.status_code != 200 or not resp.ok:
        logger.error("Error retrieving: {}".format(url))
        return False

    resp_json = resp.json()

    if "data" not in resp_json:
        logger.error("Error retrieving wem power: malformed response")
        return False

    data = resp_json["data"]

    # the last series in the response carries the history block we check
    fueltech_data = data.pop()

    history_end_date = fueltech_data["history"]["last"]
    history_date = parse_date(history_end_date, dayfirst=False)

    if not history_date:
        logger.error("Could not read history date for opennem interval monitor")
        return False

    now_date = datetime.now().astimezone(network.get_timezone())  # type: ignore

    # subtract one interval: data for the current interval isn't due yet
    time_delta = chop_delta_microseconds(now_date - history_date) - timedelta(minutes=network.interval_size)

    logger.debug("Live time: {}, delay: {}".format(history_date, time_delta))

    alert_threshold = (
        network.monitor_interval_alert_threshold or settings.monitor_interval_alert_threshold or 60
    )

    if time_delta > timedelta(minutes=alert_threshold):
        slack_message(
            f"*WARNING*: OpenNEM {network.code} interval delay on {settings.env} currently: {time_delta}.\n",
            tag_users=settings.monitoring_alert_slack_user,
        )
        return True

    # BUG FIX: previously fell through and implicitly returned None despite
    # the declared -> bool; return False explicitly when there is no delay
    return False
def update_facility_seen_range(last_seen: bool = True) -> bool:
    """Updates last seen and first seen"""
    engine = get_database_engine()
    seen_query = get_update_seen_query(last_seen)

    with engine.connect() as conn:
        logger.debug(seen_query)
        conn.execute(seen_query)

    slack_message("Ran facility_seen_range")

    return True
def check_opennem_interval_delays(network_code: str) -> bool:
    """Check the published power intervals for a network and alert on delay.

    Args:
        network_code (str): code of the network to check

    Returns:
        bool: True when an alert was raised, False otherwise
    """
    network = network_from_network_code(network_code)

    # BUG FIX: env was only assigned when settings.debug was truthy, raising
    # NameError in production; default it to the empty string
    env = ""
    if settings.debug:
        env = ".dev"

    # BUG FIX: two adjacent f-strings were implicitly concatenated into a
    # single invalid URL; build one URL with the optional env suffix instead
    url = f"https://data{env}.opennem.org.au/v3/stats/au/{network.code}/power/7d.json"

    resp = http.get(url)

    if resp.status_code != 200 or not resp.ok:
        logger.error("Error retrieving: {}".format(url))
        return False

    resp_json = resp.json()

    if "data" not in resp_json:
        logger.error("Error retrieving wem power: malformed response")
        return False

    data = resp_json["data"]
    fueltech_data = data.pop()

    history_end_date = fueltech_data["history"]["last"]
    history_date = parse_date(history_end_date, dayfirst=False)

    if not history_date:
        logger.error("Could not read history date for opennem interval monitor")
        return False

    now_date = datetime.now().astimezone(network.get_timezone())

    time_delta = chop_microseconds(now_date - history_date)

    logger.debug("Live time: {}, delay: {}".format(history_date, time_delta))

    if time_delta > timedelta(hours=3):
        slack_message(
            "*WARNING*: OpenNEM {} interval delay on {} currently: {}\n".format(
                network.code, settings.env, time_delta
            )
        )
        return True

    # previously fell through returning None; return False explicitly
    return False
def run_energy_calc(
    region: str,
    date_min: datetime,
    date_max: datetime,
    network: NetworkSchema,
    fueltech_id: Optional[str] = None,
) -> int:
    """Run the energy calculation for a region over a date window.

    Returns:
        int: number of records inserted (0 when the query returned nothing)
    """
    flow = fueltech_to_flow(fueltech_id) if fueltech_id else None

    if flow:
        generated_results: List[Dict] = get_flows(region, date_min, date_max, network=network, flow=flow)
    else:
        generated_results = get_generated(region, date_min, date_max, network=network, fueltech_id=fueltech_id)

    num_records = 0

    try:
        if len(generated_results) < 1:
            logger.warning(
                "No results from get_generated query for {} {} {}".format(region, date_max, fueltech_id)
            )
            return 0

        generated_frame = shape_energy_dataframe(generated_results)
        num_records = insert_energies(generated_frame, network=network)

        logger.info("Done {} for {} => {}".format(region, date_min, date_max))
    except Exception as e:
        logger.error(e)
        slack_message("Energy archive error: {}".format(e))

    return num_records
def run_daily_fueltech_summary() -> None:
    """Render the daily fueltech summary template and post it to slack."""
    summary = get_daily_fueltech_summary()
    rendered = serve_template("tweet_daily_summary.md", ds=summary)

    logger.debug(rendered)

    if slack_message(rendered):
        logger.info("Sent slack message")
    else:
        logger.error("Could not send slack message for daily fueltech summary")
def check_database_live() -> None:
    """Check if the database is live and alert if not"""
    error_message = ""

    try:
        _test_connection()
    except Exception as e:
        # only the exception class is reported, not its (possibly sensitive) args
        error_message = "Database connection error: {}".format(e.__class__)

    if error_message:
        global LAST_ALERTED

        if slack_message("Opennem {}".format(error_message), tag_users=["nik"]):
            LAST_ALERTED = datetime.now()
def schedule_export_all_monthly() -> None:
    """Run the full monthly export when workers are enabled."""
    if not settings.workers_run:
        return

    export_all_monthly()
    slack_message("Finished running export_all_monthly")
def run_energy_update_archive(
    year: Optional[int] = None,
    months: Optional[List[int]] = None,
    days: Optional[int] = None,
    regions: Optional[List[str]] = None,
    fueltech: Optional[str] = None,
    network: NetworkSchema = NetworkNEM,
) -> None:
    """Backfill energy calculations over the archive, month by month.

    Iterates years (newest first), months, regions and fueltechs, running
    run_energy_calc for each monthly window. Any argument left as None is
    expanded to the full range.

    Args:
        year: Single year to run; defaults to all years CUR_YEAR down to YEAR_EARLIEST.
        months: Months (1-12) to run; defaults to all months.
        days: If set, shorten each window to the first `days` days of the month.
        regions: Network region codes; defaults to all regions for `network`.
        fueltech: Single fueltech id; defaults to all loaded fueltechs.
        network: Network to run for. Defaults to NetworkNEM.
    """
    date_range = get_date_range(network=network)

    years: List[int] = [year]

    if not year:
        # newest-first: recent data is processed before older archive years
        years = [i for i in range(CUR_YEAR, YEAR_EARLIEST - 1, -1)]

    if not months:
        months = list(range(1, 13))

    if not regions:
        regions = [i.code for i in get_network_regions(network)]

    fueltechs = [fueltech]

    if not fueltech:
        fueltechs = [i for i in load_fueltechs().keys()]

    for y in years:
        for month in months:
            # window start: first of the month at the +10:00 fixed offset
            date_min = datetime(year=y, month=month, day=1, hour=0, minute=0, second=0, tzinfo=FixedOffset(600))

            date_max = date_min + get_human_interval("1M")

            if days:
                # NOTE(review): day=1 + days raises ValueError for values that
                # run past the end of the month — confirm callers keep it small
                date_max = datetime(
                    year=y,
                    month=month,
                    day=1 + days,
                    hour=0,
                    minute=0,
                    second=0,
                    tzinfo=FixedOffset(600),
                )

            # pad the window by 10 minutes either side (presumably to include
            # intervals that sit exactly on the month boundary)
            date_min = date_min - timedelta(minutes=10)
            date_max = date_max + timedelta(minutes=10)

            # clamp to the available data range
            if date_max > date_range.end:
                date_max = date_range.end

            if date_min > date_max:
                # ran past the end of available data — stop this year's months
                slack_message("Reached end of energy archive")
                logger.debug("reached end of archive")
                break

            for region in regions:
                for fueltech_id in fueltechs:
                    run_energy_calc(region, date_min, date_max, fueltech_id=fueltech_id, network=network)
def schedule_export_all_daily() -> None:
    """Run the full daily export when workers are enabled."""
    if not settings.workers_run:
        return

    export_all_daily()
    slack_message("Finished running export_all_daily on {}".format(settings.env))
def db_facility_seen_update() -> None:
    """Update the facility seen range when db workers are enabled."""
    if not settings.workers_db_run:
        return

    if update_facility_seen_range():
        slack_message("Ran facility seen range on {}".format(settings.env))
def db_facility_seen_update() -> None:
    """Refresh the facility seen range and post a confirmation to slack."""
    update_facility_seen_range()

    notice = f"Updated facility seen range on {settings.env}"
    slack_message(notice)
def run_run_network_data_range_update() -> None:
    """Refresh the network data range and post a confirmation to slack."""
    run_network_data_range_update()

    notice = "Ran network data range on {}".format(settings.env)
    slack_message(notice)
def task_run_backup() -> None:
    """Run a database backup and report the destination file to slack."""
    backup_destination = run_backup()
    slack_message(f"Ran backup on {settings.env} to {backup_destination}")
def schedule_daily_tasks() -> None:
    """Export daily-priority energy data when workers are enabled."""
    if not settings.workers_run:
        return

    export_energy(priority=PriorityType.daily)
    slack_message("Finished running energy dailies")
def schedule_energy_monthlies() -> None:
    """Export monthly-priority energy data when workers are enabled."""
    if not settings.workers_run:
        return

    export_energy(priority=PriorityType.monthly)
    slack_message("Finished running energy_monthlies")
def schedule_export_all_monthly() -> None:
    """Run the full monthly export and notify slack on completion."""
    export_all_monthly()

    slack_message("Finished running export_all_monthly")