def crawl_cli_run(name: str, all: bool = False) -> None:
    """Run all crawlers whose name matches the given pattern."""
    test_proxy()

    console.log("Run crawlers matching: {}".format(name))

    crawlers = get_crawl_set()

    try:
        crawlers_filtered = crawlers.get_crawlers_by_match(name, only_active=False)
    except Exception as e:
        console.log("[red]Could not find crawlers for {}[/red]: {}".format(name, e))
        return None

    if not crawlers_filtered:
        console.log("No crawlers found matching [red]{}[/red]".format(name))
        return None

    console.log("[green]Running {} crawlers[/green]".format(len(crawlers_filtered)))

    for c in crawlers_filtered:
        console.log(
            "Running crawler {} (Version: {})\n\tlast_crawled: {}\n\tlast_processed: {}\n\tserver_latest: {}".format(
                c.name, c.version, c.last_crawled, c.last_processed, c.server_latest
            )
        )

        run_crawl(c, latest=not all)
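# Illustrative usage only -- the match string below is hypothetical:
#
#   crawl_cli_run("nemweb.dispatch")            # crawl only the latest data
#   crawl_cli_run("nemweb.dispatch", all=True)  # full crawl (latest=False)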
def crawler_live_nemweb_dispatch_is() -> None:
    run_crawl(AEMONemwebDispatchIS)


def crawl_run_aemo_nemweb_dispatch_scada() -> None:
    run_crawl(AEMONNemwebDispatchScada)
    # re-export live power data once fresh SCADA values have been crawled
    export_power(priority=PriorityType.live)


def crawler_run_aemo_nemweb_dispatch_actual_load() -> None:
    run_crawl(AEMONEMNextDayDispatch)


def crawler_run_aemo_nemweb_dispatch_actual_gen() -> None:
    run_crawl(AEMONEMDispatchActualGEN)


def crawler_run_aemo_nemweb_rooftop_forecast() -> None:
    run_crawl(AEMONemwebRooftopForecast)


def crawler_run_wem_balancing() -> None:
    run_crawl(WEMBalancing)


def crawler_run_wem_facility_scada() -> None:
    run_crawl(WEMFacilityScada)


def crawler_run_wem_scada_live() -> None:
    run_crawl(WEMFacilityScadaLive)


def crawler_run_wem_balancing_live() -> None:
    run_crawl(WEMBalancingLive)


def crawler_run_apvi_today() -> None:
    run_crawl(APVIRooftopTodayCrawler)


def crawler_run_bom_capitals() -> None:
    run_crawl(BOMCapitals)


def crawler_live_nemweb_rooftop() -> None:
    run_crawl(AEMONemwebRooftop)


def crawler_live_nemweb_trading_is() -> None:
    run_crawl(AEMONemwebTradingIS)


def run_crawls() -> None:
    """Run every crawler that is on the live schedule."""
    cs = get_crawl_set()

    for crawler in cs.crawlers:
        if crawler.schedule and crawler.schedule == CrawlerSchedule.live:
            run_crawl(crawler)
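# Illustrative sketch only: one way run_crawls() could be driven on an
# interval. The __main__ entrypoint and the 5-minute period are assumptions
# for the example, not part of the project's scheduling setup.
if __name__ == "__main__":
    import time

    while True:
        run_crawls()
        time.sleep(300)  # assumed 5-minute pause between live crawl passes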