class OpennemBaseSchema(BaseConfig):
    """Base schema all OpenNEM API responses inherit from: carries the
    package version, a creation timestamp and a response status."""

    # NOTE(review): these defaults are evaluated once at class-definition
    # (import) time, not per instance — created_at will be the import time
    # unless overridden by the caller. Confirm this is intended; a pydantic
    # default_factory would defer evaluation to instance creation.
    version: str = get_version()
    created_at: datetime = chop_datetime_microseconds(datetime.now())
    response_status: ResponseStatus = ResponseStatus.OK

    # pre-validator: allows created_at to be supplied as an ISO string and
    # parsed into a datetime before field validation
    _version_fromstr = validator("created_at", allow_reuse=True, pre=True)(optionally_parse_string_datetime)
def test_version() -> None:
    """The major component of the package version is 3."""
    assert get_version(VersionPart.MAJOR) == "3"
from opennem.api.tasks.router import router as tasks_router
from opennem.api.weather.router import router as weather_router
from opennem.core.time import INTERVALS, PERIODS
from opennem.core.units import UNITS
from opennem.db import database, get_database_session
from opennem.db.models.opennem import FuelTech, Network, NetworkRegion
from opennem.schema.network import NetworkRegionSchema, NetworkSchema
from opennem.schema.opennem import FueltechSchema
from opennem.schema.time import TimeInterval, TimePeriod
from opennem.schema.units import UnitDefinition
from opennem.settings import settings
from opennem.utils.version import get_version

logger = logging.getLogger(__name__)

# Main API application instance
app = FastAPI(title="OpenNEM", debug=settings.debug, version=get_version())

# Expose Prometheus metrics on the app
Instrumentator().instrument(app).expose(app)

# Static file hosting is best-effort: a failure here is logged but must not
# prevent the API from starting
try:
    from fastapi.staticfiles import StaticFiles

    app.mount(
        "/static",
        StaticFiles(directory=settings.static_folder_path),
        name="static",
    )
except Exception as e:
    logger.error("Error initializing static hosting: {}".format(e))

# API routers
app.include_router(stats_router, tags=["Stats"], prefix="/stats")
app.include_router(locations_router, tags=["Locations"], prefix="/locations")
def stats_factory(
    stats: List[DataQueryResult],
    units: UnitDefinition,
    interval: TimeInterval,
    period: Optional[TimePeriod] = None,
    network: Optional[NetworkSchema] = None,
    timezone: Optional[Union[timezone, str]] = None,
    code: Optional[str] = None,
    region: Optional[str] = None,
    include_group_code: bool = False,
    fueltech_group: Optional[bool] = False,
    group_field: Optional[str] = None,
    data_id: Optional[str] = None,
    localize: Optional[bool] = True,
    include_code: Optional[bool] = True,
) -> Optional[OpennemDataSet]:
    """
    Takes a list of data query results and returns OpennemDataSets

    Rows in ``stats`` are grouped by their ``group_by`` value; each group
    becomes one OpennemData series keyed by interval, and all series are
    wrapped in a single OpennemDataSet.

    Returns None when a group has no dates at all; groups whose values are
    all null are skipped.

    @TODO optional groupby field
    @TODO multiple groupings / slight refactor
    """
    # network timezone takes precedence over any explicitly passed timezone
    if network:
        timezone = network.get_timezone()

    group_codes = list(set([i.group_by for i in stats if i.group_by]))

    stats_grouped = []

    for group_code in group_codes:
        data_grouped: Dict[datetime, Any] = dict()

        # collect this group's results keyed by interval timestamp
        for stat in stats:
            if stat.group_by != group_code:
                continue

            if stat.interval not in data_grouped:
                data_grouped[stat.interval] = None

            # if stat.result:
            data_grouped[stat.interval] = stat.result

        data_sorted = OrderedDict(sorted(data_grouped.items()))

        data_value = list(data_sorted.values())

        # Skip null series
        if len([i for i in data_value if i]) == 0:
            continue

        # @TODO possible bring this back
        # Skip zero series
        # if sum([i for i in data_value if i]) == 0:
        #     continue

        # Cast trailing nulls (temperature series keep them unless cast_nulls set)
        if not units.name.startswith("temperature") or units.cast_nulls:
            data_value = cast_trailing_nulls(data_value)

        # Find start/end dates
        dates = list(data_grouped.keys())

        if not dates:
            return None

        start = min(dates)
        end = max(dates)

        # should probably make sure these are the same TZ
        if localize:
            if timezone and not is_aware(start):
                start = make_aware(start, timezone)

            if timezone and not is_aware(end):
                end = make_aware(end, timezone)

        # apply the network's fixed offset when it has one
        if timezone and localize and network and network.offset:
            tz = pytz.FixedOffset(int(network.offset))

            start = start.astimezone(tz)
            end = end.astimezone(tz)

        # Everything needs a timezone even flat dates
        if network and timezone and not is_aware(start):
            start = start.replace(tzinfo=network.get_fixed_offset())

        if network and timezone and not is_aware(end):
            end = end.replace(tzinfo=network.get_fixed_offset())

        # free
        dates = []

        history = OpennemDataHistory(
            start=start,
            last=end,
            interval=interval.interval_human,
            data=data_value,
        )

        data = OpennemData(
            data_type=units.unit_type,
            units=units.unit,
            # interval=interval,
            # period=period,
            history=history,
        )

        if include_code:
            data.code = group_code

        if network:
            data.network = network.code.lower()

        # *sigh* - not the most flexible model
        # @TODO fix this schema and make it more flexible
        if fueltech_group:
            data.fuel_tech = group_code

            # NOTE(review): the region comparison below assumes network is set
            # whenever region is set — confirm against callers
            data_comps = [
                # @NOTE disable for now since FE doesn't
                # support it
                network.country if network else None,
                network.code.lower() if network else None,
                region.lower() if region and region.lower() != network.code.lower() else None,
                "fuel_tech",
                group_code,
                units.unit_type,
            ]

            data.id = ".".join(i for i in data_comps if i)
            # @TODO make this an alias
            data.type = units.unit_type

        if group_field:
            group_fields = []

            # setattr(data, group_field, group_code)

            if network:
                group_fields.append(network.country.lower())
                group_fields.append(network.code.lower())

            if region:
                if region.lower() != network.code.lower():
                    group_fields.append(region.lower())

            if units.name_alias:
                group_fields.append(units.name_alias)
            elif units.unit_type:
                group_fields.append(units.unit_type)

            if group_code and include_group_code:
                group_fields.append(group_code)

            group_fields.append(group_field)

            data.id = ".".join([f for f in group_fields if f])
            data.type = units.unit_type

        # explicit data_id overrides any generated id
        if data_id:
            data.id = data_id

        if not data.id:
            _id_list = []

            # @NOTE disable for now since FE doesn't
            # support it
            # network.country if network else None,

            if network:
                _id_list.append(network.code.lower())

            if region and (region.lower() != network.code.lower()):
                _id_list.append(region.lower())

            if group_code:
                _id_list.append(group_code.lower())

            if units and units.name_alias:
                _id_list.append(units.name_alias)
            elif units and units.name:
                _id_list.append(units.name)

            data.id = ".".join([f for f in _id_list if f])
            data.type = units.unit_type

        if region:
            data.region = region

        stats_grouped.append(data)

    dt_now = datetime.now()

    if network:
        dt_now = dt_now.astimezone(network.get_timezone())

    # @NOTE this should probably be
    # country.network.region
    if not code:
        if network:
            code = network.code

        if region:
            code = region

    stat_set = OpennemDataSet(
        type=units.unit_type,
        data=stats_grouped,
        created_at=dt_now,
        version=get_version(),
    )

    if include_code:
        stat_set.code = code

    if network:
        stat_set.network = network.code

    if region:
        stat_set.region = region

    return stat_set
from opennem.core.units import UNITS
from opennem.db import database, get_database_session
from opennem.db.models.opennem import FuelTech, Network, NetworkRegion
from opennem.schema.network import NetworkRegionSchema, NetworkSchema
from opennem.schema.opennem import FueltechSchema
from opennem.schema.time import TimeInterval, TimePeriod
from opennem.schema.units import UnitDefinition
from opennem.settings import settings
from opennem.utils.http_cache import PydanticCoder
from opennem.utils.version import get_version

logger = logging.getLogger(__name__)

# Main API application instance. Swagger UI is disabled (docs_url=None);
# ReDoc is served at /docs instead.
app = FastAPI(title="OpenNEM", debug=settings.debug, version=get_version(), redoc_url="/docs", docs_url=None)

# Expose Prometheus metrics on the app
Instrumentator().instrument(app).expose(app)

# Static file hosting is best-effort: a failure here is logged but must not
# prevent the API from starting
try:
    from fastapi.staticfiles import StaticFiles

    app.mount(
        "/static",
        StaticFiles(directory=settings.static_folder_path),
        name="static",
    )
except Exception as e:
    logger.error("Error initializing static hosting: {}".format(e))
def main():
    """CLI entry point for opennem tasks: parse arguments and dispatch to
    the matching module-level ``export_<task>`` function."""
    parser = argparse.ArgumentParser()
    parser.add_argument("task", nargs="?", default="power", help="Task type to run")
    parser.add_argument(
        "-p",
        "--priority",
        dest="priority",
        default="live",
        type=str,
        help="task priority type to run (default: live)",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        dest="verbosity",
        default=0,
        help="verbose output (repeat for increased verbosity)",
    )
    parser.add_argument(
        "-d",
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="run in debug mode",
    )
    parser.add_argument(
        "-q",
        "--quiet",
        action="store_const",
        const=-1,
        default=0,
        dest="verbosity",
        help="quiet output (show errors only)",
    )
    parser.add_argument(
        "--latest",
        dest="latest",
        action="store_true",
        default=False,
        help="run only latest",
    )

    args = parser.parse_args()

    # debug implies maximum verbosity
    if args.debug:
        args.verbosity = 5

    _setup_logger(args.verbosity)

    print(f"opennem.tasks: v{get_version()}")

    task_priority = priority_from_name(args.priority)

    if not task_priority:
        logger.error("Priority not found")
        return None

    print(args.task, task_priority)

    # tasks are dispatched by name: "power" -> export_power, etc.
    task_name = f"export_{args.task}"

    if task_name not in globals():
        logger.error("Task not found")
        return None

    task_func = globals()[task_name]

    print(f"Running {task_name} with priority {task_priority} and latest is {args.latest}")

    return task_func(priority=task_priority, latest=args.latest)
def get_export_map() -> StatMetadata:
    """
    Generates a map of all export JSONs

    Builds StatExport entries per country (power + yearly/all-time energy),
    per network and per network region, and wraps them in a StatMetadata.

    Raises:
        Exception: when no export-enabled networks exist or a scada range
            cannot be derived.
    """
    session = SessionLocal()
    networks = session.query(Network).filter(Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU, networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range")

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=NetworkAU.get_interval(),
            period=human_to_period("7d"),
        )
        _exmap.append(export)

        # yearly energy exports from current year back to the scada start
        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=country,
                date_range=scada_range,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                interval=human_to_interval("1d"),
                period=human_to_period("1Y"),
            )
            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )
        _exmap.append(export)

    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)
        bom_station = get_network_region_weather_station(network.code)

        # BUGFIX: this guard previously ran *after* scada_range was used in
        # the StatExport below — check before first use
        if not scada_range:
            raise Exception("Require a scada range")

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=network_schema.get_interval(),
            period=human_to_period("7d"),
        )

        # WEM power queries include APVI rooftop data
        if network.code == "WEM":
            export.networks = [NetworkWEM, NetworkAPVI]
            export.network_region_query = "WEM"

        _exmap.append(export)

        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                bom_station=bom_station,
                year=year,
                period=human_to_period("1Y"),
                interval=human_to_interval("1d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        if network.code == "WEM":
            export.networks = [NetworkWEM, NetworkAPVI]
            export.network_region_query = "WEM"

        _exmap.append(export)

        # Skip cases like wem/wem where region is supurfelous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema, network_region=region)
            bom_station = get_network_region_weather_station(region.code)

            if not scada_range:
                raise Exception("Require a scada range")

            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.live,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("7d"),
                interval=network_schema.get_interval(),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

            for year in range(
                datetime.now().year,
                scada_range.start.year - 1,
                -1,
            ):
                export = StatExport(
                    stat_type=StatType.energy,
                    priority=PriorityType.daily,
                    country=network.country,
                    date_range=scada_range,
                    network=network_schema,
                    network_region=region.code,
                    bom_station=bom_station,
                    year=year,
                    period=human_to_period("1Y"),
                    interval=human_to_interval("1d"),
                )
                _exmap.append(export)

            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.monthly,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("all"),
                interval=human_to_interval("1M"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

    export_meta = StatMetadata(
        date_created=datetime.now(), version=get_version(), resources=_exmap
    )

    return export_meta
from opennem.core.networks import (
    NetworkAPVI,
    NetworkAU,
    NetworkNEM,
    NetworkSchema,
    NetworkWEM,
    network_from_network_code,
)
from opennem.db import SessionLocal
from opennem.db.models.opennem import Network
from opennem.schema.time import TimeInterval, TimePeriod
from opennem.utils.version import VersionPart, get_version

logger = logging.getLogger(__name__)

# Major version component (e.g. "3"), used in export paths
VERSION_MAJOR = get_version(version_part=VersionPart.MAJOR)
# Root folder name for stat exports
STATS_FOLDER = "stats"


class StatType(Enum):
    """Kind of statistic an export contains."""

    power = "power"
    energy = "energy"
    interchange = "interchange"
    gov = "gov"


class PriorityType(Enum):
    """Export scheduling bucket — how often an export is regenerated."""

    live = 1
    daily = 2
    monthly = 3
    history = 4
def generate_weekly_export_map() -> StatMetadata:
    """
    Generate export map for weekly power series

    One StatExport per (year, week) pair, for the AU aggregate, each
    export-enabled network, and each of its regions (where a network has
    more than one region).

    @TODO deconstruct this into separate methods and schema
    ex. network.get_scada_range(), network_region.get_bom_station() etc.
    """
    session = get_scoped_session()

    networks = session.query(Network).filter(Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    # Loop countries
    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU, networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range for NetworkAU")

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=country,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                week=week,
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval("30m"),
                period=human_to_period("7d"),
            )
            _exmap.append(export)

    # Loop networks
    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)

        if not scada_range:
            raise Exception("Require a scada range for network: {}".format(network.code))

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=network.country,
                network=network_schema,
                year=year,
                week=week,
                # NOTE(review): week date range derived from NetworkAU here,
                # while the region loop below uses the network's own schema —
                # confirm NetworkAU is intended for per-network exports
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval(f"{network.interval_size}m"),
                period=human_to_period("7d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        # Skip cases like wem/wem where region is supurfelous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema, network_region=region.code)

            if not scada_range:
                # missing region scada range is non-fatal: log and skip
                logger.error(
                    "Require a scada range for network {} and region {}".format(
                        network_schema.code, region.code
                    )
                )
                continue

            for year, week in week_series(scada_range.end, scada_range.start):
                export = StatExport(
                    stat_type=StatType.power,
                    priority=PriorityType.history,
                    country=network.country,
                    network=network_schema,
                    year=year,
                    week=week,
                    date_range=date_range_from_week(year, week, network_from_network_code(network.code)),
                    interval=human_to_interval(f"{network.interval_size}m"),
                    period=human_to_period("7d"),
                )

                if network.code == "WEM":
                    export.networks = [NetworkWEM, NetworkAPVI]
                    export.network_region_query = "WEM"

                _exmap.append(export)

    export_meta = StatMetadata(date_created=datetime.now(), version=get_version(), resources=_exmap)

    return export_meta
# Kill warnings from various modules import warnings # noqa: E402 # Ignore noisy twisted deprecation warnings warnings.filterwarnings("ignore", category=DeprecationWarning, module="twisted") warnings.filterwarnings("ignore", module="openpyxl") # Core methods we reuire in loading the module from opennem.utils.version import get_version # noqa: E402 # Module variables v = "3.10.0" __env__ = "prod" __version__ = get_version() __package__ = "opennem" if __package__ not in sys.modules: raise Exception(f"Could not find {__package__} module") if sys.modules[__package__].__file__: MODULE_DIR_PATH = Path( sys.modules[__package__].__file__).parent # type: ignore else: MODULE_DIR_PATH = Path(__file__).parent DATA_DIR_PATH = MODULE_DIR_PATH / "data" # Setup console from rich.console import Console # noqa: E402
def export_electricitymap() -> None:
    """Export the custom ElectricityMap dataset.

    Builds a week of NEM flow data (with selected interconnector series
    inverted to match EM's direction convention), adds per-region NEM power
    series and a WEM power series, and writes the combined set to
    ``v3/clients/em/latest.json``.

    Raises:
        Exception: when a scada range cannot be derived or there are no
            flow results.
    """
    date_range = get_scada_range(network=NetworkNEM)

    # BUGFIX: get_scada_range can return None — previously only .start was
    # checked, which would raise AttributeError instead of a clear error
    if not date_range or not date_range.start:
        raise Exception("Could not get a scada range in EM export")

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("1d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        networks=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if not stat_set:
        raise Exception("No flow results for electricitymap export")

    em_set = OpennemDataSet(type="custom", version=get_version(), created_at=datetime.now(), data=[])

    # these flow series are reported in the opposite direction to what EM expects
    INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"]

    for ds in stat_set.data:
        if ds.code in INVERT_SETS:
            ds_inverted = invert_flow_set(ds)
            em_set.data.append(ds_inverted)
            logging.info("Inverted {}".format(ds.code))
        else:
            em_set.data.append(ds)

    for region in ["NSW1", "QLD1", "VIC1", "TAS1", "SA1"]:
        power_set = power_week(
            time_series,
            region,
            include_capacities=True,
            include_code=False,
            networks_query=[
                NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill
            ],
        )

        if power_set:
            em_set.append_set(power_set)

    # WEM custom
    date_range = get_scada_range(network=NetworkWEM)

    # BUGFIX: same None guard as above — previously date_range.start was
    # dereferenced unconditionally
    if not date_range or not date_range.start:
        raise Exception("Could not get a WEM scada range in EM export")

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=NetworkWEM,
        networks=[NetworkWEM, NetworkAPVI],
        interval=NetworkWEM.get_interval(),
        period=interchange_stat.period,
    )

    power_set = power_week(
        time_series,
        "WEM",
        include_capacities=True,
        networks_query=[NetworkWEM, NetworkAPVI],
        include_code=False,
    )

    if power_set:
        em_set.append_set(power_set)

    write_output("v3/clients/em/latest.json", em_set)
def export_all_monthly() -> None:
    """Export the combined all-networks monthly dataset.

    Gathers monthly energy, demand, interconnector and weather series for
    every region of NEM and WEM, prepends CPI stats, and writes the result
    to ``v3/stats/au/all/monthly.json``.
    """
    session = get_scoped_session()

    all_monthly = OpennemDataSet(code="au", data=[], version=get_version(), created_at=datetime.now())

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    # Iterate networks and network regions
    networks = [NetworkNEM, NetworkWEM]

    for network in networks:
        network_regions = session.query(NetworkRegion).filter(
            NetworkRegion.network_id == network.code).all()

        for network_region in network_regions:
            # networks to query for this region; renamed from `networks` to
            # avoid shadowing the list being iterated above
            query_networks = []

            # use the module logger consistently (was logging.info)
            logger.info(
                "Exporting monthly for network {} and region {}".format(
                    network.code, network_region.code))

            if network_region.code == "WEM":
                query_networks = [NetworkWEM, NetworkAPVI]

            if network == NetworkNEM:
                query_networks = [NetworkNEM, NetworkAEMORooftop]

            logger.debug("Running monthlies for {} and {}".format(
                network.code, network_region.code))

            scada_range: ScadaDateRange = get_scada_range(network=network, networks=query_networks, energy=True)

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".format(network, True))
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1M"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=query_networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series, network_region_code=network_region.code, networks=query_networks)
            stat_set.append_set(demand_energy_and_value)

            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=query_networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            all_monthly.append_set(stat_set)

            bom_station = get_network_region_weather_station(
                network_region.code)

            if bom_station:
                # weather is best-effort, but don't swallow failures silently
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    all_monthly.append_set(weather_stats)
                except Exception:
                    logger.exception(
                        "Could not get weather stats for region {}".format(network_region.code))

    write_output("v3/stats/au/all/monthly.json", all_monthly)
def export_all_monthly() -> None:
    """Export the combined all-regions monthly dataset.

    Gathers monthly energy, interconnector and weather series for every
    network region, prepends CPI stats, and writes the result to
    ``v3/stats/au/all/monthly.json``.
    """
    session = SessionLocal()
    network_regions = session.query(NetworkRegion).all()

    all_monthly = OpennemDataSet(code="au", data=[], version=get_version(), created_at=datetime.now())

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    for network_region in network_regions:
        network = network_from_network_code(network_region.network.code)

        networks = None

        # WEM energy includes APVI rooftop data
        if network_region.code == "WEM":
            networks = [NetworkWEM, NetworkAPVI]

        scada_range: ScadaDateRange = get_scada_range(network=network, networks=networks)

        # BUGFIX: get_scada_range can return None — previously
        # scada_range.start was dereferenced unconditionally below
        if not scada_range or not scada_range.start:
            continue

        time_series = TimeSeries(
            start=scada_range.start,
            end=scada_range.end,
            network=network,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        stat_set = energy_fueltech_daily(
            time_series=time_series,
            networks_query=networks,
            network_region_code=network_region.code,
        )

        if not stat_set:
            continue

        if network == NetworkNEM:
            interconnector_flows = energy_interconnector_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_flows)

            interconnector_emissions = energy_interconnector_emissions_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_emissions)

        all_monthly.append_set(stat_set)

        bom_station = get_network_region_weather_station(network_region.code)

        if bom_station:
            weather_stats = weather_daily(
                time_series=time_series,
                station_code=bom_station,
                network_region=network_region.code,
            )
            all_monthly.append_set(weather_stats)

    write_output("v3/stats/au/all/monthly.json", all_monthly)
# attach a stream handler with the shared formatter to the root logger
__root_logger.addHandler(logging.StreamHandler())
__root_logger.handlers[0].setFormatter(__root_logger_formatter)

# running interpreter version, e.g. "3.10.4"
PYTHON_VERSION = ".".join([
    str(i) for i in (sys.version_info.major, sys.version_info.minor,
                     sys.version_info.micro)
])

SYSTEM_STRING = platform()

# deployment environment, selected via the ENV environment variable
ENV = os.getenv("ENV", default="development")

VERSION = None

# a missing version is fatal at startup
try:
    VERSION = get_version()
except Exception:
    raise Exception("Could not get version")

logging.info(f"Loading OpenNEM ENV {ENV}")
logging.info(
    f"OpenNEM Version: {VERSION}. Python version: {PYTHON_VERSION}. System: {SYSTEM_STRING}"
)

env_files = load_env_file(ENV)

# Load the env files
# @TODO add logging
for _env_file in env_files:
    _env_full_path = Path(_env_file).resolve()
    logging.info("Loading env file: {}".format(_env_full_path))
class AEMOTableSet(BaseModel):
    """A set of parsed AEMO MMS tables, addressable by full or short name."""

    # NOTE(review): these defaults are evaluated once at class-definition
    # (import) time, not per instance — confirm this is intended
    version: str = get_version()
    generated: datetime = datetime.now()
    tables: List[AEMOTableSchema] = []

    @property
    def table_names(self) -> List[str]:
        """Full names of all tables in this set."""
        _names: List[str] = []

        for _t in self.tables:
            _names.append(_t.full_name)

        return _names

    @staticmethod
    def from_json(filename: str) -> Self:
        """Parse an AEMOTableSet from a JSON file"""
        file_path = Path(filename)

        if not file_path.is_file():
            # BUGFIX: the f-string had no placeholder, so the offending
            # filename was never included in the error message
            raise Exception(f"File not found: {filename}")

        with file_path.open() as fh:
            json_data = json.loads(fh.read())

        # NOTE(review): json_data is parsed but never returned, so this
        # method returns None despite its Self annotation — TODO construct
        # and return the table set from json_data

    def has_table(self, table_name: str) -> bool:
        """Whether a table exists in this set, matched by full name first
        and then by short name."""
        found_table: bool = False

        if not self.tables:
            return False

        if len(self.tables) < 1:
            return False

        table_lookup = list(filter(lambda t: t.full_name == table_name, self.tables))

        if len(table_lookup) > 0:
            return True

        # if not found search by name only
        # @NOTE this might lead to bugs
        table_lookup = list(filter(lambda t: t.name == table_name, self.tables))

        if len(table_lookup) > 0:
            return True

        return found_table

    def add_table(self, table: AEMOTableSchema, values_only: bool = False) -> bool:
        """Add a table to the set, merging records into an existing table
        with the same full name when present."""
        _existing_table = self.get_table(table.full_name)

        if _existing_table:
            for r in table.records:
                _existing_table.add_record(r, values_only=values_only)
        else:
            self.tables.append(table)

        return True

    def get_table(self, table_name: str) -> Optional[AEMOTableSchema]:
        """Look up a table by full name, falling back to short name.
        Returns None when not found."""
        if not self.has_table(table_name):
            return None

        table_lookup = list(filter(lambda t: t.full_name == table_name, self.tables))

        if table_lookup:
            return table_lookup.pop()

        table_lookup = list(filter(lambda t: t.name == table_name, self.tables))

        if table_lookup:
            return table_lookup.pop()

        logger.debug("Looking up table: {} amongst ({})".format(table_name, ", ".join([i.name for i in self.tables])))

        return None