Exemplo n.º 1
0
def export_flows() -> None:
    """Export the 7-day NEM interconnector flow series to the v3 output bucket."""
    date_range = get_scada_range(network=NetworkNEM)

    # fix: guard against a missing scada range rather than crashing with an
    # AttributeError below (consistent with export_electricitymap)
    if not date_range or not date_range.start:
        logger.error("Could not get a scada range for flows export")
        return None

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("7d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if stat_set:
        write_output(
            f"v3/stats/au/{interchange_stat.network.code}/flows/7d.json",
            stat_set)
Exemplo n.º 2
0
def export_historic_intervals(limit: int | None = None) -> None:
    """Export weekly historic interval data for each region of NEM and WEM.

    :param limit: optionally cap the number of weeks exported per region
    """
    session = get_scoped_session()

    networks = [NetworkNEM, NetworkWEM]

    for network in networks:
        network_regions: list[NetworkRegion] = (
            session.query(NetworkRegion).filter(
                NetworkRegion.network_id == network.code).all())

        # fix: the scada range only depends on the network, so look it up
        # once per network instead of once per region
        scada_range: ScadaDateRange = get_scada_range(network=network,
                                                      networks=networks,
                                                      energy=False)

        if not scada_range or not scada_range.start:
            logger.error(
                "Could not get scada range for network {}".format(network))
            continue

        for network_region in network_regions:
            # start/end are intentionally swapped so weeks iterate backwards
            # from the most recent data
            for week_start, week_end in week_series_datetimes(
                    start=scada_range.end, end=scada_range.start,
                    length=limit):
                # skip partial/future weeks
                if week_start > get_today_opennem():
                    continue

                try:
                    export_network_intervals_for_week(
                        week_start,
                        week_end,
                        network=network,
                        network_region=network_region)
                except Exception as e:
                    logger.error(f"export_historic_intervals error: {e}")
Exemplo n.º 3
0
def run_aggregates_facility_all(network: NetworkSchema) -> None:
    """Run the daily facility aggregate query over the full scada range of a network."""
    scada_range: ScadaDateRange = get_scada_range(network=network)

    # without a scada range there is nothing to aggregate
    if not scada_range:
        logger.error(f"Could not find a scada range for {network.code}")
        return None

    exec_aggregates_facility_daily_query(
        date_min=scada_range.start,
        date_max=scada_range.end,
        network=network,
    )
Exemplo n.º 4
0
def get_date_range(network: NetworkSchema) -> DatetimeRange:
    """Build the all-time daily datetime range for a network.

    NOTE(review): the scada range is taken from NetworkNEM regardless of the
    *network* argument — presumably to trim to NEM's (shortest) data span as
    done elsewhere in this module. Confirm this is intentional.
    """
    scada_range = get_scada_range(network=NetworkNEM)

    series = TimeSeries(
        network=network,
        start=scada_range.start,
        end=scada_range.end,
        interval=human_to_interval("1d"),
        period=human_to_period("all"),
    )

    return series.get_range()
Exemplo n.º 5
0
def generate_weekly_export_map() -> StatMetadata:
    """
    Generate export map for weekly power series.

    Builds one StatExport per week for the AU aggregate, for each
    export-enabled network, and for each region of multi-region networks.

    @TODO deconstruct this into separate methods and schema
    ex. network.get_scada_range(), network_region.get_bom_station() etc.
    """
    session = get_scoped_session()

    networks = session.query(Network).filter(
        Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    # Loop countries
    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU,
                                      networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range for NetworkAU")

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=country,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                week=week,
                date_range=date_range_from_week(year, week, NetworkAU),
                interval=human_to_interval("30m"),
                period=human_to_period("7d"),
            )
            _exmap.append(export)

    # Loop networks
    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)

        if not scada_range:
            raise Exception("Require a scada range for network: {}".format(
                network.code))

        for year, week in week_series(scada_range.end, scada_range.start):
            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.history,
                country=network.country,
                network=network_schema,
                year=year,
                week=week,
                # fix: derive the week range from this network's own schema
                # (was hard-coded to NetworkAU — an apparent copy/paste from
                # the country loop; the region loop below already uses the
                # network's schema)
                date_range=date_range_from_week(year, week, network_schema),
                interval=human_to_interval(f"{network.interval_size}m"),
                period=human_to_period("7d"),
            )

            if network.code == "WEM":
                export.networks = [NetworkWEM, NetworkAPVI]
                export.network_region_query = "WEM"

            _exmap.append(export)

        # Skip cases like wem/wem where region is superfluous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema,
                                          network_region=region.code)

            if not scada_range:
                logger.error(
                    "Require a scada range for network {} and region {}".
                    format(network_schema.code, region.code))
                continue

            for year, week in week_series(scada_range.end, scada_range.start):
                export = StatExport(
                    stat_type=StatType.power,
                    priority=PriorityType.history,
                    country=network.country,
                    network=network_schema,
                    year=year,
                    week=week,
                    # network_schema was already resolved from network.code
                    # above; no need to re-resolve it here
                    date_range=date_range_from_week(year, week,
                                                    network_schema),
                    interval=human_to_interval(f"{network.interval_size}m"),
                    period=human_to_period("7d"),
                )

                if network.code == "WEM":
                    export.networks = [NetworkWEM, NetworkAPVI]
                    export.network_region_query = "WEM"

                _exmap.append(export)

    export_meta = StatMetadata(date_created=datetime.now(),
                               version=get_version(),
                               resources=_exmap)

    return export_meta
Exemplo n.º 6
0
def export_electricitymap() -> None:
    """Build and export the electricitymap custom dataset.

    Combines NEM flow sets (some inverted to match electricitymap's
    direction convention), per-region power sets, and a WEM power set.
    """
    date_range = get_scada_range(network=NetworkNEM)

    if not date_range.start:
        raise Exception("Could not get a scada range in EM export")

    interchange_stat = StatExport(
        stat_type=StatType.power,
        priority=PriorityType.live,
        country="au",
        date_range=date_range,
        network=NetworkNEM,
        interval=NetworkNEM.get_interval(),
        period=human_to_period("1d"),
    )

    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=interchange_stat.network,
        networks=[NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill],
        interval=interchange_stat.interval,
        period=interchange_stat.period,
    )

    stat_set = power_flows_network_week(time_series=time_series)

    if not stat_set:
        raise Exception("No flow results for electricitymap export")

    em_set = OpennemDataSet(type="custom",
                            version=get_version(),
                            created_at=datetime.now(),
                            data=[])

    # flow directions electricitymap expects reversed
    INVERT_SETS = ["VIC1->NSW1", "VIC1->SA1"]

    for ds in stat_set.data:
        if ds.code in INVERT_SETS:
            ds_inverted = invert_flow_set(ds)
            em_set.data.append(ds_inverted)
            # fix: use the module logger, not the root logger
            logger.info("Inverted {}".format(ds.code))
        else:
            em_set.data.append(ds)

    for region in ["NSW1", "QLD1", "VIC1", "TAS1", "SA1"]:
        power_set = power_week(
            time_series,
            region,
            include_capacities=True,
            include_code=False,
            networks_query=[
                NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill
            ],
        )

        if power_set:
            em_set.append_set(power_set)

    date_range = get_scada_range(network=NetworkWEM)

    # fix: guard the WEM scada range like the NEM one above rather than
    # failing with an AttributeError when it is None
    if not date_range or not date_range.start:
        raise Exception("Could not get a WEM scada range in EM export")

    # WEM custom
    time_series = TimeSeries(
        start=date_range.start,
        end=date_range.end,
        network=NetworkWEM,
        networks=[NetworkWEM, NetworkAPVI],
        interval=NetworkWEM.get_interval(),
        period=interchange_stat.period,
    )

    power_set = power_week(
        time_series,
        "WEM",
        include_capacities=True,
        networks_query=[NetworkWEM, NetworkAPVI],
        include_code=False,
    )

    if power_set:
        em_set.append_set(power_set)

    write_output("v3/clients/em/latest.json", em_set)
Exemplo n.º 7
0
def export_power(
    stats: Optional[List[StatExport]] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export power stats from the export map.

    :param stats: explicit list of exports to run; when omitted the export
        map is consulted (filtered by *priority* if given)
    :param priority: priority tier of the export map to run
    :param latest: when True, stop after the first stat set is written
    """

    # Not passed a stat map so go and get one
    if not stats:
        export_map = None

        if priority and priority == PriorityType.history:
            export_map = get_weekly_export_map()

        else:
            export_map = get_export_map().get_by_stat_type(StatType.power)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    output_count: int = 0

    logger.info("Running {}export {} with {} stats".format(
        "latest " if latest else "", priority, len(stats)))

    for power_stat in stats:
        if power_stat.stat_type != StatType.power:
            continue

        # in latest mode only the first written stat set is exported
        if output_count >= 1 and latest:
            return None

        date_range_networks = power_stat.networks or []

        # trim the date range to NEM when present since it has the shortest
        # data span (same pattern as export_energy)
        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=power_stat.network, networks=date_range_networks)

        logger.debug("Date range for {}: {} => {}".format(
            power_stat.network.code, date_range.start, date_range.end))

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=power_stat.network,
            year=power_stat.year,
            interval=power_stat.interval,
            period=power_stat.period,
        )

        stat_set = power_week(
            time_series=time_series,
            network_region_code=power_stat.network_region_query
            or power_stat.network_region,
            networks_query=power_stat.networks,
        )

        if not stat_set:
            logger.info("No power stat set for {} {} {}".format(
                power_stat.period,
                power_stat.networks,
                power_stat.network_region,
            ))
            continue

        demand_set = demand_week(
            time_series=time_series,
            networks_query=power_stat.networks,
            network_region_code=power_stat.network_region_query
            or power_stat.network_region,
        )

        stat_set.append_set(demand_set)

        # region-level exports also carry their interconnector flow set
        if power_stat.network_region:
            flow_set = power_flows_region_week(
                time_series=time_series,
                network_region_code=power_stat.network_region,
            )

            if flow_set:
                stat_set.append_set(flow_set)

        # weather observations use a fixed 30m interval regardless of the
        # power series interval
        time_series_weather = time_series.copy()
        time_series_weather.interval = human_to_interval("30m")

        if power_stat.bom_station:
            # weather is best-effort: a missing or erroring BOM station must
            # not block the power export
            try:
                weather_set = weather_daily(
                    time_series=time_series_weather,
                    station_code=power_stat.bom_station,
                    network_region=power_stat.network_region,
                    include_min_max=False,
                    unit_name="temperature",
                )
                stat_set.append_set(weather_set)
            except Exception:
                pass

        write_output(power_stat.path, stat_set)
        output_count += 1
Exemplo n.º 8
0
def export_all_daily(
    networks: Optional[List[NetworkSchema]] = None,
    network_region_code: Optional[str] = None,
) -> None:
    """Export the all-time daily energy stat set for each export-enabled region.

    :param networks: networks to export (defaults to NEM and WEM)
    :param network_region_code: optionally restrict to a single region
    """
    # fix: avoid a mutable default argument for the network list
    if networks is None:
        networks = [NetworkNEM, NetworkWEM]

    session = get_scoped_session()

    cpi = gov_stats_cpi()

    for network in networks:
        network_regions = session.query(NetworkRegion).filter_by(
            export_set=True).filter_by(network_id=network.code)

        if network_region_code:
            network_regions = network_regions.filter_by(
                code=network_region_code)

        network_regions = network_regions.all()

        for network_region in network_regions:

            # fix: use the module logger, not the root logger
            logger.info("Exporting for network {} and region {}".format(
                network.code, network_region.code))

            # networks passed to the underlying queries — a distinct name so
            # the *networks* parameter driving the outer loop isn't shadowed
            query_networks = [
                NetworkNEM, NetworkAEMORooftop, NetworkAEMORooftopBackfill
            ]

            if network_region.code == "WEM":
                query_networks = [NetworkWEM, NetworkAPVI]

            scada_range: ScadaDateRange = get_scada_range(
                network=network, networks=query_networks, energy=True)

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".
                    format(network, True))
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1d"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=query_networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=network_region.code,
                networks=query_networks)
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=query_networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            bom_station = get_network_region_weather_station(
                network_region.code)

            # weather is best-effort and must not block the export
            if bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    stat_set.append_set(weather_stats)
                except Exception:
                    pass

            if cpi:
                stat_set.append_set(cpi)

            write_output(f"v3/stats/au/{network_region.code}/daily.json",
                         stat_set)
Exemplo n.º 9
0
def export_all_monthly() -> None:
    """Export the all-time monthly energy stat set for every NEM/WEM region into one dataset."""
    session = get_scoped_session()

    all_monthly = OpennemDataSet(code="au",
                                 data=[],
                                 version=get_version(),
                                 created_at=datetime.now())

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    # Iterate networks and network regions
    networks = [NetworkNEM, NetworkWEM]

    for network in networks:
        network_regions = session.query(NetworkRegion).filter(
            NetworkRegion.network_id == network.code).all()

        for network_region in network_regions:
            # networks passed to the underlying queries — a distinct name so
            # the *networks* list driving the outer loop isn't shadowed
            query_networks = []

            # fix: use the module logger, not the root logger
            logger.info(
                "Exporting monthly for network {} and region {}".format(
                    network.code, network_region.code))

            if network_region.code == "WEM":
                query_networks = [NetworkWEM, NetworkAPVI]

            if network == NetworkNEM:
                query_networks = [NetworkNEM, NetworkAEMORooftop]

            logger.debug("Running monthlies for {} and {}".format(
                network.code, network_region.code))

            scada_range: ScadaDateRange = get_scada_range(
                network=network, networks=query_networks, energy=True)

            if not scada_range or not scada_range.start:
                logger.error(
                    "Could not get scada range for network {} and energy {}".
                    format(network, True))
                continue

            time_series = TimeSeries(
                start=scada_range.start,
                end=scada_range.end,
                network=network,
                interval=human_to_interval("1M"),
                period=human_to_period("all"),
            )

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=query_networks,
                network_region_code=network_region.code,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=network_region.code,
                networks=query_networks)
            stat_set.append_set(demand_energy_and_value)

            if network == NetworkNEM:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=query_networks,
                    network_region_code=network_region.code,
                )
                stat_set.append_set(interconnector_flows)

            all_monthly.append_set(stat_set)

            bom_station = get_network_region_weather_station(
                network_region.code)

            # weather is best-effort and must not block the export
            if bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=bom_station,
                        network_region=network_region.code,
                    )
                    all_monthly.append_set(weather_stats)
                except Exception:
                    pass

    write_output("v3/stats/au/all/monthly.json", all_monthly)
Exemplo n.º 10
0
def station_observations_api(
        station_code: str = Query(None, description="Station code"),
        interval_human: str = Query("15m", description="Interval"),
        period_human: str = Query("7d", description="Period"),
        station_codes: List[str] = [],
        network_code: str = "NEM",
        timezone: str = None,
        offset: str = None,
        year: int = None,
        engine=Depends(get_database_engine),
) -> OpennemDataSet:
    """Return temperature observations for one or more BOM stations.

    Raises a 404 when no matching observations are found.
    """
    units = get_unit("temperature")

    # fix: the fallbacks previously assigned *interval*/*period* directly,
    # which were then unconditionally overwritten by the parses below — an
    # empty interval/period crashed instead of defaulting
    if not interval_human:
        interval_human = "15m"

    if not period_human:
        period_human = "7d"

    network = None

    if network_code:
        network = network_from_network_code(network_code)

    # a single station code takes precedence over the list
    if station_code:
        station_codes = [station_code]

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)

    if timezone:
        timezone = pytz.timezone(timezone)

    # a fixed offset takes precedence over a named timezone
    if offset:
        timezone = get_fixed_timezone(offset)

    scada_range = None

    if network:
        scada_range = get_scada_range(network=network)

    query = observation_query(
        station_codes=station_codes,
        interval=interval,
        period=period,
        network=network,
        scada_range=scada_range,
        year=year,
    )

    with engine.connect() as c:
        results = list(c.execute(query))

    # rows are (interval, group, value)
    stats = [
        DataQueryResult(interval=i[0],
                        result=i[2],
                        group_by=i[1] if len(i) > 1 else None) for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats=stats,
        units=units,
        interval=interval,
        period=period,
        network=network,
        code="bom",
        group_field="temperature",
    )

    return result
Exemplo n.º 11
0
def export_all_daily() -> None:
    """Export the all-time daily energy stat set for every network region."""
    session = SessionLocal()
    network_regions = session.query(NetworkRegion).all()

    cpi = gov_stats_cpi()

    for network_region in network_regions:
        network = network_from_network_code(network_region.network.code)
        networks = None

        if network_region.code == "WEM":
            networks = [NetworkWEM, NetworkAPVI]

        scada_range: ScadaDateRange = get_scada_range(network=network,
                                                      networks=networks)

        # fix: guard against a missing scada range (consistent with the
        # other exporters) instead of an AttributeError on None below
        if not scada_range or not scada_range.start:
            logger.error(
                "Could not get scada range for network {}".format(network))
            continue

        time_series = TimeSeries(
            start=scada_range.start,
            end=scada_range.end,
            network=network,
            interval=human_to_interval("1d"),
            period=human_to_period("all"),
        )

        stat_set = energy_fueltech_daily(
            time_series=time_series,
            networks_query=networks,
            network_region_code=network_region.code,
        )

        if not stat_set:
            continue

        # Hard coded to NEM only atm but we'll put has_interconnectors
        # in the metadata to automate all this
        if network == NetworkNEM:
            interconnector_flows = energy_interconnector_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_flows)

            interconnector_emissions = energy_interconnector_emissions_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_emissions)

        bom_station = get_network_region_weather_station(network_region.code)

        if bom_station:
            weather_stats = weather_daily(
                time_series=time_series,
                station_code=bom_station,
                network_region=network_region.code,
            )
            stat_set.append_set(weather_stats)

        if cpi:
            stat_set.append_set(cpi)

        write_output(f"v3/stats/au/{network_region.code}/daily.json", stat_set)
Exemplo n.º 12
0
def export_all_monthly() -> None:
    """Export the all-time monthly energy stat set for every network region into one dataset."""
    session = SessionLocal()
    network_regions = session.query(NetworkRegion).all()

    all_monthly = OpennemDataSet(code="au",
                                 data=[],
                                 version=get_version(),
                                 created_at=datetime.now())

    cpi = gov_stats_cpi()
    all_monthly.append_set(cpi)

    for network_region in network_regions:
        network = network_from_network_code(network_region.network.code)
        networks = None

        if network_region.code == "WEM":
            networks = [NetworkWEM, NetworkAPVI]

        scada_range: ScadaDateRange = get_scada_range(network=network,
                                                      networks=networks)

        # fix: guard against a missing scada range (consistent with the
        # other exporters) instead of an AttributeError on None below
        if not scada_range or not scada_range.start:
            logger.error(
                "Could not get scada range for network {}".format(network))
            continue

        time_series = TimeSeries(
            start=scada_range.start,
            end=scada_range.end,
            network=network,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        stat_set = energy_fueltech_daily(
            time_series=time_series,
            networks_query=networks,
            network_region_code=network_region.code,
        )

        if not stat_set:
            continue

        if network == NetworkNEM:
            interconnector_flows = energy_interconnector_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_flows)

            interconnector_emissions = energy_interconnector_emissions_region_daily(
                time_series=time_series,
                networks_query=networks,
                network_region_code=network_region.code,
            )
            stat_set.append_set(interconnector_emissions)

        all_monthly.append_set(stat_set)

        bom_station = get_network_region_weather_station(network_region.code)

        if bom_station:
            weather_stats = weather_daily(
                time_series=time_series,
                station_code=bom_station,
                network_region=network_region.code,
            )
            all_monthly.append_set(weather_stats)

    write_output("v3/stats/au/all/monthly.json", all_monthly)
Exemplo n.º 13
0
def export_energy(
    stats: Optional[List[StatExport]] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export energy stats from the export map.

    :param stats: explicit list of exports to run; when omitted the export
        map is consulted (filtered by *priority* if given)
    :param priority: priority tier of the export map to run
    :param latest: when True, only re-export the current year's stats
    """
    if not stats:
        export_map = get_export_map().get_by_stat_type(StatType.energy)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    CURRENT_YEAR = datetime.now().year

    for energy_stat in stats:
        if energy_stat.stat_type != StatType.energy:
            continue

        # @FIX trim to NEM since it's the one with the shortest
        # data time span.
        # @TODO find a better and more flexible way to do this in the
        # range method
        date_range_networks = energy_stat.networks or []

        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=energy_stat.network, networks=date_range_networks)

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=energy_stat.network,
            year=energy_stat.year,
            interval=energy_stat.interval,
            period=human_to_period("1Y"),
        )

        # yearly export: daily values for a single calendar year
        if energy_stat.year:

            # in latest mode only the current year is re-exported
            if latest and energy_stat.year != CURRENT_YEAR:
                continue

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_region_daily(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

                interconnector_emissions = energy_interconnector_emissions_region_daily(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_emissions)

            if energy_stat.bom_station:
                weather_stats = weather_daily(
                    time_series=time_series,
                    station_code=energy_stat.bom_station,
                    network_region=energy_stat.network_region,
                )
                stat_set.append_set(weather_stats)

            write_output(energy_stat.path, stat_set)

        # all-time export: monthly values over the full range (skipped in
        # latest mode)
        elif energy_stat.period and energy_stat.period.period_human == "all" and not latest:
            time_series.period = human_to_period("all")
            time_series.interval = human_to_interval("1M")
            time_series.year = None

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_region_daily(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

                interconnector_emissions = energy_interconnector_emissions_region_daily(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_emissions)

            if energy_stat.bom_station:
                weather_stats = weather_daily(
                    time_series=time_series,
                    station_code=energy_stat.bom_station,
                    network_region=energy_stat.network_region,
                )
                stat_set.append_set(weather_stats)

            stat_set = pad_stat_set(stat_set)

            write_output(energy_stat.path, stat_set)
Exemplo n.º 14
0
def power_facility_query(
    facility_codes: List[str],
    network: NetworkSchema,
    period: TimePeriod,
    interval: Optional[TimeInterval] = None,
    date_range: Optional[ScadaDateRange] = None,
) -> str:
    """Build the SQL query returning average power per bucketed interval
    for a list of facility codes on a network.

    Args:
        facility_codes: facility codes (DUIDs) to include in the query
        network: network schema the facilities belong to
        period: time period bounding the query window to the most recent data
        interval: bucket size; defaults to the network's native interval
        date_range: scada date range; looked up for the facilities if not provided

    Returns:
        The formatted SQL query string.

    Raises:
        Exception: if no date range or interval can be resolved.
    """
    if not date_range:
        date_range = get_scada_range(network=network,
                                     facilities=facility_codes)

    if not date_range:
        raise Exception("Require a date range for query")

    if not interval:
        # fall back to the network's native interval size
        interval = network.get_interval()

    if not interval:
        raise Exception("Require an interval")

    # The query renders trading_interval using the network's database
    # timezone name (not the postgres offset format).
    timezone = network.timezone_database

    __query = """
        select
            t.trading_interval at time zone '{timezone}',
            coalesce(avg(t.facility_power), 0),
            t.facility_code
        from (
            select
                time_bucket_gapfill('{trunc}', fs.trading_interval) AS trading_interval,
                coalesce(
                    avg(fs.generated), 0
                ) as facility_power,
                fs.facility_code
            from facility_scada fs
            join facility f on fs.facility_code = f.code
            where
                fs.trading_interval <= '{date_max}' and
                fs.trading_interval > '{date_min}' and
                fs.facility_code in ({facility_codes_parsed})
            group by 1, 3
        ) as t
        group by 1, 3
        order by 1 desc
    """

    date_max = date_range.get_end()
    date_min = date_range.get_start()

    if period:
        # trim the window to the most recent `period` worth of data
        date_min = date_range.get_end() - timedelta(minutes=period.period)

    query = __query.format(
        facility_codes_parsed=duid_in_case(facility_codes),
        trunc=interval.interval_sql,
        timezone=timezone,
        date_max=date_max,
        date_min=date_min,
    )

    return query
# Exemplo n.º 15
def energy_facility_query(
    facility_codes: List[str],
    network: NetworkSchema,
    period: TimePeriod,
    interval: Optional[TimeInterval] = None,
) -> str:
    """Build the SQL query returning energy, market value and emissions
    for a list of facility codes.

    Args:
        facility_codes: facility codes to include in the query
        network: network schema the facilities belong to
        period: time period bounding the query window (required)
        interval: truncation interval; defaults to the network's native interval

    Returns:
        The formatted SQL query string.

    Raises:
        Exception: if no date range, period or interval can be resolved.
    """

    __query = """
    select
        date_trunc('{trunc}', t.trading_interval at time zone '{timezone}') as trading_day,
        t.code,
        sum(t.energy) / 1000 as fueltech_energy,
        sum(t.market_value) as fueltech_market_value,
        sum(t.emissions) as fueltech_emissions
    from mv_facility_all t
    where
        t.trading_interval <= '{date_max}' and
        t.trading_interval >= '{date_min}' and
        t.code in ({facility_codes_parsed})
    group by 1, 2
    order by
        trading_day desc;
    """

    timezone = network.timezone_database
    # postgres-format offset used when anchoring a year boundary below
    offset = network.get_timezone(postgres_format=True)

    date_range: ScadaDateRange = get_scada_range(network=network,
                                                 facilities=facility_codes)

    if not date_range:
        raise Exception("Require a date range for query")

    if not period:
        raise Exception("Require a period")

    if not interval:
        # fall back to the network's native interval size
        interval = network.get_interval()

    if not interval:
        raise Exception("Require an interval")

    trunc = interval.trunc

    date_max = date_range.get_end()
    date_min = date_range.get_start()

    # Bounded periods trim the window back from the most recent data
    # point; "1Y" anchors at the start of the current calendar year and
    # any other period (e.g. "all") keeps the full scada range.
    if period.period_human in ("1M", "7d", "5Y", "10Y"):
        date_min = date_range.get_end() - timedelta(minutes=period.period)
    elif period.period_human == "1Y":
        # might have to do +offset times
        year = datetime.now().year
        date_min = "{}-01-01 00:00:00{}".format(year, offset)

    query = dedent(
        __query.format(
            facility_codes_parsed=duid_in_case(facility_codes),
            trunc=trunc,
            date_max=date_max,
            date_min=date_min,
            timezone=timezone,
        ))
    return query
# Exemplo n.º 16
def export_power(
    stats: Optional[List[StatExport]] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export power stats from the export map.

    Args:
        stats: explicit list of exports to run; when None the export map
            is consulted for all power stats
        priority: optional priority filter applied when reading the export map
        latest: when True, stop after the first stat set is written
    """
    if not stats:
        export_map = get_export_map().get_by_stat_type(StatType.power)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    output_count: int = 0

    for power_stat in stats:
        if power_stat.stat_type != StatType.power:
            continue

        # in latest mode only the first output is written
        if output_count >= 1 and latest:
            return None

        date_range_networks = power_stat.networks or []

        # trim to NEM since it has the shortest data time span
        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=power_stat.network, networks=date_range_networks)

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=power_stat.network,
            year=power_stat.year,
            interval=power_stat.interval,
            period=power_stat.period,
        )

        stat_set = power_week(
            time_series=time_series,
            network_region_code=power_stat.network_region_query
            or power_stat.network_region,
            networks_query=power_stat.networks,
        )

        if not stat_set:
            logger.info(
                "No power stat set for %s %s %s",
                power_stat.period,
                power_stat.networks,
                power_stat.network_region,
            )
            continue

        demand_set = demand_week(
            time_series=time_series,
            networks_query=power_stat.networks,
            network_region_code=power_stat.network_region_query
            or power_stat.network_region,
        )

        stat_set.append_set(demand_set)

        # interconnector flows only apply at the region level
        if power_stat.network_region:
            flow_set = power_flows_week(
                time_series=time_series,
                network_region_code=power_stat.network_region,
            )

            if flow_set:
                stat_set.append_set(flow_set)

        # attach temperature observations when a BOM station is mapped
        if power_stat.bom_station:
            weather_set = weather_daily(
                time_series=time_series,
                station_code=power_stat.bom_station,
                network_region=power_stat.network_region,
                include_min_max=False,
                unit_name="temperature",
            )

            stat_set.append_set(weather_set)

        write_output(power_stat.path, stat_set)
        output_count += 1
# Exemplo n.º 17
def _apply_wem_overrides(export: StatExport, network_code: str) -> None:
    """For WEM exports, include APVI rooftop data and filter on the WEM region."""
    if network_code == "WEM":
        export.networks = [NetworkWEM, NetworkAPVI]
        export.network_region_query = "WEM"


def get_export_map() -> StatMetadata:
    """
    Generates a map of all export JSONs.

    Builds StatExport entries at three levels — per country, per network
    and per network region — each with a live power export, yearly energy
    exports back to the start of the scada range, and an all-time monthly
    energy export.

    Returns:
        StatMetadata containing every export resource.

    Raises:
        Exception: if no export networks are configured or a scada range
            cannot be resolved.
    """
    session = SessionLocal()

    networks = session.query(Network).filter(Network.export_set.is_(True)).all()

    if not networks:
        raise Exception("No networks")

    countries = list(set([network.country for network in networks]))

    _exmap = []

    for country in countries:
        # @TODO derive this
        scada_range = get_scada_range(network=NetworkAU, networks=[NetworkNEM, NetworkWEM])

        if not scada_range:
            raise Exception("Require a scada range")

        # country-level live power export
        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=NetworkAU.get_interval(),
            period=human_to_period("7d"),
        )

        _exmap.append(export)

        # one yearly energy export per year, newest first, back to the
        # first year of the scada range
        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=country,
                date_range=scada_range,
                network=NetworkAU,
                networks=[NetworkNEM, NetworkWEM],
                year=year,
                interval=human_to_interval("1d"),
                period=human_to_period("1Y"),
            )
            _exmap.append(export)

        # all-time monthly energy export
        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=country,
            date_range=scada_range,
            network=NetworkAU,
            networks=[NetworkNEM, NetworkWEM],
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )
        _exmap.append(export)

    for network in networks:
        network_schema = network_from_network_code(network.code)
        scada_range = get_scada_range(network=network_schema)
        bom_station = get_network_region_weather_station(network.code)

        # validate the range before building any exports that reference it
        if not scada_range:
            raise Exception("Require a scada range")

        export = StatExport(
            stat_type=StatType.power,
            priority=PriorityType.live,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=network_schema.get_interval(),
            period=human_to_period("7d"),
        )

        _apply_wem_overrides(export, network.code)

        _exmap.append(export)

        for year in range(
            datetime.now().year,
            scada_range.start.year - 1,
            -1,
        ):
            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.daily,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                bom_station=bom_station,
                year=year,
                period=human_to_period("1Y"),
                interval=human_to_interval("1d"),
            )

            _apply_wem_overrides(export, network.code)

            _exmap.append(export)

        export = StatExport(
            stat_type=StatType.energy,
            priority=PriorityType.monthly,
            country=network.country,
            date_range=scada_range,
            network=network_schema,
            bom_station=bom_station,
            interval=human_to_interval("1M"),
            period=human_to_period("all"),
        )

        _apply_wem_overrides(export, network.code)

        _exmap.append(export)

        # Skip cases like wem/wem where region is superfluous
        if len(network.regions) < 2:
            continue

        for region in network.regions:
            scada_range = get_scada_range(network=network_schema, network_region=region)
            bom_station = get_network_region_weather_station(region.code)

            if not scada_range:
                raise Exception("Require a scada range")

            export = StatExport(
                stat_type=StatType.power,
                priority=PriorityType.live,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("7d"),
                interval=network_schema.get_interval(),
            )

            _apply_wem_overrides(export, network.code)

            _exmap.append(export)

            # NOTE: yearly region exports intentionally get no WEM
            # override — WEM has a single region so this loop never runs
            # for it anyway (see the len(regions) < 2 guard above)
            for year in range(
                datetime.now().year,
                scada_range.start.year - 1,
                -1,
            ):
                export = StatExport(
                    stat_type=StatType.energy,
                    priority=PriorityType.daily,
                    country=network.country,
                    date_range=scada_range,
                    network=network_schema,
                    network_region=region.code,
                    bom_station=bom_station,
                    year=year,
                    period=human_to_period("1Y"),
                    interval=human_to_interval("1d"),
                )
                _exmap.append(export)

            export = StatExport(
                stat_type=StatType.energy,
                priority=PriorityType.monthly,
                country=network.country,
                date_range=scada_range,
                network=network_schema,
                network_region=region.code,
                bom_station=bom_station,
                period=human_to_period("all"),
                interval=human_to_interval("1M"),
            )

            _apply_wem_overrides(export, network.code)

            _exmap.append(export)

    export_meta = StatMetadata(
        date_created=datetime.now(), version=get_version(), resources=_exmap
    )

    return export_meta
# Exemplo n.º 18
def export_energy(
    stats: Optional[List[StatExport]] = None,
    priority: Optional[PriorityType] = None,
    latest: Optional[bool] = False,
) -> None:
    """
    Export energy stats from the export map.

    Args:
        stats: explicit list of exports to run; when None the export map
            is consulted for all energy stats
        priority: optional priority filter applied when reading the export map
        latest: when True, only yearly stats for the current year are
            exported and all-time stats are skipped
    """
    if not stats:
        export_map = get_export_map().get_by_stat_type(StatType.energy)

        if priority:
            export_map = export_map.get_by_priority(priority)

        stats = export_map.resources

    CURRENT_YEAR = datetime.now().year

    logger.info(f"Running export_energy with {len(stats)} stats")

    for energy_stat in stats:
        # the map can contain mixed stat types; only process energy
        if energy_stat.stat_type != StatType.energy:
            continue

        # @FIX trim to NEM since it's the one with the shortest
        # data time span.
        # @TODO find a better and more flexible way to do this in the
        # range method
        date_range_networks = energy_stat.networks or []

        if NetworkNEM in date_range_networks:
            date_range_networks = [NetworkNEM]

        date_range: ScadaDateRange = get_scada_range(
            network=energy_stat.network,
            networks=date_range_networks,
            energy=True)

        # best-effort: log and skip rather than abort the whole export run
        if not date_range:
            logger.error(
                "Skipping - Could not get date range for energy {} {}".format(
                    energy_stat.network, date_range_networks))
            continue

        logger.debug("Date range is: {} {} => {}".format(
            energy_stat.network.code, date_range.start, date_range.end))

        # Migrate to this time_series
        time_series = TimeSeries(
            start=date_range.start,
            end=date_range.end,
            network=energy_stat.network,
            year=energy_stat.year,
            interval=energy_stat.interval,
            period=human_to_period("1Y"),
        )

        # yearly export: one output per calendar year
        if energy_stat.year:

            # in latest mode only the current year is refreshed
            if latest and energy_stat.year != CURRENT_YEAR:
                continue

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=energy_stat.network_region,
                networks=energy_stat.networks)
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

            # weather is best-effort: failures are logged, never fatal
            if energy_stat.bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=energy_stat.bom_station,
                        network_region=energy_stat.network_region,
                    )
                    stat_set.append_set(weather_stats)
                except NoResults as e:
                    logger.info("No results for weather result: {}".format(e))
                except Exception as e:
                    logger.error("weather_stat exception: {}".format(e))
                    pass
            else:
                logger.info("Stat set has no bom station")

            write_output(energy_stat.path, stat_set)

        # all-time export: monthly buckets over the full range
        # (skipped in latest mode since it rarely changes)
        elif energy_stat.period and energy_stat.period.period_human == "all" and not latest:
            # override the yearly defaults set on time_series above
            time_series.period = human_to_period("all")
            time_series.interval = human_to_interval("1M")
            time_series.year = None

            stat_set = energy_fueltech_daily(
                time_series=time_series,
                networks_query=energy_stat.networks,
                network_region_code=energy_stat.network_region_query
                or energy_stat.network_region,
            )

            if not stat_set:
                continue

            demand_energy_and_value = demand_network_region_daily(
                time_series=time_series,
                network_region_code=energy_stat.network_region,
                networks=energy_stat.networks)
            stat_set.append_set(demand_energy_and_value)

            # Hard coded to NEM only atm but we'll put has_interconnectors
            # in the metadata to automate all this
            if energy_stat.network == NetworkNEM and energy_stat.network_region:
                interconnector_flows = energy_interconnector_flows_and_emissions(
                    time_series=time_series,
                    networks_query=energy_stat.networks,
                    network_region_code=energy_stat.network_region_query
                    or energy_stat.network_region,
                )
                stat_set.append_set(interconnector_flows)

            # weather is best-effort here too; note unexpected errors are
            # silently swallowed in this branch — TODO confirm intended
            if energy_stat.bom_station:
                try:
                    weather_stats = weather_daily(
                        time_series=time_series,
                        station_code=energy_stat.bom_station,
                        network_region=energy_stat.network_region,
                    )
                    stat_set.append_set(weather_stats)
                except NoResults as e:
                    logger.info("No weather results: {}".format(e))
                except Exception:
                    pass

            write_output(energy_stat.path, stat_set)