def net_flows_emissions(
    region: str,
    data: List[RegionFlowEmissionsResult],
    interval: TimeInterval,
) -> Dict[str, List[DataQueryResult]]:
    """
    Calculates net region flow emissions for a region from a RegionFlowEmissionsResult
    """
    output_set = {}

    for k, v in groupby(data, lambda x: truncate(x.interval, interval.trunc)):
        values = list(v)

        if k not in output_set:
            output_set[k] = {
                "imports": 0.0,
                "exports": 0.0,
            }

        export_emissions_sum = 0.0
        import_emissions_sum = 0.0

        # Sum up
        for es in values:
            if not es.flow_from:
                continue

            if es.flow_from == region:
                if es.energy > 0:
                    if es.flow_from_emissions:
                        export_emissions_sum += abs(es.flow_from_emissions)
                else:
                    if es.flow_to_emissions:
                        import_emissions_sum += abs(es.flow_to_emissions)

            if es.flow_to == region:
                if es.energy < 0:
                    if es.flow_from_emissions:
                        export_emissions_sum += abs(es.flow_from_emissions)
                else:
                    if es.flow_to_emissions:
                        import_emissions_sum += abs(es.flow_to_emissions)

        output_set[k]["imports"] = import_emissions_sum
        output_set[k]["exports"] = export_emissions_sum

    imports_list = []
    exports_list = []

    for bucket, bucket_values in output_set.items():
        imports_list.append(
            DataQueryResult(interval=bucket, group_by="imports", result=bucket_values["imports"])
        )
        exports_list.append(
            DataQueryResult(interval=bucket, group_by="exports", result=bucket_values["exports"])
        )

    return {"imports": imports_list, "exports": exports_list}
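# Hedged usage sketch for net_flows_emissions(). The keyword arguments on the
# RegionFlowEmissionsResult row below are inferred from the attribute access
# inside the function and the real schema constructor may differ; it also
# assumes human_to_interval("1d") yields a TimeInterval whose trunc value is
# understood by truncate(). The helper name is illustrative only and not part
# of the module API.
def _net_flows_emissions_example() -> Dict[str, List[DataQueryResult]]:
    from datetime import datetime  # assumed already imported at module level

    rows = [
        # NSW1 exports energy to QLD1 in this interval, carrying 80 units of emissions
        RegionFlowEmissionsResult(
            interval=datetime(2021, 1, 15, 10, 0),
            flow_from="NSW1",
            flow_to="QLD1",
            energy=100.0,
            flow_from_emissions=80.0,
            flow_to_emissions=None,
        ),
    ]

    # Buckets the rows by day and returns {"imports": [...], "exports": [...]}
    # where the single export bucket sums to 80.0 and imports stay at 0.0
    return net_flows_emissions("NSW1", rows, human_to_interval("1d"))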
def power_flows_week(
    time_series: TimeSeries,
    network_region_code: str,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = interconnector_power_flow(time_series=time_series, network_region=network_region_code)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        raise Exception("No results from query: {}".format(query))

    imports = [
        DataQueryResult(interval=i[0], result=i[2], group_by="imports" if len(i) > 1 else None)
        for i in row
    ]
    exports = [
        DataQueryResult(interval=i[0], result=i[3], group_by="exports" if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        imports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("No results")

    result_exports = stats_factory(
        exports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    result.append_set(result_exports)

    return result
def gov_stats_cpi() -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = country_stats_query(StatTypes.CPI)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[1], group_by=i[2] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error("No results for gov_stats_cpi returning blank set")
        return None

    result = stats_factory(
        stats,
        code="au.cpi",
        network=NetworkNEM,
        interval=human_to_interval("1Q"),
        period=human_to_period("all"),
        units=get_unit("cpi"),
        group_field="gov",
    )

    return result
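# Minimal usage sketch for gov_stats_cpi(); the only assumption beyond this
# module is that the returned OpennemDataSet serialises via .json(), as seen
# in get_power_example() below. The helper name is illustrative only.
def _gov_stats_cpi_example() -> None:
    cpi = gov_stats_cpi()

    # None is returned when the country stats query has no rows
    if cpi:
        print(cpi.json(indent=4))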
def demand_week(
    time_series: TimeSeries,
    network_region_code: Optional[str],
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = network_demand_query(
        time_series=time_series,
        network_region=network_region_code,
        networks_query=networks_query,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        logger.error("No results from network_demand_query with {}".format(time_series))
        return None

    demand = [
        DataQueryResult(interval=i[0], result=i[2], group_by="demand" if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        demand,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=human_to_period("7d"),
        interval=human_to_interval("5m"),
        units=get_unit("demand"),
        region=network_region_code,
    )

    if not result:
        logger.error("No results from network_demand_query with {}".format(time_series))
        return None

    return result
def emissions_for_network_interval(
    time_series: TimeSeries,
    network_region_code: str | None = None,
) -> OpennemDataSet | None:
    engine = get_database_engine()

    if network_region_code and not re.match(_valid_region, network_region_code):
        raise OpenNEMInvalidNetworkRegion()

    query = emission_network_fueltech_query(
        time_series=time_series,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error(
            "No results from emissions_for_network_interval query with {}".format(time_series)
        )
        return None

    result = stats_factory(
        stats,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=time_series.interval,
        period=time_series.period,
        units=get_unit("emissions"),
        region=network_region_code,
        fueltech_group=True,
    )

    return result
def get_power_example() -> OpennemDataSet:
    network = network_from_network_code("NEM")
    interval = human_to_interval("5m")
    units = get_unit("power")
    period = human_to_period("7d")
    network_region_code = "NSW1"

    test_rows = []

    dt = datetime.fromisoformat("2021-01-15 10:00:00")

    for ft in ["coal_black", "coal_brown"]:
        for v in range(0, 3):
            test_rows.append([dt, ft, v])
            dt = dt + timedelta(minutes=5)

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in test_rows
    ]

    assert len(stats) == 6, "Should have 6 stats"

    result = stats_factory(
        stats,
        code=network_region_code or network.code,
        network=network,
        interval=interval,
        period=period,
        units=units,
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("Bad unit test data")

    with open("power-nsw1.json", "w") as fh:
        fh.write(result.json(indent=4))

    return result
def power_flows_network_week(
    time_series: TimeSeries,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = interconnector_flow_network_regions_query(time_series=time_series)

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        logger.error(
            "No results from interconnector_flow_network_regions_query with {}".format(time_series)
        )
        return None

    imports = [
        DataQueryResult(interval=i[0], result=i[4], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    result = stats_factory(
        imports,
        # code=network_region_code or network.code,
        network=time_series.network,
        period=time_series.period,
        interval=time_series.interval,
        units=get_unit("regional_trade"),
        # fueltech_group=True,
        group_field="power",
        include_group_code=True,
        include_code=True,
    )

    if not result:
        logger.error(
            "No results from interconnector_flow_network_regions_query with {}".format(time_series)
        )
        return None

    return result
def net_flows(
    region: str,
    data: List[RegionFlowResult],
    interval: Optional[TimeInterval] = None,
) -> Dict[str, List[DataQueryResult]]:
    """
    Calculates net region flows for a region from a RegionFlowResult
    """

    def get_interval(flow_result: RegionFlowResult) -> datetime:
        value = flow_result.interval

        if interval:
            value = truncate(value, interval.trunc)

        return value

    data_net = []

    # group regular first for net flows per provided
    # period bucket from query
    for k, v in groupby(data, attrgetter("interval")):
        values = list(v)

        fr = RegionFlowResult(
            interval=values[0].interval,
            flow_from="",
            flow_to="",
            flow_from_energy=0.0,
            flow_to_energy=0.0,
        )

        flow_sum = 0.0

        for es in values:
            if not es.generated:
                continue

            if es.flow_from == region:
                flow_sum += es.generated

            if es.flow_to == region:
                flow_sum += -1 * es.generated

        if flow_sum > 0:
            fr.flow_from_energy = flow_sum
        else:
            fr.flow_to_energy = flow_sum

        data_net.append(fr)

    output_set = {}

    # group interval second with provided interval
    for k, v in groupby(data_net, get_interval):
        values = list(v)

        if k not in output_set:
            output_set[k] = {
                "imports": 0.0,
                "exports": 0.0,
            }

        flow_sum_imports = 0.0
        flow_sum_exports = 0.0

        # Sum up
        for es in values:
            if es.flow_to_energy:
                flow_sum_imports += es.flow_to_energy

            if es.flow_from_energy:
                flow_sum_exports += es.flow_from_energy

        output_set[k]["imports"] = -1 * abs(flow_sum_imports) / 1000
        output_set[k]["exports"] = flow_sum_exports / 1000

    imports_list = []
    exports_list = []

    for bucket, bucket_values in output_set.items():
        imports_list.append(
            DataQueryResult(interval=bucket, group_by="imports", result=bucket_values["imports"])
        )
        exports_list.append(
            DataQueryResult(interval=bucket, group_by="exports", result=bucket_values["exports"])
        )

    return {"imports": imports_list, "exports": exports_list}
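# Hedged usage sketch for net_flows(). The keyword arguments mirror how the
# function itself builds intermediate RegionFlowResult rows above; the
# `generated` field name is inferred from attribute access and the real
# constructor may differ, so treat this as illustrative only. The helper name
# is not part of the module API.
def _net_flows_example() -> Dict[str, List[DataQueryResult]]:
    from datetime import datetime  # assumed already imported at module level

    rows = [
        # NSW1 exports 500 units of generation in this dispatch interval
        RegionFlowResult(
            interval=datetime(2021, 1, 15, 10, 0),
            flow_from="NSW1",
            flow_to="QLD1",
            flow_from_energy=0.0,
            flow_to_energy=0.0,
            generated=500.0,  # field name assumed from the loop in net_flows()
        ),
        # NSW1 imports 200 units in the following interval
        RegionFlowResult(
            interval=datetime(2021, 1, 15, 10, 5),
            flow_from="VIC1",
            flow_to="NSW1",
            flow_from_energy=0.0,
            flow_to_energy=0.0,
            generated=200.0,
        ),
    ]

    # With no TimeInterval each source interval is its own bucket: the first
    # bucket yields exports of 0.5 (500 / 1000) and the second yields imports
    # of -0.2 (imports are returned negative)
    return net_flows("NSW1", rows)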
def energy_interconnector_region_daily(
    time_series: TimeSeries,
    network_region_code: str,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()
    period: TimePeriod = human_to_period("1Y")
    units = get_unit("energy_giga")

    query = energy_network_flow_query(
        time_series=time_series,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        return None

    imports = [DataQueryResult(interval=i[0], group_by="imports", result=i[1]) for i in row]
    exports = [DataQueryResult(interval=i[0], group_by="exports", result=i[2]) for i in row]

    imports_mv = [DataQueryResult(interval=i[0], group_by="imports", result=i[3]) for i in row]
    exports_mv = [DataQueryResult(interval=i[0], group_by="exports", result=i[4]) for i in row]

    result = stats_factory(
        imports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=units,
        region=network_region_code,
        fueltech_group=True,
        # localize=False,
    )

    # Bail early on no interconnector
    # don't error
    if not result:
        logger.warning("No interconnector energy result")
        return result

    result_exports = stats_factory(
        exports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=units,
        region=network_region_code,
        fueltech_group=True,
    )

    result.append_set(result_exports)

    result_imports_mv = stats_factory(
        imports_mv,
        units=get_unit("market_value"),
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=False,
    )
    result.append_set(result_imports_mv)

    result_export_mv = stats_factory(
        exports_mv,
        units=get_unit("market_value"),
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=False,
    )
    result.append_set(result_export_mv)

    return result
def energy_fueltech_daily(
    time_series: TimeSeries,
    network_region_code: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()
    units = get_unit("energy_giga")

    query = energy_network_fueltech_query(
        time_series=time_series,
        network_region=network_region_code,
        networks_query=networks_query,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    results_energy = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[2] if len(i) > 1 else None)
        for i in row
    ]

    results_market_value = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[3] if len(i) > 1 else None)
        for i in row
    ]

    results_emissions = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[4] if len(i) > 1 else None)
        for i in row
    ]

    if len(results_energy) < 1:
        logger.error("No results from query: {}".format(query))
        return None

    stats = stats_factory(
        stats=results_energy,
        units=units,
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        localize=True,
        # code=network.code.lower(),
    )

    if not stats:
        return None

    stats_market_value = stats_factory(
        stats=results_market_value,
        units=get_unit("market_value"),
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=True,
    )

    stats.append_set(stats_market_value)

    stats_emissions = stats_factory(
        stats=results_emissions,
        units=get_unit("emissions"),
        network=time_series.network,
        fueltech_group=True,
        interval=time_series.interval,
        region=network_region_code,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=True,
    )

    stats.append_set(stats_emissions)

    return stats
def weather_daily(
    time_series: TimeSeries,
    station_code: str,
    unit_name: str = "temperature_mean",
    include_min_max: bool = True,
    network_region: Optional[str] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()
    units = get_unit(unit_name)

    query = weather_observation_query(
        time_series=time_series,
        station_codes=[station_code],
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    temp_avg = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[2] if len(i) > 1 else None)
        for i in row
    ]

    temp_min = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[3] if len(i) > 1 else None)
        for i in row
    ]

    temp_max = [
        DataQueryResult(interval=i[0], group_by=i[1], result=i[4] if len(i) > 1 else None)
        for i in row
    ]

    if len(temp_avg) < 1:
        logger.error("No results from weather_observation_query with {}".format(time_series))
        return None

    stats = stats_factory(
        stats=temp_avg,
        units=units,
        network=time_series.network,
        interval=time_series.interval,
        region=network_region,
        code="bom",
        group_field="temperature",
        localize=False,
    )

    if not stats:
        logger.error(
            "No results from weather_observation_query stats factory with {}".format(time_series)
        )
        return None

    if include_min_max:
        stats_min = stats_factory(
            stats=temp_min,
            units=get_unit("temperature_min"),
            network=time_series.network,
            interval=time_series.interval,
            region=network_region,
            code="bom",
            group_field="temperature",
            localize=False,
        )

        stats_max = stats_factory(
            stats=temp_max,
            units=get_unit("temperature_max"),
            network=time_series.network,
            interval=time_series.interval,
            region=network_region,
            code="bom",
            group_field="temperature",
            localize=False,
        )

        stats.append_set(stats_min)
        stats.append_set(stats_max)

    return stats
def power_week(
    time_series: TimeSeries,
    network_region_code: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
    include_capacities: bool = False,
    include_code: Optional[bool] = True,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = power_network_fueltech_query(
        time_series=time_series,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        logger.error("No results from power week query with {}".format(time_series))
        return None

    result = stats_factory(
        stats,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=time_series.interval,
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
        include_code=include_code,
    )

    if not result:
        logger.error("No results from power week stats factory with {}".format(time_series))
        return None

    if include_capacities and network_region_code:
        region_fueltech_capacities = get_facility_capacities(
            time_series.network, network_region_code
        )

        for ft in result.data:
            if ft.fuel_tech in region_fueltech_capacities:
                ft.x_capacity_at_present = region_fueltech_capacities[ft.fuel_tech]

    # price
    time_series_price = time_series.copy()
    time_series_price.interval = human_to_interval("30m")

    query = price_network_query(
        time_series=time_series_price,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats_price = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    stats_market_value = stats_factory(
        stats=stats_price,
        code=network_region_code or time_series.network.code.lower(),
        units=get_unit("price_energy_mega"),
        network=time_series.network,
        interval=human_to_interval("30m"),
        region=network_region_code,
        period=time_series.period,
        include_code=include_code,
    )

    result.append_set(stats_market_value)

    # rooftop solar
    time_series_rooftop = time_series.copy()
    time_series_rooftop.interval = human_to_interval("30m")

    query = power_network_rooftop_query(
        time_series=time_series_rooftop,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    rooftop_power = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    rooftop = stats_factory(
        rooftop_power,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=human_to_interval("30m"),
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
        include_code=include_code,
        cast_nulls=False,
    )

    # rooftop forecast
    rooftop_forecast = None

    if rooftop and rooftop.data and len(rooftop.data) > 0:
        time_series_rooftop_forecast = time_series_rooftop.copy()
        time_series_rooftop_forecast.start = rooftop.data[0].history.last
        time_series_rooftop_forecast.forecast = True

        query = power_network_rooftop_query(
            time_series=time_series_rooftop_forecast,
            networks_query=networks_query,
            network_region=network_region_code,
            forecast=True,
        )

        with engine.connect() as c:
            logger.debug(query)
            row = list(c.execute(query))

        rooftop_forecast_power = [
            DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
            for i in row
        ]

        rooftop_forecast = stats_factory(
            rooftop_forecast_power,
            # code=network_region_code or network.code,
            network=time_series.network,
            interval=human_to_interval("30m"),
            period=time_series.period,
            units=get_unit("power"),
            region=network_region_code,
            fueltech_group=True,
            include_code=include_code,
            cast_nulls=False,
        )

    if rooftop and rooftop_forecast:
        if (
            hasattr(rooftop, "data")
            and len(rooftop.data) > 0
            and rooftop_forecast.data
            and len(rooftop_forecast.data) > 0
        ):
            rooftop.data[0].forecast = rooftop_forecast.data[0].history

    result.append_set(rooftop)

    return result
def station_observations_api(
    station_code: str = Query(None, description="Station code"),
    interval_human: str = Query("15m", description="Interval"),
    period_human: str = Query("7d", description="Period"),
    station_codes: List[str] = [],
    timezone: str = None,
    offset: str = None,
    year: int = None,
    engine=Depends(get_database_engine),
) -> OpennemDataSet:
    units = get_unit("temperature")

    if not interval_human:
        interval_human = "15m"

    if not period_human:
        period_human = "7d"

    if station_code:
        station_codes = [station_code]

    interval = human_to_interval(interval_human)
    period = human_to_period(period_human)

    if timezone:
        timezone = ZoneInfo(timezone)

    if offset:
        timezone = get_fixed_timezone(offset)

    query = observation_query(
        station_codes=station_codes,
        interval=interval,
        period=period,
        year=year,
    )

    with engine.connect() as c:
        results = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in results
    ]

    if len(stats) < 1:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Station stats not found",
        )

    result = stats_factory(
        stats=stats,
        units=units,
        interval=interval,
        period=period,
        code="bom",
        group_field="temperature",
    )

    return result
def energy_interconnector_flows_and_emissions(
    time_series: TimeSeries,
    network_region_code: str,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()
    period: TimePeriod = human_to_period("1Y")
    unit_energy = get_unit("energy_giga")
    unit_emissions = get_unit("emissions")

    query = energy_network_interconnector_emissions_query(
        time_series=time_series,
        network_region=network_region_code,
        networks_query=networks_query,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    if len(row) < 1:
        return None

    imports = [DataQueryResult(interval=i[0], group_by="imports", result=i[1]) for i in row]
    exports = [DataQueryResult(interval=i[0], group_by="exports", result=i[2]) for i in row]

    import_emissions = [
        DataQueryResult(interval=i[0], group_by="imports", result=i[3]) for i in row
    ]
    export_emissions = [
        DataQueryResult(interval=i[0], group_by="exports", result=i[4]) for i in row
    ]

    import_mv = [DataQueryResult(interval=i[0], group_by="imports", result=i[5]) for i in row]
    export_mv = [DataQueryResult(interval=i[0], group_by="exports", result=i[6]) for i in row]

    result = stats_factory(
        imports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=unit_energy,
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("No results from flow controller")

    result_exports = stats_factory(
        exports,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=unit_energy,
        region=network_region_code,
        fueltech_group=True,
    )
    result.append_set(result_exports)

    result_import_emissions = stats_factory(
        import_emissions,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=unit_emissions,
        region=network_region_code,
        fueltech_group=True,
        localize=False,
    )
    result.append_set(result_import_emissions)

    result_export_emissions = stats_factory(
        export_emissions,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=unit_emissions,
        region=network_region_code,
        fueltech_group=True,
        localize=False,
    )
    result.append_set(result_export_emissions)

    # market value for flows
    result_import_mv = stats_factory(
        import_mv,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=get_unit("market_value"),
        region=network_region_code,
        fueltech_group=True,
        localize=False,
    )
    result.append_set(result_import_mv)

    result_export_mv = stats_factory(
        export_mv,
        network=time_series.network,
        period=period,
        interval=time_series.interval,
        units=get_unit("market_value"),
        region=network_region_code,
        fueltech_group=True,
        localize=False,
    )
    result.append_set(result_export_mv)

    return result
def demand_network_region_daily(
    time_series: TimeSeries,
    network_region_code: str | None = None,
    networks: list[NetworkSchema] = [],
) -> OpennemDataSet | None:
    """Gets demand market_value and energy for a network -> network_region"""
    engine = get_database_engine()

    query = demand_network_region_query(
        time_series=time_series, network_region=network_region_code, networks=networks
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    results_energy = [
        DataQueryResult(interval=i[0], group_by=i[2], result=i[3] if len(i) > 1 else None)
        for i in row
    ]

    results_market_value = [
        DataQueryResult(interval=i[0], group_by=i[2], result=i[4] if len(i) > 1 else None)
        for i in row
    ]

    if len(results_energy) < 1:
        logger.error("No results from query: {}".format(query))
        return None

    # demand based values for VWP
    stats = stats_factory(
        stats=results_energy,
        units=get_unit("demand.energy_giga"),
        network=time_series.network,
        fueltech_group=False,
        interval=time_series.interval,
        period=time_series.period,
        localize=True,
    )

    if not stats:
        raise Exception(f"No stats for demand_network_region_daily: {network_region_code}")

    stats_market_value = stats_factory(
        stats=results_market_value,
        units=get_unit("demand.market_value"),
        network=time_series.network,
        fueltech_group=False,
        interval=time_series.interval,
        period=time_series.period,
        code=time_series.network.code.lower(),
        localize=True,
    )

    if stats_market_value:
        stats.append_set(stats_market_value)

    return stats
def power_week(
    time_series: TimeSeries,
    network_region_code: Optional[str] = None,
    networks_query: Optional[List[NetworkSchema]] = None,
) -> Optional[OpennemDataSet]:
    engine = get_database_engine()

    query = power_network_fueltech_query(
        time_series=time_series,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    if len(stats) < 1:
        raise Exception("No results from query: {}".format(query))

    result = stats_factory(
        stats,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=time_series.interval,
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    if not result:
        raise Exception("No results")

    # price
    time_series_price = time_series.copy()
    time_series_price.interval = human_to_interval("30m")

    query = price_network_query(
        time_series=time_series_price,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    stats_price = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    stats_market_value = stats_factory(
        stats=stats_price,
        code=network_region_code or time_series.network.code.lower(),
        units=get_unit("price_energy_mega"),
        network=time_series.network,
        interval=human_to_interval("30m"),
        region=network_region_code,
        period=time_series.period,
    )

    result.append_set(stats_market_value)

    # rooftop solar
    time_series_rooftop = time_series.copy()
    time_series_rooftop.interval = human_to_interval("30m")

    query = power_network_rooftop_query(
        time_series=time_series_rooftop,
        networks_query=networks_query,
        network_region=network_region_code,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    rooftop_power = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    rooftop = stats_factory(
        rooftop_power,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=human_to_interval("30m"),
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    # rooftop forecast
    time_series_rooftop_forecast = time_series.copy()
    time_series_rooftop_forecast.interval = human_to_interval("30m")
    time_series_rooftop_forecast.forecast = True

    query = power_network_rooftop_query(
        time_series=time_series_rooftop_forecast,
        networks_query=networks_query,
        network_region=network_region_code,
        forecast=True,
    )

    with engine.connect() as c:
        logger.debug(query)
        row = list(c.execute(query))

    rooftop_forecast_power = [
        DataQueryResult(interval=i[0], result=i[2], group_by=i[1] if len(i) > 1 else None)
        for i in row
    ]

    rooftop_forecast = stats_factory(
        rooftop_forecast_power,
        # code=network_region_code or network.code,
        network=time_series.network,
        interval=human_to_interval("30m"),
        period=time_series.period,
        units=get_unit("power"),
        region=network_region_code,
        fueltech_group=True,
    )

    if rooftop and rooftop_forecast:
        if (
            hasattr(rooftop, "data")
            and len(rooftop.data) > 0
            and rooftop_forecast.data
            and len(rooftop_forecast.data) > 0
        ):
            rooftop.data[0].forecast = rooftop_forecast.data[0].history

    result.append_set(rooftop)

    return result