def test_api(provider, network, kwargs, si_units):
    """Test main wetterdienst API"""
    # Build API
    api = Wetterdienst(provider, network)

    # Discover parameters
    assert api.discover()

    Settings.si_units = si_units

    # All stations_result
    request = api(**kwargs).all()
    stations_df = request.df

    # Check stations_result DataFrame columns
    required_station_columns = {
        "station_id",
        "from_date",
        "to_date",
        "height",
        "latitude",
        "longitude",
        "name",
        "state",
    }
    assert set(stations_df.columns).issuperset(required_station_columns)

    # Check that there are actually stations_result
    assert not stations_df.empty

    # Query first DataFrame from values
    values_df = next(request.values.query()).df
    required_value_columns = {"station_id", "parameter", "date", "value", "quality"}
    assert set(values_df.columns).issuperset(required_value_columns)

    values_df = values_df.drop(columns="quality").dropna(axis=0)
    assert not values_df.empty
def get_api(provider: str, network: str):
    """
    Resolve the API class for the given provider and network.

    If no matching API exists, the ``ProviderError`` is logged and the
    process exits with status code 1 (via ``sys.exit``).

    :param provider: name of the weather data provider (e.g. ``"dwd"``)
    :param network: name of the network within that provider
    :return: the matching Wetterdienst API class
    """
    try:
        return Wetterdienst(provider, network)
    except ProviderError as e:
        # Log the original error message, then terminate the CLI with a
        # non-zero exit code instead of propagating the exception.
        log.error(str(e))
        sys.exit(1)
def WetterVorhersageNBG():
    """
    Fetch the DWD MOSMIX-SMALL forecast for Nuremberg (station ``"10763"``)
    and return daily mean air temperatures in degrees Celsius, grouped by date.
    """
    api = Wetterdienst(provider="dwd", kind="forecast")
    # NOTE: the forecast station id differs from the one used for
    # historical data.
    request = api(mosmix_type=DwdMosmixType.SMALL).filter(station_id="10763")

    forecast = request.values.all().df
    forecast = forecast[forecast['PARAMETER'] == 'TEMPERATURE_AIR_200']
    # Kelvin -> degrees Celsius
    forecast['VALUE'] = forecast['VALUE'] - 273.15
    forecast['date'] = forecast['DATE'].dt.date
    return forecast.groupby('date').mean()

# CoronaDatenNbg, Wetterdaten, Restriktionen = DatenVergangenheitHolen()
# CoronaDatenNbg.head()
def test_api(provider, kind, kwargs, si_units):
    """ Test main wetterdienst API """
    # Build API
    api = Wetterdienst(provider, kind)

    # Discover parameters
    assert api.discover()

    # All stations
    request = api(**kwargs, si_units=si_units).all()
    stations_df = request.df

    # Check stations DataFrame columns
    expected_station_columns = {
        "station_id",
        "from_date",
        "to_date",
        "height",
        "latitude",
        "longitude",
        "name",
        "state",
    }
    assert set(stations_df.columns).issuperset(expected_station_columns)

    # Check that there are actually stations
    assert not stations_df.empty

    # Query first DataFrame from values
    values_df = next(request.values.query()).df
    # TODO: DWD Forecast has no quality
    expected_value_columns = {"station_id", "parameter", "date", "value", "quality"}
    assert set(values_df.columns).issuperset(expected_value_columns)
    assert not values_df.empty
def coverage(provider, network, filter_, debug):
    """Print coverage (available parameters) for a provider/network as JSON."""
    set_logging_level(debug)

    # Without a full provider/network pair, dump the global coverage instead.
    if not provider or not network:
        print(json.dumps(Wetterdienst.discover(), indent=4))  # noqa: T001
        return

    api = get_api(provider=provider, network=network)
    coverage_info = api.discover(filter_=filter_, flatten=False)
    print(json.dumps(coverage_info, indent=4))  # noqa: T001
def set_location(latitude, longitude):
    """
    Configure module state for a new geographic location.

    Mutates three module-level globals:
    - ``local_stations``: the two nearest DWD MOSMIX forecast stations
    - ``local_radolan_idx``: indices of RADOLAN grid cells near the location
    - ``observer``: an ``astral.Observer`` for sun-position calculations

    :param latitude: location latitude in decimal degrees (WGS84)
    :param longitude: location longitude in decimal degrees (WGS84)
    """
    global local_stations
    global local_radolan_idx
    global observer

    # Find 2 local forecast stations
    api = Wetterdienst(provider = 'dwd', kind = 'forecast')
    stations = api(parameter="large", mosmix_type=DwdMosmixType.LARGE)
    local_stations = stations.filter_by_rank(latitude=latitude, longitude=longitude, rank=2)

    # Determine local index in the radolan grid: reproject the WGS84
    # coordinate into the DWD RADOLAN polar-stereographic projection,
    # then pick all grid cells closer than 10 grid units to it.
    proj_stereo = wrl.georef.create_osr("dwd-radolan")
    proj_wgs = osr.SpatialReference()
    proj_wgs.ImportFromEPSG(4326)
    radolan_grid_xy = wrl.georef.get_radolan_grid(900, 900)
    # NOTE: reproject expects [longitude, latitude] ordering here.
    coord_xy = wrl.georef.reproject([longitude, latitude],
                                    projection_source=proj_wgs,
                                    projection_target=proj_stereo)
    # Euclidean distance of every grid cell to the target coordinate
    # (presumably in km, the RADOLAN grid unit — TODO confirm).
    distance_xy = np.hypot(radolan_grid_xy[:, :, 0] - coord_xy[0],
                           radolan_grid_xy[:, :, 1] - coord_xy[1])
    local_radolan_idx = np.argwhere(distance_xy < 10)

    # Define observer for sun position
    observer = astral.Observer(latitude=latitude, longitude=longitude)
def get_data(station, data_type):
    """
    Download the DWD MOSMIX forecast for one station and convert units.

    :param station: station id to query
    :param data_type: ``'L'`` for MOSMIX-LARGE, ``'S'`` for MOSMIX-SMALL
    :return: tidy DataFrame with temperatures in degrees Celsius, wind speeds
        in km/h and reduced surface pressure in hPa
    :raises ValueError: if ``data_type`` is neither ``'L'`` nor ``'S'``
    """
    if data_type == 'L':
        mosmix_type = DwdMosmixType.LARGE
    elif data_type == 'S':
        mosmix_type = DwdMosmixType.SMALL
    else:
        # BUGFIX: previously fell through with `mosmix_type` unbound,
        # producing an UnboundLocalError below; fail fast instead.
        raise ValueError(f"data_type must be 'L' or 'S', got {data_type!r}")

    API = Wetterdienst(provider="dwd", kind="forecast")
    stations = API(
        parameter="large",
        mosmix_type=mosmix_type,
        humanize=True,
        tidy=True,
    ).filter_by_station_id(station_id=[station])
    df = stations.values.all().df
    df = df.drop(columns=['quality'])

    # remove categories so string comparisons below work on plain str
    df.parameter = df.parameter.astype(str)

    # Kelvin -> degrees Celsius
    params = [
        'temperature_air_200', 'temperature_dew_point_200',
        'temperature_air_max_200', 'temperature_air_min_200',
        'temperature_air_005', 'temperature_air_min_005_last_12h',
        "temperature_air_200_last_24h"
    ]
    temp_mask = df.parameter.isin(params)
    df.loc[temp_mask, 'value'] = df.loc[temp_mask, 'value'] - 273.15

    # m/s -> km/h
    params = [
        'wind_speed', 'wind_gust_max_last_1h',
        'wind_gust_max_last_3h', 'wind_gust_max_last_12h'
    ]
    wind_mask = df.parameter.isin(params)
    df.loc[wind_mask, 'value'] = df.loc[wind_mask, 'value'] * 3.6

    # Pa -> hPa
    pressure_mask = df.parameter == 'pressure_air_surface_reduced'
    df.loc[pressure_mask, 'value'] = df.loc[pressure_mask, 'value'] / 100.

    return df
def coverage(
    provider: str = Query(default=None),
    network: str = Query(default=None),
    debug: bool = Query(default=False),
    filter_=Query(alias="filter", default=None),
):
    """Return coverage (available parameters) for a provider/network as JSON."""
    set_logging_level(debug)

    # Without a full provider/network pair, return the global coverage.
    if not provider or not network:
        return Response(
            content=json.dumps(Wetterdienst.discover(), indent=4),
            media_type="application/json",
        )

    api = get_api(provider=provider, network=network)
    metadata = api.discover(filter_=filter_, flatten=False)
    return Response(
        content=json.dumps(metadata, indent=4),
        media_type="application/json",
    )
def DatenVergangenheitHolen():
    """
    Fetch historical data for Nuremberg from three public sources.

    :return: a 3-tuple of DataFrames:
        - 7-day incidence per 100k inhabitants (columns ``Refdatum``, ``7TIW``)
        - daily air temperature observations for station 3668
        - Germany's Oxford stringency index over time
    """
    # Fetch 7-day-incidence (7TIW) source data from the RKI ArcGIS feed
    dfgeo = gpd.read_file(
        'https://opendata.arcgis.com/datasets/dd4580c810204019a7b8eb3e0b329dd6_0.geojson',
        ignore_geometry=True)
    dfgeo = dfgeo.loc[dfgeo['Landkreis'] == 'SK Nürnberg']
    dfgeo = dfgeo.groupby(['Refdatum']).sum()
    dfgeo = dfgeo.reset_index()
    df1 = pd.DataFrame(dfgeo)
    # Rolling 7-day case sum; 518000 is presumably Nuremberg's population,
    # scaled to incidence per 100k inhabitants.
    df1['Summe7Tage'] = df1.AnzahlFall.rolling(min_periods=1, window=7).sum()
    df1['7TIW'] = (df1.Summe7Tage / 518000) * 100000
    df1 = df1[['Refdatum', '7TIW']]

    # Fetch weather data for Nuremberg
    API = Wetterdienst("dwd", "observation")
    request = API(
        parameter=["climate_summary"],
        resolution="daily",
        start_date="2020-04-02",  # Timezone: UTC
        # NOTE(review): `today` is a module-level value defined elsewhere
        end_date=today,  # Timezone: UTC
        tidy_data=True,  # default
        humanize_parameters=True,  # default
    ).filter(station_id=[3668])  # weather station for Nuremberg
    # NOTE(review): `stations` is assigned but never used below
    stations = request.df
    values = request.values.all().df
    values = values.loc[values['PARAMETER'] == 'TEMPERATURE_AIR_200']

    # Fetch Corona restrictions (stringency index) / later maybe for Bavaria?
    dfCoronaRestr = pd.read_csv(
        'https://raw.githubusercontent.com/OxCGRT/covid-policy-tracker/master/data/OxCGRT_latest.csv',
        usecols=["Date", "StringencyIndexForDisplay", "CountryName"])
    dfCoronaRestr = dfCoronaRestr[dfCoronaRestr.CountryName == 'Germany']
    stringency = dfCoronaRestr[['Date', 'StringencyIndexForDisplay']]

    return df1, values, stringency
def values(
    provider: str = Query(default=None),
    network: str = Query(default=None),
    parameter: str = Query(default=None),
    resolution: str = Query(default=None),
    period: str = Query(default=None),
    date: str = Query(default=None),
    issue: str = Query(default="latest"),
    all_: str = Query(alias="all", default=False),
    station: str = Query(default=None),
    name: str = Query(default=None),
    coordinates: str = Query(default=None),
    rank: int = Query(default=None),
    distance: float = Query(default=None),
    bbox: str = Query(default=None),
    sql: str = Query(default=None),
    sql_values: str = Query(alias="sql-values", default=None),
    humanize: bool = Query(default=True),
    tidy: bool = Query(default=True),
    si_units: bool = Query(alias="si-units", default=True),
    skip_empty: bool = Query(alias="skip-empty", default=False),
    skip_threshold: float = Query(
        alias="skip-threshold", default=0.95, gt=0, le=1),
    dropna: bool = Query(alias="dropna", default=False),
    pretty: bool = Query(default=False),
    debug: bool = Query(default=False),
):
    """
    Acquire data from DWD.

    :param provider:
    :param network: string for network of provider
    :param parameter: Observation measure
    :param resolution: Frequency/granularity of measurement interval
    :param period: Recent or historical files
    :param date: Date or date range
    :param issue:
    :param all_:
    :param station:
    :param name:
    :param coordinates:
    :param rank:
    :param distance:
    :param bbox:
    :param sql: SQL expression
    :param sql_values:
    :param humanize:
    :param tidy: Whether to return data in tidy format. Default: True.
    :param si_units:
    :param skip_empty:
    :param skip_threshold:
    :param dropna:
    :param pretty:
    :param debug:
    :return:
    """
    # TODO: Add geojson support
    fmt = "json"

    if provider is None or network is None:
        raise HTTPException(
            status_code=400,
            detail="Query arguments 'provider' and 'network' are required",
        )
    if parameter is None or resolution is None:
        # BUGFIX: previous message also claimed 'date' is required,
        # but only parameter and resolution are checked here.
        raise HTTPException(
            status_code=400,
            detail="Query arguments 'parameter' and 'resolution' are required",
        )
    if fmt not in ("json", "geojson"):
        raise HTTPException(
            status_code=400,
            detail="format argument must be one of json, geojson",
        )

    set_logging_level(debug)

    try:
        api: ScalarRequestCore = Wetterdienst(provider, network)
    except ProviderError:
        # BUGFIX: was `return HTTPException(...)`, which serializes the
        # exception object with a 200 status instead of erroring out.
        raise HTTPException(
            status_code=404,
            detail=f"Given combination of provider and network not available. "
            f"Choose provider and network from {Wetterdienst.discover()}",
        )

    parameter = read_list(parameter)
    if period:
        period = read_list(period)
    if station:
        station = read_list(station)

    try:
        values_ = get_values(
            api=api,
            parameter=parameter,
            resolution=resolution,
            date=date,
            issue=issue,
            period=period,
            all_=all_,
            station_id=station,
            name=name,
            coordinates=coordinates,
            rank=rank,
            distance=distance,
            bbox=bbox,
            sql=sql,
            sql_values=sql_values,
            si_units=si_units,
            skip_empty=skip_empty,
            skip_threshold=skip_threshold,
            dropna=dropna,
            tidy=tidy,
            humanize=humanize,
        )
    except Exception as e:
        log.exception(e)
        # BUGFIX: raise instead of return (see above).
        raise HTTPException(status_code=404, detail=str(e))

    indent = 4 if pretty else None

    output = values_.df
    # Serialize timestamps as ISO-8601 strings for JSON output.
    output[Columns.DATE.value] = output[Columns.DATE.value].apply(
        lambda ts: ts.isoformat())
    # np.nan (np.NaN was removed in NumPy 2.0) and pd.NA become JSON null.
    output = output.replace({np.nan: None, pd.NA: None})
    output = output.to_dict(orient="records")
    output = make_json_response(output, api.provider)
    output = json.dumps(output, indent=indent, ensure_ascii=False)

    return Response(content=output, media_type="application/json")
def stations(
    provider: str = Query(default=None),
    network: str = Query(default=None),
    parameter: str = Query(default=None),
    resolution: str = Query(default=None),
    period: str = Query(default=None),
    all_: str = Query(alias="all", default=False),
    station_id: str = Query(default=None),
    name: str = Query(default=None),
    coordinates: str = Query(default=None),
    rank: int = Query(default=None),
    distance: float = Query(default=None),
    bbox: str = Query(default=None),
    sql: str = Query(default=None),
    fmt: str = Query(alias="format", default="json"),
    debug: bool = Query(default=False),
    pretty: bool = Query(default=False),
):
    """
    Acquire station metadata for a provider/network as JSON or GeoJSON.

    :param provider:
    :param network: string for network of provider
    :param parameter: Observation measure
    :param resolution: Frequency/granularity of measurement interval
    :param period: Recent or historical files
    :param all_:
    :param station_id:
    :param name:
    :param coordinates:
    :param rank:
    :param distance:
    :param bbox:
    :param sql: SQL expression
    :param fmt: output format, one of "json" or "geojson"
    :param debug:
    :param pretty:
    :return: Response with serialized station list
    """
    if provider is None or network is None:
        raise HTTPException(
            status_code=400,
            detail="Query arguments 'provider' and 'network' are required",
        )
    if parameter is None or resolution is None:
        # BUGFIX: previous message also claimed 'period' is required,
        # but only parameter and resolution are checked here.
        raise HTTPException(
            status_code=400,
            detail="Query arguments 'parameter' and 'resolution' are required",
        )
    if fmt not in ("json", "geojson"):
        raise HTTPException(
            status_code=400,
            detail="format argument must be one of json, geojson",
        )

    set_logging_level(debug)

    try:
        api = Wetterdienst(provider, network)
    except ProviderError:
        # BUGFIX: was `return HTTPException(...)`, which serializes the
        # exception object with a 200 status instead of erroring out.
        raise HTTPException(
            status_code=404,
            detail=f"Choose provider and network from {app.url_path_for('coverage')}",
        )

    parameter = read_list(parameter)
    if period:
        period = read_list(period)
    if station_id:
        station_id = read_list(station_id)

    try:
        stations_ = get_stations(
            api=api,
            parameter=parameter,
            resolution=resolution,
            period=period,
            date=None,
            issue=None,
            all_=all_,
            station_id=station_id,
            name=name,
            coordinates=coordinates,
            rank=rank,
            distance=distance,
            bbox=bbox,
            sql=sql,
            tidy=False,
            si_units=False,
            humanize=False,
            skip_empty=False,
            skip_threshold=0.95,
            dropna=False,
        )
    except (KeyError, ValueError) as e:
        # BUGFIX: raise instead of return (see above).
        raise HTTPException(status_code=404, detail=str(e))

    if not stations_.parameter or not stations_.resolution:
        # BUGFIX: raise instead of return (see above).
        raise HTTPException(
            status_code=404,
            detail=f"No parameter found for provider {provider}, network {network}, "
            f"parameter(s) {parameter} and resolution {resolution}.",
        )

    # NaN / NA become JSON null.
    stations_.df = stations_.df.replace({np.nan: None, pd.NA: None})

    indent = 4 if pretty else None

    if fmt == "json":
        output = stations_.to_dict()
    elif fmt == "geojson":
        output = stations_.to_ogc_feature_collection()

    output = make_json_response(output, api.provider)
    output = json.dumps(output, indent=indent, ensure_ascii=False)

    return Response(content=output, media_type="application/json")