def test_apisession_list_forecasts(requests_mock, many_forecasts, many_forecasts_text, mock_list_sites):
    """Listing forecasts should return the deserialized fixture objects."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/forecasts/.*')
    requests_mock.register_uri('GET', url_pattern,
                               content=many_forecasts_text)
    assert sess.list_forecasts() == many_forecasts
def make_latest_nwp_forecasts(token, run_time, issue_buffer, base_url=None):
    """
    Make all reference NWP forecasts for *run_time* that are within
    *issue_buffer* of the next issue time for the forecast. For example,
    this function may run in a cronjob every five minutes with *run_time*
    set to now. By setting *issue_buffer* to '5min', only forecasts that
    should be issued in the next five minutes will be generated on each
    run. Only forecasts that belong to the same provider/organization
    of the token user will be updated.

    Parameters
    ----------
    token : str
        Access token for the API
    run_time : pandas.Timestamp
        Run time of the forecast generation
    issue_buffer : pandas.Timedelta
        Maximum time between *run_time* and the next initialization time of
        each forecast that will be updated
    base_url : str or None, default None
        Alternate base_url of the API
    """
    session = api.APISession(token, base_url=base_url)
    user_info = session.get_user_info()
    # consider both deterministic and probabilistic forecasts
    forecasts = session.list_forecasts()
    forecasts += session.list_probabilistic_forecasts()
    # only update forecasts owned by the token user's organization
    forecasts = [fx for fx in forecasts
                 if fx.provider == user_info['organization']]
    forecast_df = find_reference_nwp_forecasts(forecasts, run_time)
    # keep only forecasts whose next issue time falls within the buffer window
    execute_for = forecast_df[
        forecast_df.next_issue_time <= run_time + issue_buffer]
    if execute_for.empty:
        logger.info('No forecasts to be made at %s', run_time)
        return
    process_nwp_forecast_groups(session, run_time, execute_for)
def test_apisession_post_forecast_values(requests_mock, forecast_values):
    """Posting forecast values should serialize to the expected JSON body."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/forecasts/single/.*/values')
    recorded = requests_mock.register_uri('POST', url_pattern)
    sess.post_forecast_values('fxid', forecast_values)
    sent_body = recorded.request_history[0].text
    assert sent_body == '{"values":[{"timestamp":"2019-01-01T13:00:00Z","value":0.0},{"timestamp":"2019-01-01T14:00:00Z","value":1.0},{"timestamp":"2019-01-01T15:00:00Z","value":2.0},{"timestamp":"2019-01-01T16:00:00Z","value":3.0},{"timestamp":"2019-01-01T17:00:00Z","value":4.0},{"timestamp":"2019-01-01T18:00:00Z","value":5.0}]}' # NOQA
def test_apisession_get_forecast(requests_mock, single_forecast, single_forecast_text, mock_get_site):
    """Fetching one forecast should deserialize into the fixture object."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/forecasts/.*')
    requests_mock.register_uri('GET', url_pattern,
                               content=single_forecast_text)
    result = sess.get_forecast('')
    assert result == single_forecast
def test_apisession_list_observations(requests_mock, many_observations, many_observations_text, mock_list_sites):
    """Listing observations should return the deserialized fixture objects."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/observations/.*')
    requests_mock.register_uri('GET', url_pattern,
                               content=many_observations_text)
    assert sess.list_observations() == many_observations
def test_apisession_get_observation(requests_mock, single_observation, single_observation_text, mock_get_site):
    """Fetching one observation should deserialize into the fixture object."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/observations/.*')
    requests_mock.register_uri('GET', url_pattern,
                               content=single_observation_text)
    result = sess.get_observation('')
    assert result == single_observation
def make_latest_persistence_forecasts(token, max_run_time, base_url=None):
    """Make all reference persistence forecasts that need to be made up to
    *max_run_time*.

    Parameters
    ----------
    token : str
        Access token for the API
    max_run_time : pandas.Timestamp
        Last possible run time of the forecast generation
    base_url : str or None, default None
        Alternate base_url of the API
    """
    session = api.APISession(token, base_url=base_url)
    forecasts = session.list_forecasts()
    observations = session.list_observations()
    params = generate_reference_persistence_forecast_parameters(
        session, forecasts, observations, max_run_time)
    for fx, obs, issue_time, index in params:
        # persistence runs are generated at their issue time
        run_time = issue_time
        logger.info('Making persistence forecast for %s:%s at %s',
                    fx.name, fx.forecast_id, issue_time)
        try:
            fx_ser = run_persistence(session, obs, fx, run_time,
                                     issue_time, index=index)
        except ValueError as e:
            # skip this forecast but continue with the remaining ones
            logger.error('Unable to generate persistence forecast: %s', e)
        else:
            session.post_forecast_values(fx.forecast_id, fx_ser)
def test_apisession_post_observation_values(requests_mock, observation_values):
    """Posting observation values should serialize to the expected JSON body."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/observations/.*/values')
    recorded = requests_mock.register_uri('POST', url_pattern)
    sess.post_observation_values('obsid', observation_values)
    # observation_values_text has a different timestamp format
    sent_body = recorded.request_history[0].text
    assert sent_body == '{"values":[{"timestamp":"2019-01-01T19:00:00Z","value":0.0,"quality_flag":0},{"timestamp":"2019-01-01T19:05:00Z","value":1.0,"quality_flag":0},{"timestamp":"2019-01-01T19:10:00Z","value":1.5,"quality_flag":0},{"timestamp":"2019-01-01T19:15:00Z","value":9.9,"quality_flag":1},{"timestamp":"2019-01-01T19:20:00Z","value":2.0,"quality_flag":0},{"timestamp":"2019-01-01T19:25:00Z","value":-999.0,"quality_flag":3}]}' # NOQA
def test_apisession_get_forecast_values_empty(requests_mock, empty_df):
    """An empty values payload should come back as an empty series."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/forecasts/single/.*/values')
    requests_mock.register_uri('GET', url_pattern, content=b'{"values":[]}')
    start = pd.Timestamp('2019-01-01T06:00:00-0600')
    end = pd.Timestamp('2019-01-01T11:00:00-0600')
    result = sess.get_forecast_values('fxid', start, end)
    pdt.assert_series_equal(result, empty_df['value'])
def test_apisession_get_observation_values_empty(requests_mock, empty_df):
    """An empty values payload should come back as an empty frame."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/observations/.*/values')
    requests_mock.register_uri('GET', url_pattern, content=b'{"values":[]}')
    start = pd.Timestamp('2019-01-01T12:00:00-0600')
    end = pd.Timestamp('2019-01-01T12:25:00-0600')
    result = sess.get_observation_values('obsid', start, end)
    pdt.assert_frame_equal(result, empty_df)
def _fill_persistence_gaps(token, start, end, base_url, forecast_fnc):
    """Fill persistence forecast gaps between *start* and *end*.

    *forecast_fnc* names the session method used to list forecasts, so the
    same code path serves deterministic and probabilistic forecasts.
    """
    sess = api.APISession(token, base_url=base_url)
    fx_list = getattr(sess, forecast_fnc)()
    obs_list = sess.list_observations()
    gap_params = generate_reference_persistence_forecast_gaps_parameters(
        sess, fx_list, obs_list, start, end)
    for fx, obs, index, data_start, data_end, issue_times in gap_params:
        _pers_loop(sess, fx, obs, index, data_start, data_end, issue_times)
def real_session(auth_token):
    """Return a session against the dev API, skipping the test if offline."""
    sess = api.APISession(
        auth_token, base_url='https://dev-api.solarforecastarbiter.org')
    try:
        sess.get('')
    except Exception:
        # no connectivity; do not fail the suite
        return pytest.skip('Cannot connect to dev api')
    return sess
def test_get_data_for_report_event(mock_event_data, event_report_objects):
    """Event report data: observation as a frame, forecasts as series."""
    report, observation, forecast_0, forecast_1 = event_report_objects
    sess = api.APISession('nope')
    data = main.get_data_for_report(sess, report)
    assert isinstance(data[observation], pd.DataFrame)
    for fx in (forecast_0, forecast_1):
        assert isinstance(data[fx], pd.Series)
    get_forecast_values, get_observation_values = mock_event_data
    assert get_forecast_values.call_count == 2
    assert get_observation_values.call_count == 1
def _get_nwp_forecast_df(token, run_time, base_url):
    """Return a session plus the reference NWP forecast parameters frame
    for all forecasts owned by the token user's organization."""
    sess = api.APISession(token, base_url=base_url)
    org = sess.get_user_info()['organization']
    all_fx = sess.list_forecasts() + sess.list_probabilistic_forecasts()
    owned = [fx for fx in all_fx if fx.provider == org]
    return sess, find_reference_nwp_forecasts(owned, run_time)
def test_apisession_create_site(requests_mock, single_site, site_text):
    """Creating a site posts it and reads back the full definition."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/sites/.*')
    requests_mock.register_uri('POST', url_pattern, text=single_site.site_id)
    requests_mock.register_uri('GET', url_pattern, content=site_text)
    # strip server-assigned fields before rebuilding the site to create
    site_dict = single_site.to_dict()
    for key in ('site_id', 'provider', 'extra_parameters'):
        del site_dict[key]
    candidate = type(single_site).from_dict(site_dict)
    assert sess.create_site(candidate) == single_site
def test_capture_report_failure_is_valid_datamodel(mocker):
    """The failure report posted by the wrapper must round-trip through
    the RawReport datamodel."""
    api_post = mocker.patch('solarforecastarbiter.io.api.APISession.post')

    def fail():
        raise TypeError()

    sess = api.APISession('nope')
    failwrap = main.capture_report_failure('report_id', sess)
    with pytest.raises(TypeError):
        failwrap(fail)()
    posted_json = api_post.call_args_list[0][1]['json']
    raw = datamodel.RawReport.from_dict(posted_json)
    assert raw.messages[0].level == 'CRITICAL'
def test_capture_report_failure(mocker):
    """A failing report run posts a critical message and a failed status."""
    api_post = mocker.patch('solarforecastarbiter.io.api.APISession.post')

    def fail():
        raise TypeError()

    sess = api.APISession('nope')
    failwrap = main.capture_report_failure('report_id', sess)
    with pytest.raises(TypeError):
        failwrap(fail)()
    first_message = api_post.call_args_list[0][1]['json']['messages'][0]
    assert 'Critical ' in first_message['message']
    assert api_post.call_args_list[1][0][0] == '/reports/report_id/status/failed'  # NOQA
def test_apisession_create_observation(requests_mock, single_observation, single_observation_text, mock_get_site):
    """Creating an observation posts it and reads back the full definition."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/observations/.*')
    requests_mock.register_uri('POST', url_pattern,
                               text=single_observation.observation_id)
    requests_mock.register_uri('GET', url_pattern,
                               content=single_observation_text)
    # strip server-assigned fields before rebuilding the observation
    observation_dict = single_observation.to_dict()
    for key in ('observation_id', 'extra_parameters'):
        del observation_dict[key]
    candidate = type(single_observation).from_dict(observation_dict)
    assert sess.create_observation(candidate) == single_observation
def test_apisession_create_forecast(requests_mock, single_forecast, single_forecast_text, mock_get_site):
    """Creating a forecast posts it and reads back the full definition."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/forecasts/single/.*')
    requests_mock.register_uri('POST', url_pattern,
                               text=single_forecast.forecast_id)
    requests_mock.register_uri('GET', url_pattern,
                               content=single_forecast_text)
    # strip server-assigned fields before rebuilding the forecast
    forecast_dict = single_forecast.to_dict()
    for key in ('forecast_id', 'extra_parameters'):
        del forecast_dict[key]
    candidate = type(single_forecast).from_dict(forecast_dict)
    assert sess.create_forecast(candidate) == single_forecast
def test_get_data_for_report(mock_data, report_objects, mocker):
    """Report data: observations/aggregates as frames, forecasts as series,
    with validation applied and the expected number of API calls."""
    report, observation, forecast_0, forecast_1, aggregate, forecast_agg = \
        report_objects
    sess = api.APISession('nope')
    apply_obs = mocker.spy(main, 'apply_validation')
    data = main.get_data_for_report(sess, report)
    assert apply_obs.call_count == 2
    assert isinstance(data[observation], pd.DataFrame)
    assert isinstance(data[aggregate], pd.DataFrame)
    for fx in (forecast_0, forecast_1, forecast_agg):
        assert isinstance(data[fx], pd.Series)
    get_forecast_values, get_observation_values, get_aggregate_values = \
        mock_data
    assert get_forecast_values.call_count == 4
    assert get_observation_values.call_count == 1
    assert get_aggregate_values.call_count == 1
def make_latest_probabilistic_persistence_forecasts(
        token, max_run_time, base_url=None):
    """Make all reference probabilistic persistence forecasts that need to be
    made up to *max_run_time*.

    Parameters
    ----------
    token : str
        Access token for the API
    max_run_time : pandas.Timestamp
        Last possible run time of the forecast generation
    base_url : str or None, default None
        Alternate base_url of the API
    """
    session = api.APISession(token, base_url=base_url)
    forecasts = session.list_probabilistic_forecasts()
    observations = session.list_observations()
    params = generate_reference_persistence_forecast_parameters(
        session, forecasts, observations, max_run_time)
    for fx, obs, index, data_start, issue_times in params:
        # preload the observation data once and reuse it for every issue time
        load_data = _preload_load_data(session, obs, data_start, max_run_time)
        # accumulate series per constant-value forecast id before posting
        out = defaultdict(list)
        logger.info('Making persistence forecast for %s:%s from %s to %s',
                    fx.name, fx.forecast_id, issue_times[0], issue_times[-1])
        for issue_time in issue_times:
            # persistence runs are generated at their issue time
            run_time = issue_time
            try:
                fx_list = run_persistence(
                    session, obs, fx, run_time, issue_time, index=index,
                    load_data=load_data)
            except ValueError as e:
                # skip this issue time but continue with the remaining ones
                logger.error('Unable to generate persistence forecast: %s', e)
            else:
                # api requires a post per constant value
                cv_ids = [f.forecast_id for f in fx.constant_values]
                for id_, fx_ser in zip(cv_ids, fx_list):
                    out[id_].append(fx_ser)
        for id_, serlist in out.items():
            if len(serlist) > 0:
                ser = pd.concat(serlist)
                # post in continuous chunks per the forecast interval length
                for cser in generate_continuous_chunks(
                        ser, fx.interval_length):
                    session.post_probabilistic_forecast_constant_value_values(
                        id_, cser)
def make_latest_persistence_forecasts(token, max_run_time, base_url=None):
    """Make all reference persistence forecasts that need to be made up to
    *max_run_time*.

    Parameters
    ----------
    token : str
        Access token for the API
    max_run_time : pandas.Timestamp
        Last possible run time of the forecast generation
    base_url : str or None, default None
        Alternate base_url of the API
    """
    # NOTE(review): another make_latest_persistence_forecasts definition in
    # this file unpacks params as (fx, obs, issue_time, index) — confirm
    # which signature of generate_reference_persistence_forecast_parameters
    # is current and remove the stale copy.
    session = api.APISession(token, base_url=base_url)
    forecasts = session.list_forecasts()
    observations = session.list_observations()
    params = generate_reference_persistence_forecast_parameters(
        session, forecasts, observations, max_run_time)
    for fx, obs, index, data_start, issue_times in params:
        _pers_loop(session, fx, obs, index, data_start, max_run_time,
                   issue_times)
def make_reference_aggregates(token, provider, base_url, aggregates=None):
    """Create the reference aggregates in the API.

    Parameters
    ----------
    token: str
        Access token for the API
    provider: str
        Provider name to filter all API observations on
    base_url: str
        URL of the API to list objects and create aggregate at
    aggregates: list or None
        List of dicts that describes each aggregate. Defaults to
        REF_AGGREGATES if None.

    Raises
    ------
    ValueError
        If an observation does not exist in the API for an aggregate or
        multiple observations match the given name and site name.
    """
    session = api.APISession(token, base_url=base_url)
    # only observations belonging to the given provider may be aggregated
    observations = list(filter(lambda x: x.provider == provider,
                               session.list_observations()))
    existing_aggregates = {ag.name for ag in session.list_aggregates()}
    if aggregates is None:
        aggregates = REF_AGGREGATES
    for agg_def in aggregates:
        # skip (do not recreate) aggregates that already exist by name
        if agg_def['name'] in existing_aggregates:
            logger.warning('Aggregate %s already exists',
                           agg_def['name'])
            # TODO: update the aggregate if the definition has changed
            continue
        logger.info('Creating aggregate %s', agg_def['name'])
        agg = generate_aggregate(observations, agg_def)
        # allow create to raise any API errors
        session.create_aggregate(agg)
def test_apisession_get_site(mock_get_site, get_site):
    """Fetching a site by id returns the matching fixture site."""
    site_id = '123e4567-e89b-12d3-a456-426655440002'
    sess = api.APISession('')
    assert sess.get_site(site_id) == get_site(site_id)
def session(requests_mock, observation_values_text):
    """API session fixture with observation values GETs mocked out."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/observations/.*/values')
    requests_mock.register_uri('GET', url_pattern,
                               content=observation_values_text)
    return sess
def session(requests_mock):
    """Bare API session fixture; requests_mock intercepts all HTTP."""
    return api.APISession('')
def test_apisession_init(requests_mock):
    """The token must be sent as a bearer Authorization header."""
    sess = api.APISession('TOKEN')
    requests_mock.register_uri('GET', sess.base_url)
    response = sess.get('')
    assert response.request.headers['Authorization'] == 'Bearer TOKEN'
def test_apisession_init_hidden(requests_mock):
    """A HiddenToken is unwrapped into the bearer Authorization header."""
    hidden = utils.HiddenToken('TOKEN')
    sess = api.APISession(hidden)
    requests_mock.register_uri('GET', sess.base_url)
    response = sess.get('')
    assert response.request.headers['Authorization'] == 'Bearer TOKEN'
def test_apisession_get_site_dne(requests_mock):
    """A 404 from the API surfaces as an HTTPError to the caller."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/.*')
    requests_mock.register_uri('GET', url_pattern, status_code=404)
    with pytest.raises(requests.exceptions.HTTPError):
        sess.get_site('123e4567-e89b-12d3-a456-426655440002')
def test_apisession_request(endpoint, method, expected, requests_mock):
    """Relative endpoints must resolve to the expected absolute URL."""
    sess = api.APISession('')
    url_pattern = re.compile(f'{sess.base_url}/.*')
    requests_mock.register_uri(method, url_pattern)
    result = sess.request(method, endpoint)
    assert result.request.url == expected