def test_forecast_post_mismatched_aggregate_variable(api):
    """A forecast whose variable differs from its aggregate's is rejected."""
    body = copy_update(VALID_FORECAST_AGG_JSON, 'variable', 'ac_power')
    response = api.post('/forecasts/single/', base_url=BASE_URL, json=body)
    expected = {
        "errors": {
            "variable": ["Forecast variable must match aggregate."]
        }
    }
    assert response.status_code == 400
    assert response.json == expected
def test_forecast_post_power_at_weather_site(api, variable):
    """Power-variable forecasts require a site with modeling parameters."""
    response = api.post(
        '/forecasts/single/', base_url=BASE_URL,
        json=copy_update(VALID_FORECAST_JSON, 'variable', variable))
    assert response.status_code == 400
    message = ("Site must have modeling parameters to create "
               f"{', '.join(POWER_VARIABLES)} records.")
    assert response.json == {"errors": {"site": [message]}}
def test_update_aggregate_add_obs_bad_obs(api, aggregate_id, intext, field,
                                          val):
    """Adding an observation with an incompatible *field* value fails 400.

    The expected error fragment *intext* must appear in the response body.
    """
    bad_obs = copy_update(VALID_OBS_JSON, field, val)
    created = api.post('/observations/', base_url=BASE_URL, json=bad_obs)
    new_obs_id = created.get_data(as_text=True)
    update = {
        'observations': [{'observation_id': new_obs_id,
                          'effective_from': '2019-01-01 01:23:00Z'}]
    }
    response = api.post(f'/aggregates/{aggregate_id}/metadata',
                        json=update, base_url=BASE_URL)
    assert response.status_code == 400
    assert intext in response.get_data(as_text=True)
def test_update_aggregate_add_obs_bad_many(api, aggregate_id):
    """Several invalid observation additions each surface their own error."""
    coarse_obs = copy_update(VALID_OBS_JSON, 'interval_length', 300)
    created = api.post('/observations/', base_url=BASE_URL, json=coarse_obs)
    coarse_obs_id = created.get_data(as_text=True)
    additions = [
        {'observation_id': coarse_obs_id,
         'effective_from': '2019-01-01 01:23:00Z'},
        {'observation_id': '123e4567-e89b-12d3-a456-426655440000',
         'effective_from': '2019-01-01 01:23:00Z'},
    ]
    response = api.post(f'/aggregates/{aggregate_id}/metadata',
                        json={'observations': additions}, base_url=BASE_URL)
    assert response.status_code == 400
    text = response.get_data(as_text=True)
    assert 'present and valid' in text
    assert 'interval length is not less' in text
# NOTE(review): the statements below are the tail of a test whose ``def``
# begins before this chunk; ``res`` is presumably the response to an earlier
# GET of the aggregates listing — confirm against the full file.
assert res.status_code == 200
resp = res.get_json()
for agg in resp:
    # every listed aggregate must embed its observation mappings
    assert 'observations' in agg


def test_post_aggregate_success(api):
    """Posting valid aggregate metadata returns 201 with a Location header."""
    res = api.post('/aggregates/', base_url=BASE_URL, json=VALID_AGG_JSON)
    assert res.status_code == 201
    assert 'Location' in res.headers


# Each case pairs an invalid aggregate payload with the exact serialized
# error body the API is expected to return for it.
@pytest.mark.parametrize('payload,message', [
    (copy_update(VALID_AGG_JSON, 'variable', 'other'),
     f'{{"variable":["Must be one of: {variables}."]}}'),
    (copy_update(VALID_AGG_JSON, 'aggregate_type', 'cov'),
     f'{{"aggregate_type":["Must be one of: {agg_types}."]}}'),
    (copy_update(VALID_AGG_JSON, 'interval_label', 'instant'),
     '{"interval_label":["Must be one of: beginning, ending."]}'),
    ({}, '{"aggregate_type":["Missing data for required field."],"description":["Missing data for required field."],"interval_label":["Missing data for required field."],"interval_length":["Missing data for required field."],"name":["Missing data for required field."],"timezone":["Missing data for required field."],"variable":["Missing data for required field."]}'),  # NOQA
    (copy_update(VALID_AGG_JSON, 'interval_length', '61'),
     f'{{"interval_length":["Must be a divisor of one day."]}}'),
])
def test_post_aggregate_bad_request(api, payload, message):
    """Invalid aggregate metadata is rejected with 400 and a precise error."""
    res = api.post('/aggregates/', base_url=BASE_URL, json=payload)
    assert res.status_code == 400
    assert res.get_data(as_text=True) == f'{{"errors":{message}}}\n'
VALID_SITE_JSON, BASE_URL, copy_update, demo_sites)
# NOTE(review): the line above is the tail of a ``from sfa_api.conftest
# import (...)`` statement whose opening is before this chunk.


def invalidate(json, key):
    """Return a shallow copy of *json* with *key* set to the string 'invalid'."""
    new_json = json.copy()
    new_json[key] = 'invalid'
    return new_json


def removekey(json, key):
    """Return a shallow copy of *json* with *key* removed entirely."""
    new_json = json.copy()
    del new_json[key]
    return new_json


# Site payloads with a single field corrupted apiece, used to exercise
# validation failures one field at a time.
INVALID_NAME = copy_update(VALID_SITE_JSON, 'name', '<script>kiddies</script>')
INVALID_ELEVATION = invalidate(VALID_SITE_JSON, 'elevation')
INVALID_LATITUDE = invalidate(VALID_SITE_JSON, 'latitude')
INVALID_LONGITUDE = invalidate(VALID_SITE_JSON, 'longitude')
INVALID_TIMEZONE = invalidate(VALID_SITE_JSON, 'timezone')
INVALID_AC_CAPACITY = invalidate(VALID_SITE_JSON, 'ac_capacity')
INVALID_DC_CAPACITY = invalidate(VALID_SITE_JSON, 'dc_capacity')
INVALID_BACKTRACK = invalidate(VALID_SITE_JSON, 'backtrack')
INVALID_T_COEFF = invalidate(VALID_SITE_JSON, 'temperature_coefficient')
INVALID_COVERAGE = invalidate(VALID_SITE_JSON, 'ground_coverage_ratio')
INVALID_SURFACE_AZIMUTH = invalidate(VALID_SITE_JSON, 'surface_azimuth')
INVALID_SURFACE_TILT = invalidate(VALID_SITE_JSON, 'surface_tilt')
INVALID_TRACKING_TYPE = invalidate(VALID_SITE_JSON, 'tracking_type')
# Latitude 91 is outside the valid +/-90 degree range.
OUTSIDE_LATITUDE = VALID_SITE_JSON.copy()
OUTSIDE_LATITUDE['latitude'] = 91
def test_forecast_post_site_id_is_aggregate(api, aggregate_id):
    """Passing an aggregate id as ``site_id`` must 404 (not a valid site)."""
    body = copy_update(VALID_FORECAST_JSON, 'site_id', aggregate_id)
    response = api.post('/forecasts/single/', base_url=BASE_URL, json=body)
    assert response.status_code == 404
from io import BytesIO
import json

import pandas as pd
import pytest

from sfa_api.conftest import (variables, interval_value_types,
                              interval_labels, BASE_URL, VALID_FORECAST_JSON,
                              copy_update, VALID_FX_VALUE_JSON,
                              VALID_FX_VALUE_CSV, VALID_FORECAST_AGG_JSON,
                              UNSORTED_FX_VALUE_JSON, ADJ_FX_VALUE_JSON,
                              demo_forecasts, _get_large_test_payload)
from sfa_api.utils.storage_interface import POWER_VARIABLES

# Forecast payloads with one field invalidated apiece, used to drive
# per-field validation error tests.
INVALID_NAME = copy_update(VALID_FORECAST_JSON, 'name', 'Bad semicolon;')
INVALID_VARIABLE = copy_update(VALID_FORECAST_JSON, 'variable', 'invalid')
INVALID_INTERVAL_LABEL = copy_update(VALID_FORECAST_JSON, 'interval_label',
                                     'invalid')
INVALID_ISSUE_TIME = copy_update(VALID_FORECAST_JSON, 'issue_time_of_day',
                                 'invalid')
INVALID_LEAD_TIME = copy_update(VALID_FORECAST_JSON, 'lead_time_to_start',
                                'invalid')
INVALID_INTERVAL_LENGTH = copy_update(VALID_FORECAST_JSON, 'interval_length',
                                      'invalid')
INVALID_RUN_LENGTH = copy_update(VALID_FORECAST_JSON, 'run_length', 'invalid')
INVALID_VALUE_TYPE = copy_update(VALID_FORECAST_JSON, 'interval_value_type',
                                 'invalid')
# Payloads probing the site/aggregate reference rules: presumably exactly one
# of ``site_id``/``aggregate_id`` must be supplied — confirm with validators.
INVALID_BOTH_IDS = copy_update(VALID_FORECAST_JSON, 'aggregate_id',
                               '458ffc27-df0b-11e9-b622-62adb5fd6af0')
INVALID_NO_IDS = VALID_FORECAST_JSON.copy()
del INVALID_NO_IDS['site_id']
def test_observation_post_power_at_weather_site(api, variable):
    """Power-variable observations are rejected at plain weather sites."""
    response = api.post(
        '/observations/', base_url=BASE_URL,
        json=copy_update(VALID_OBS_JSON, 'variable', variable))
    assert response.status_code == 400
def test_observation_post_bad_site(api, missing_id):
    """Posting an observation against an unknown site id yields 404."""
    body = copy_update(VALID_OBS_JSON, 'site_id', missing_id)
    response = api.post('/observations/', base_url=BASE_URL, json=body)
    assert response.status_code == 404
from io import BytesIO
import json

import pandas as pd
import pytest

from sfa_api.conftest import (variables, interval_labels, BASE_URL,
                              VALID_OBS_VALUE_JSON, VALID_OBS_VALUE_CSV,
                              VALID_OBS_JSON, copy_update,
                              _get_large_test_payload, demo_observations)

# Observation payloads with a single invalid field each.
INVALID_NAME = copy_update(VALID_OBS_JSON, 'name', '#Nope')
INVALID_VARIABLE = copy_update(VALID_OBS_JSON, 'variable', 'invalid')
INVALID_INTERVAL_LABEL = copy_update(VALID_OBS_JSON, 'interval_label',
                                     'invalid')

# Exact serialized error body expected when an empty JSON object is posted.
empty_json_response = '{"interval_label":["Missing data for required field."],"interval_length":["Missing data for required field."],"interval_value_type":["Missing data for required field."],"name":["Missing data for required field."],"site_id":["Missing data for required field."],"variable":["Missing data for required field."]}'  # NOQA


@pytest.fixture(params=['missing', 'fx'])
def bad_id(missing_id, forecast_id, request):
    """Yield ids that must not resolve to an observation: a nonexistent
    uuid and a forecast's id."""
    if request.param == 'missing':
        return missing_id
    else:
        return forecast_id


def test_observation_post_success(api):
    """Posting a valid observation returns 201 with a Location header."""
    r = api.post('/observations/', base_url=BASE_URL, json=VALID_OBS_JSON)
    assert r.status_code == 201
    assert 'Location' in r.headers
def test_cdf_forecast_group_post_invalid_agg(api, missing_id):
    """A CDF forecast group referencing an unknown aggregate yields 404."""
    body = copy_update(VALID_CDF_FORECAST_AGG_JSON, 'aggregate_id',
                       missing_id)
    response = api.post('/forecasts/cdf/', base_url=BASE_URL, json=body)
    assert response.status_code == 404
def test_forecast_post_power_at_weather_site(api, variable):
    """CDF forecasts of power variables are rejected at plain weather sites."""
    response = api.post(
        '/forecasts/cdf/', base_url=BASE_URL,
        json=copy_update(VALID_CDF_FORECAST_JSON, 'variable', variable))
    assert response.status_code == 400