def test_output_mimetype(self):
    """Test input parsing """
    my_process = create_mimetype_process()
    service = Service(processes=[my_process])
    self.assertEqual(len(service.processes.keys()), 1)
    self.assertTrue(service.processes['get_mimetype_process'])

    # Minimal stand-in for a parsed WPS Execute request; the process under
    # test echoes the requested output mimetype back as its data.
    class FakeRequest():
        def __init__(self, mimetype):
            self.outputs = {'mimetype': {
                'identifier': 'mimetype',
                'mimetype': mimetype,
                'data': 'the data'
            }}
        identifier = 'get_mimetype_process'
        service = 'wps'
        operation = 'execute'
        version = '1.0.0'
        inputs = {}
        raw = False
        store_execute = False
        lineage = False

    # valid mimetype
    request = FakeRequest('text/plain+test')
    response = service.execute('get_mimetype_process', request, 'fakeuuid')
    self.assertEqual(response.outputs['mimetype'].data, 'text/plain+test')

    # non valid mimetype -- the service must reject it
    request = FakeRequest('text/xml')
    with self.assertRaises(InvalidParameterValue):
        response = service.execute('get_mimetype_process', request, 'fakeuuid')
def test_dods(self):
    """Execute a process with an OPeNDAP (DODS) complex input by reference.

    Skipped when netCDF4 is not installed.  Checks that the 'conventions'
    output is extracted from the remote dataset, that the DODS output is
    returned as a reference, and that the raw NetCDF output matches the
    local test file byte-for-byte.
    """
    if not WITH_NC4:
        self.skipTest('netCDF4 not installed')

    my_process = create_complex_nc_process()
    service = Service(processes=[my_process])

    href = "http://test.opendap.org:80/opendap/netcdf/examples/sresa1b_ncar_ccsm3_0_run1_200001.nc"

    """
    # Is this how the request should be written ?
    request_doc = WPS.Execute(
        OWS.Identifier('my_opendap_process'),
        WPS.DataInputs(
            WPS.Input(
                OWS.Identifier('dods'),
                WPS.Reference(
                    WPS.Body('request body'),
                    {'{http://www.w3.org/1999/xlink}href': href},
                    method='POST'
                )
                #WPS.Data(WPS.ComplexData(href=href, mime_type='application/x-ogc-dods'))
                # This form is not supported yet. Should it be ?
            )
        ),
        version='1.0.0'
    )
    resp = client.post_xml(doc=request_doc)
    assert_response_success(resp)
    """

    class FakeRequest():
        identifier = 'my_opendap_process'
        service = 'wps'
        operation = 'execute'
        version = '1.0.0'
        # BUG FIX: this was ``raw = True,`` -- the trailing comma made it a
        # one-element tuple ``(True,)``, which is only truthy by accident.
        raw = True
        inputs = {'dods': [{
            'identifier': 'dods',
            'href': href,
        }]}
        store_execute = False
        lineage = False
        outputs = ['conventions']

    request = FakeRequest()

    resp = service.execute('my_opendap_process', request, 'fakeuuid')
    self.assertEqual(resp.outputs['conventions'].data, u'CF-1.0')
    self.assertEqual(resp.outputs['outdods'].url, href)
    self.assertTrue(resp.outputs['outdods'].as_reference)
    self.assertFalse(resp.outputs['ncraw'].as_reference)
    with open(os.path.join(DATA_DIR, 'netcdf', 'time.nc'), 'rb') as f:
        data = f.read()
    self.assertEqual(resp.outputs['ncraw'].data, data)
def test_input_default(self):
    """Executing with no inputs must fall back to the default complex data."""
    service = Service(processes=[create_complex_proces()])
    self.assertEqual(len(service.processes.keys()), 1)
    self.assertTrue(service.processes['my_complex_process'])

    # Bare-bones Execute request carrying no inputs at all.
    class FakeRequest():
        identifier = 'complex_process'
        service = 'wps'
        operation = 'execute'
        version = '1.0.0'
        inputs = {}
        raw = False
        outputs = {}
        store_execute = False
        lineage = False

    response = service.execute('my_complex_process', FakeRequest(), 'fakeuuid')
    self.assertEqual(response.outputs['complex'].data, 'DEFAULT COMPLEX DATA')
def test_input_parser(self):
    """Test input parsing """
    my_process = create_complex_proces()
    service = Service(processes=[my_process])
    self.assertEqual(len(service.processes.keys()), 1)
    self.assertTrue(service.processes['my_complex_process'])

    class FakeRequest():
        identifier = 'complex_process'
        service='wps'
        operation='execute'
        version='1.0.0'
        inputs = {'complex': [{
            'identifier': 'complex',
            'mimeType': 'text/gml',
            'data': 'the data'
        }]}

    request = FakeRequest();

    # 'text/gml' is not among the supported formats -> must be rejected
    # with the locator pointing at 'mimeType'.
    try:
        service.execute('my_complex_process', request, 'fakeuuid')
    except InvalidParameterValue as e:
        self.assertEqual(e.locator, 'mimeType')

    # A supported mimetype parses fine and picks up the empty validator.
    request.inputs['complex'][0]['mimeType'] = 'application/gml'
    parsed_inputs = service.create_complex_inputs(my_process.inputs[0], request.inputs['complex'])
    # TODO parse outputs and their validators too
    self.assertEqual(parsed_inputs[0].data_format.validate, emptyvalidator)

    # 'application/xml+gml' is not yet supported by the input -> rejected.
    request.inputs['complex'][0]['mimeType'] = 'application/xml+gml'
    try:
        parsed_inputs = service.create_complex_inputs(my_process.inputs[0], request.inputs['complex'])
    except InvalidParameterValue as e:
        self.assertEqual(e.locator, 'mimeType')

    # Assigning an unsupported data_format directly is rejected too.
    try:
        my_process.inputs[0].data_format = Format(mime_type='application/xml+gml')
    except InvalidParameterValue as e:
        self.assertEqual(e.locator, 'mimeType')

    # Once the format (with a GML validator) is registered as supported,
    # parsing succeeds and the parsed input carries that validator.
    frmt = Format(mime_type='application/xml+gml', validate=validategml)
    self.assertEqual(frmt.validate, validategml)

    my_process.inputs[0].supported_formats = [frmt]
    my_process.inputs[0].data_format = Format(mime_type='application/xml+gml')
    parsed_inputs = service.create_complex_inputs(my_process.inputs[0], request.inputs['complex'])
    self.assertEqual(parsed_inputs[0].data_format.validate, validategml)
def test_input_parser(self):
    """Test input parsing """
    my_process = create_complex_proces()
    service = Service(processes=[my_process])
    self.assertEqual(len(service.processes.keys()), 1)
    self.assertTrue(service.processes["my_complex_process"])

    class FakeRequest:
        identifier = "complex_process"
        service = "wps"
        version = "1.0.0"
        inputs = {"complex": [{"identifier": "complex", "mimeType": "text/gml", "data": "the data"}]}

    request = FakeRequest()

    # 'text/gml' is not a supported format -> rejected at 'mimeType'.
    try:
        service.execute("my_complex_process", request, "fakeuuid")
    except InvalidParameterValue as e:
        self.assertEqual(e.locator, "mimeType")

    # A supported mimetype parses and uses the empty validator.
    request.inputs["complex"][0]["mimeType"] = "application/gml"
    parsed_inputs = service.create_complex_inputs(my_process.inputs[0], request.inputs["complex"])
    # TODO parse outputs and their validators too
    self.assertEqual(parsed_inputs[0].data_format.validate, emptyvalidator)

    # 'application/xml+gml' is not yet supported by the input -> rejected.
    request.inputs["complex"][0]["mimeType"] = "application/xml+gml"
    try:
        parsed_inputs = service.create_complex_inputs(my_process.inputs[0], request.inputs["complex"])
    except InvalidParameterValue as e:
        self.assertEqual(e.locator, "mimeType")

    # Direct assignment of an unsupported data_format is rejected too.
    try:
        my_process.inputs[0].data_format = Format(mime_type="application/xml+gml")
    except InvalidParameterValue as e:
        self.assertEqual(e.locator, "mimeType")

    # After registering the format (with a GML validator) as supported,
    # parsing succeeds and the validator is propagated.
    frmt = Format(mime_type="application/xml+gml", validate=validategml)
    self.assertEqual(frmt.validate, validategml)

    my_process.inputs[0].supported_formats = [frmt]
    my_process.inputs[0].data_format = Format(mime_type="application/xml+gml")
    parsed_inputs = service.create_complex_inputs(my_process.inputs[0], request.inputs["complex"])
    self.assertEqual(parsed_inputs[0].data_format.validate, validategml)
# NOTE(review): fragment -- the opening of the ``processes`` list and the
# body of ``wps()`` lie outside this chunk.
    Buffer(),
    Area(),
    Box(),
    TestJson()
]


# For the process list on the home page
process_descriptor = {}
for process in processes:
    abstract = process.abstract
    identifier = process.identifier
    process_descriptor[identifier] = abstract

# This is, how you start PyWPS instance
service = Service(processes, ['pywps.cfg'])


@app.route("/")
def hello():
    # Render the landing page listing the available processes.
    server_url = pywps.configuration.get_config_value("server", "url")
    request_url = flask.request.url
    return flask.render_template('home.html',
                                 request_url=request_url,
                                 server_url=server_url,
                                 process_descriptor=process_descriptor)


@app.route('/wps', methods=['GET', 'POST'])
def wps():
def setUp(self):
    """Build a WPS test client exposing two no-op processes."""
    def hello(request):
        pass

    def ping(request):
        pass

    hello_proc = Process(hello, 'hello', 'Process Hello')
    ping_proc = Process(ping, 'ping', 'Process Ping')
    self.client = client_for(Service(processes=[hello_proc, ping_proc]))
def test_metalink(self):
    """An Execute request without the required parameters must yield HTTP 400."""
    service = Service(processes=[create_metalink_process()])
    response = client_for(service).get('?Request=Execute&identifier=multiple-outputs')
    assert response.status_code == 400
def test_wps_blocking():
    """The 'blocking' process configured via .custom.cfg executes successfully."""
    service = Service(processes=[Blocking()], cfgfiles=[".custom.cfg"])
    client = client_for(service)
    resp = client.get(
        service='WPS',
        request='Execute',
        version='1.0.0',
        identifier='blocking',
    )
    print(resp.data)
    assert_response_success(resp)
def setUp(self):
    """Client backed by a service with an empty process list."""
    empty_service = Service(processes=[])
    self.client = client_for(empty_service)
def test_wps_error():
    """Fire an Execute request at a failing process.

    No assertion is made on the reply; the point is that the call itself
    does not blow up the client.
    """
    client = client_for(Service(processes=[ShowError()]))
    datainputs = "message=tomorrow-is-another-day;nice=true"
    url = ("?service=WPS&request=Execute&version=1.0.0&identifier=hello&datainputs={}"
           .format(datainputs))
    client.get(url)
def test_GEPS(self): client = client_for( Service(processes=[HindcastingProcess()], cfgfiles=CFG_FILE)) # model = 'HMETS' # params = '9.5019, 0.2774, 6.3942, 0.6884, 1.2875, 5.4134, 2.3641, 0.0973, 0.0464, 0.1998, 0.0222, -1.0919, ' \ # '2.6851, 0.3740, 1.0000, 0.4739, 0.0114, 0.0243, 0.0069, 310.7211, 916.1947' params = "0.529, -3.396, 407.29, 1.072, 16.9, 0.947" forecast_model = "GEPS" region_vector = get_local_testdata("watershed_vector/LSJ_LL.zip") pr = json.dumps({ "pr": { "linear_transform": (1.0, 0.0), "time_shift": -0.25, "deaccumulate": True, } }) tas = json.dumps( {"tas": { "linear_transform": (1.0, 0.0), "time_shift": -0.25 }}) rvc = get_local_testdata("gr4j_cemaneige/solution.rvc") hdate = dt.datetime(2018, 6, 1) # Date of the forecast that will be used to determine the members of the climatology-based ESP # (same day of year of all other years) datainputs = ("gr4jcn={params};" "latitude={latitude};" "longitude={longitude};" "name={name};" "area={area};" "duration={duration};" "elevation={elevation};" "forecast_model={forecast_model};" "region_vector=file@xlink:href=file://{region_vector};" "rain_snow_fraction={rain_snow_fraction};" "nc_spec={pr};" "nc_spec={tas};" "rvc=file@xlink:href=file://{rvc};" "hdate={hdate};".format( params=params, latitude=54.4848, longitude=-123.3659, name="Salmon", area="4250.6", duration=8, elevation="843.0", forecast_model=forecast_model, region_vector=region_vector, rain_snow_fraction="RAINSNOW_DINGMAN", pr=pr, tas=tas, rvc=rvc, hdate=hdate, )) resp = client.get( service="WPS", request="Execute", version="1.0.0", identifier="hindcasting", datainputs=datainputs, ) assert_response_success(resp) out = get_output(resp.xml) assert "hydrograph" in out forecast, _ = urlretrieve(out["hydrograph"]) q_sim = xr.open_dataset(forecast)["q_sim"] np.testing.assert_almost_equal( q_sim.isel(time=-1).mean(), [12.585823219473196]) assert "member" in q_sim.dims
def deploy():
    """Register an extra Docker process and rebuild the shared WPS service."""
    new_process = DockerRun("newprocess")
    processes.append(new_process)
    mywps[0] = Service(processes, ['pywps.cfg'])
def test_simple(self):
    """Calibrate MOHYSE with Ostrich/DDS and check the NSE/RMSE diagnostics."""
    client = client_for(
        Service(processes=[OstrichMOHYSEProcess()], cfgfiles=CFG_FILE)
    )
    # Lower/upper bounds for the 10 calibrated parameters.
    low_p = "0.01, 0.01, 0.01, -5.00, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01"
    high_p = "20.0, 1.0, 20.0, 5.0, 0.5, 1.0, 1.0, 1.0, 15.0, 15.0"

    datainputs = (
        "ts=files@xlink:href=file://{ts};"
        "algorithm={algorithm};"
        "max_iterations={max_iterations};"
        "lowerBounds={low_p};"
        "upperBounds={high_p};"
        "start_date={start_date};"
        "duration={duration};"
        "name={name};"
        "run_name={run_name};"
        "area={area};"
        "latitude={latitude};"
        "longitude={longitude};"
        "elevation={elevation};"
        "random_numbers=file@xlink:href=file://{random_numbers};"
        "random_seed=0".format(
            ts=get_local_testdata(
                "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc",
            ),
            algorithm="DDS",
            max_iterations=10,
            low_p=low_p,
            high_p=high_p,
            start_date=dt.datetime(1954, 1, 1),
            duration=208,
            name="Salmon",
            run_name="test",
            area="4250.6",
            elevation="843.0",
            latitude=54.4848,
            longitude=-123.3659,
            random_numbers=get_local_testdata(
                "ostrich-gr4j-cemaneige/OstRandomNumbers.txt"
            ),
        )
    )

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="ostrich-mohyse",
        datainputs=datainputs,
    )

    assert_response_success(resp)
    out = get_output(resp.xml)
    assert "diagnostics" in out
    tmp_file, _ = urlretrieve(out["diagnostics"])
    tmp_content = open(tmp_file).readlines()

    # checking correctness of NSE (full period 1954-2010 with budget 50 would be NSE=0.5779910)
    assert "DIAG_NASH_SUTCLIFFE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_NASH_SUTCLIFFE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 0.3826810, 4, err_msg="NSE is not matching expected value"
    )

    # checking correctness of RMSE (full period 1954-2010 would be RMSE=????)
    assert "DIAG_RMSE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_RMSE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 40.7086, 4, err_msg="RMSE is not matching expected value"
    )
def setUp(self):
    """Set up a service with two translated processes (en-US + fr-CA)."""
    # Reading a value first presumably forces the configuration to be
    # loaded before we mutate it -- TODO confirm.
    configuration.get_config_value('server', 'language')
    configuration.CONFIG.set('server', 'language', 'en-US,fr-CA')

    self.client = client_for(
        Service(processes=[
            # pr1: fully translated (title + abstract at every level).
            Process(
                lambda: None,
                "pr1",
                "Process 1",
                abstract="Process 1",
                inputs=[
                    LiteralInput('input1',
                                 title='Input name',
                                 abstract='Input abstract',
                                 translations={
                                     "fr-CA": {
                                         "title": "Nom de l'input",
                                         "abstract": "Description"
                                     }
                                 })
                ],
                outputs=[
                    LiteralOutput('output1',
                                  title='Output name',
                                  abstract='Output abstract',
                                  translations={
                                      "fr-CA": {
                                          "title": "Nom de l'output",
                                          "abstract": "Description"
                                      }
                                  })
                ],
                translations={
                    "fr-CA": {
                        "title": "Processus 1",
                        "abstract": "Processus 1"
                    }
                },
            ),
            # pr2: only partially translated (missing titles/abstracts),
            # to exercise the fallback behaviour.
            Process(
                lambda: None,
                "pr2",
                "Process 2",
                abstract="Process 2",
                inputs=[
                    LiteralInput('input1',
                                 title='Input name',
                                 abstract='Input abstract',
                                 translations={
                                     "fr-CA": {
                                         "abstract": "Description"
                                     }
                                 })
                ],
                outputs=[
                    LiteralOutput('output1',
                                  title='Output name',
                                  abstract='Output abstract',
                                  translations={
                                      "fr-CA": {
                                          "abstract": "Description"
                                      }
                                  })
                ],
                translations={"fr-CA": {
                    "title": "Processus 2"
                }},
            ),
        ]))
def client():
    """Return a test client wrapping every finch process."""
    service = Service(processes=finch.processes.processes, cfgfiles=CFG_FILE)
    return client_for(service)
def __init__(self, configuration_file, configuration_url):
    """Wire up the PyWPS service and the remote configuration gateway.

    :param configuration_file: path to a PyWPS configuration file
    :param configuration_url: base URL for the configuration gateway
    """
    self.service = Service(cfgfiles=[configuration_file])
    self.configuration = ConfigurationGateway(configuration_url)
def test_GEPS(self): client = client_for( Service( processes=[RealtimeForecastProcess()], cfgfiles=CFG_FILE, ) ) # # model = 'HMETS' # params = '9.5019, 0.2774, 6.3942, 0.6884, 1.2875, 5.4134, 2.3641, 0.0973, 0.0464, 0.1998, 0.0222, -1.0919, ' \ # '2.6851, 0.3740, 1.0000, 0.4739, 0.0114, 0.0243, 0.0069, 310.7211, 916.1947' params = "0.529, -3.396, 407.29, 1.072, 16.9, 0.947" forecast_model = "GEPS" region_vector = get_local_testdata("watershed_vector/LSJ_LL.zip") pr = json.dumps( { "pr": { "scale": 1, "offset": 0, "time_shift": -0.25, "deaccumulate": True, } } ) tas = json.dumps({"tas": {"scale": 1, "offset": 0, "time_shift": -0.25}}) rvc = get_local_testdata("gr4j_cemaneige/solution.rvc") # Date of the forecast that will be used to determine the members of the climatology-based ESP # (same day of year of all other years) datainputs = ( "gr4jcn={params};" "latitude={latitude};" "longitude={longitude};" "name={name};" "area={area};" "duration={duration};" "elevation={elevation};" "forecast_model={forecast_model};" "region_vector=file@xlink:href=file://{region_vector};" "rain_snow_fraction={rain_snow_fraction};" "nc_spec={pr};" "nc_spec={tas};" "rvc=file@xlink:href=file://{rvc};".format( params=params, latitude=54.4848, longitude=-123.3659, name="Salmon", area="4250.6", duration=3, elevation="843.0", forecast_model=forecast_model, region_vector=region_vector, rain_snow_fraction="RAINSNOW_DINGMAN", pr=pr, tas=tas, rvc=rvc, ) ) resp = client.get( service="WPS", request="Execute", version="1.0.0", identifier="realtime-forecast", datainputs=datainputs, ) assert_response_success(resp) out = get_output(resp.xml) assert "hydrograph" in out
def test_simple(self):
    """Calibrate HBV-EC with Ostrich/DDS and check the NSE/RMSE diagnostics."""
    client = client_for(
        Service(processes=[OstrichHBVECProcess()], cfgfiles=CFG_FILE))

    # Lower/upper bounds for the 21 calibrated HBV-EC parameters.
    lowerBounds = "-3.0, 0.0, 0.0, 0.0, 0.0, 0.3, 0.0, 0.0, 0.01, 0.05, 0.01, \
                   0.0, 0.0, 0.0, 0.0, 0.0, 0.01, 0.0, 0.05, 0.8, 0.8"
    upperBounds = "3.0, 8.0, 8.0, 0.1, 1.0, 1.0, 7.0, 100.0, 1.0, 0.1, 6.0, 5.0, \
                   5.0, 0.2, 1.0, 30.0, 3.0, 2.0, 1.0, 1.5, 1.5"

    datainputs = (
        "ts=files@xlink:href=file://{ts};"
        "algorithm={algorithm};"
        "max_iterations={max_iterations};"
        "lowerBounds={lowerBounds};"
        "upperBounds={upperBounds};"
        "start_date={start_date};"
        "duration={duration};"
        "name={name};"
        "run_name={run_name};"
        "area={area};"
        "latitude={latitude};"
        "longitude={longitude};"
        "elevation={elevation};"
        "random_numbers=file@xlink:href=file://{random_numbers};"
        "random_seed=0".format(
            ts=get_local_testdata(
                "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc",
            ),
            algorithm="DDS",
            max_iterations=10,
            lowerBounds=lowerBounds,
            upperBounds=upperBounds,
            start_date=dt.datetime(1954, 1, 1),
            duration=208,
            name="Salmon",
            run_name="test",
            area="4250.6",
            elevation="843.0",
            latitude=54.4848,
            longitude=-123.3659,
            random_numbers=get_local_testdata(
                "ostrich-gr4j-cemaneige/OstRandomNumbers.txt"),
        ))

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="ostrich-hbv-ec",
        datainputs=datainputs,
    )

    assert_response_success(resp)
    out = get_output(resp.xml)
    assert "diagnostics" in out
    tmp_file, _ = urlretrieve(out["diagnostics"])
    tmp_content = open(tmp_file).readlines()

    # TODO Julie :: values not adjusted yet!!! WPS needs to work first ...

    # checking correctness of NSE (full period 1954-2010 with budget 50 would be NSE=6.034670E-01)
    assert "DIAG_NASH_SUTCLIFFE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_NASH_SUTCLIFFE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, -0.225991, 4, err_msg="NSE is not matching expected value")

    # checking correctness of RMSE (full period 1954-2010 would be RMSE=????)
    assert "DIAG_RMSE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_RMSE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 57.3687, 4, err_msg="RMSE is not matching expected value")
def test_simple(self):
    """Calibrate GR4J-CemaNeige with Ostrich/DDS and check diagnostics and outputs."""
    client = client_for(
        Service(processes=[OstrichGR4JCemaNeigeProcess()], cfgfiles=CFG_FILE))

    # Lower/upper bounds for the 6 calibrated parameters.
    lowerBounds = "0.01, -15.0, 10.0, 0.0, 1.0, 0.0"
    upperBounds = "2.5, 10.0, 700.0, 7.0, 30.0, 1."

    # some params in Raven input files are derived from those 21 parameters
    # pdefaults.update({'GR4J_X1_hlf': pdefaults['GR4J_X1']*1000./2.0})       --> x1 * 1000. / 2.0
    # pdefaults.update({'one_minus_CEMANEIGE_X2': 1.0 - pdefaults['CEMANEIGE_X2']}) --> 1.0 - x6

    datainputs = (
        "ts=files@xlink:href=file://{ts};"
        "algorithm={algorithm};"
        "max_iterations={max_iterations};"
        "lowerBounds={lowerBounds};"
        "upperBounds={upperBounds};"
        "start_date={start_date};"
        "duration={duration};"
        "name={name};"
        "run_name={run_name};"
        "area={area};"
        "latitude={latitude};"
        "longitude={longitude};"
        "elevation={elevation};"
        "random_numbers=files@xlink:href=file://{random_numbers};"
        "random_seed=0;".format(
            ts=get_local_testdata(
                "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc",
            ),
            algorithm="DDS",
            max_iterations=10,
            lowerBounds=lowerBounds,
            upperBounds=upperBounds,
            start_date=dt.datetime(1954, 1, 1),
            duration=208,
            name="Salmon",
            run_name="test",
            area="4250.6",
            elevation="843.0",
            latitude=54.4848,
            longitude=-123.3659,
            random_numbers=get_local_testdata(
                "ostrich-gr4j-cemaneige/OstRandomNumbers.txt"),
        ))

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="ostrich-gr4j-cemaneige",
        datainputs=datainputs,
    )

    assert_response_success(resp)
    out = get_output(resp.xml)
    assert "diagnostics" in out
    tmp_file, _ = urlretrieve(out["diagnostics"])
    tmp_content = open(tmp_file).readlines()

    # checking correctness of NSE (full period 1954-2010 with budget of 50 would be NSE=0.5779910)
    assert "DIAG_NASH_SUTCLIFFE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_NASH_SUTCLIFFE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 0.50717, 4, err_msg="NSE is not matching expected value")

    # checking correctness of RMSE (full period 1954-2010 with budget of 50 would be RMSE=????)
    assert "DIAG_RMSE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_RMSE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 36.373, 4, err_msg="RMSE is not matching expected value")

    # The calibrated model configuration is returned as a zip archive.
    assert "rv_config" in out
    rv_config, _ = urlretrieve(out["rv_config"])
    z = zipfile.ZipFile(rv_config)
    assert len(z.filelist) == 7

    assert "hydrograph" in out
def test_simple(self):
    """Run raven-hmets over the Salmon basin and check the NSE/RMSE diagnostics."""
    client = client_for(
        Service(processes=[RavenHMETSProcess()], cfgfiles=CFG_FILE))

    # The 21 HMETS model parameters.
    params = (
        "9.5019, 0.2774, 6.3942, 0.6884, 1.2875, 5.4134, 2.3641, 0.0973, 0.0464, 0.1998, 0.0222, -1.0919, "
        "2.6851, 0.3740, 1.0000, 0.4739, 0.0114, 0.0243, 0.0069, 310.7211, 916.1947"
    )

    datainputs = (
        "ts=files@xlink:href=file://{ts};"
        "params={params};"
        "start_date={start_date};"
        "end_date={end_date};"
        "init={init};"
        "name={name};"
        "run_name={run_name};"
        "area={area};"
        "latitude={latitude};"
        "longitude={longitude};"
        "elevation={elevation};".format(
            ts=get_local_testdata(
                "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc",
            ),
            params=params,
            start_date=dt.datetime(2000, 1, 1),
            end_date=dt.datetime(2002, 1, 1),
            init="155,455",
            name="Salmon",
            run_name="test-hmets",
            area="4250.6",
            elevation="843.0",
            latitude=54.4848,
            longitude=-123.3659,
        ))

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="raven-hmets",
        datainputs=datainputs,
    )

    assert_response_success(resp)
    out = get_output(resp.xml)
    assert "diagnostics" in out
    tmp_file, _ = urlretrieve(out["diagnostics"])
    tmp_content = open(tmp_file).readlines()

    # checking correctness of NSE (full period 1954-2011 would be NSE=0.636015 as template in Wiki)
    assert "DIAG_NASH_SUTCLIFFE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_NASH_SUTCLIFFE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, -3.0132, 4, err_msg="NSE is not matching expected value")

    # checking correctness of RMSE (full period 1954-2011 would be RMSE=28.3759 as template in wiki)
    assert "DIAG_RMSE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_RMSE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 71.9223, 4, err_msg="RMSE is not matching expected value")
# Flask front-end for a PyWPS service whose process list can grow at runtime.
# NOTE(review): the chunk ends at a bare ``if __name__ == '__main__':`` whose
# body lies outside the visible region.
from flask import Flask
from pywps import Service
from .docker_generic import DockerProcessFiles
from .docker_process import DockerRun
from .deploy_process import DeployProcess
from flask import url_for, redirect

app = Flask(__name__)

deploy = DeployProcess()
processes = [DockerProcessFiles(), deploy]
# The Service is held in a one-element list so the /deploy route can swap
# in a rebuilt instance while other closures keep referencing ``mywps``.
mywps = [Service(processes, ['pywps.cfg'])]


@app.route('/', methods=['GET'])
def root():
    return redirect(url_for('wps') + "?service=WPS&request=GetCapabilities")


@app.route('/deploy', methods=['GET'])
def deploy():
    # NOTE(review): this view shadows the module-level ``deploy`` instance
    # defined above, and returns None (Flask treats a None return as an
    # error) -- confirm whether that is intended.
    processes.append(DockerRun("newprocess"))
    mywps[0] = Service(processes, ['pywps.cfg'])


@app.route('/wps', methods=['GET', 'POST'])
def wps():
    # A PyWPS Service is itself a WSGI application, so it can be returned.
    return mywps[0]


if __name__ == '__main__':
def test_simple(self):
    """Calibrate MOHYSE with Ostrich/DDS and check the NSE/RMSE diagnostics."""
    client = client_for(
        Service(processes=[
            OstrichMOHYSEProcess(),
        ], cfgfiles=CFG_FILE))

    # Parameter and HRU bounds for the calibration.
    low_p = '0.01, 0.01, 0.01, -5.00, 0.01, 0.01, 0.01, 0.01'
    high_p = '20.0, 1.0, 20.0, 5.0, 0.5, 1.0, 1.0, 1.0'
    low_h = '0.01, 0.01'
    high_h = '15.0, 15.0'

    datainputs = "ts=files@xlink:href=file://{ts};" \
                 "algorithm={algorithm};" \
                 "max_iterations={max_iterations};" \
                 "lowerBounds={low_p};" \
                 "upperBounds={high_p};" \
                 "hruslowerBounds={low_h};" \
                 "hrusupperBounds={high_h};" \
                 "start_date={start_date};" \
                 "duration={duration};" \
                 "name={name};" \
                 "run_name={run_name};" \
                 "area={area};" \
                 "latitude={latitude};" \
                 "longitude={longitude};" \
                 "elevation={elevation};" \
                 "random_seed=0" \
        .format(ts=TESTDATA['ostrich-mohyse-nc-ts'],
                algorithm='DDS',
                max_iterations=10,
                low_p=low_p,
                high_p=high_p,
                low_h=low_h,
                high_h=high_h,
                start_date=dt.datetime(1954, 1, 1),
                duration=208,
                name='Salmon',
                run_name='test',
                area='4250.6',
                elevation='843.0',
                latitude=54.4848,
                longitude=-123.3659,
                )

    resp = client.get(
        service='WPS', request='Execute', version='1.0.0',
        identifier='ostrich-mohyse',
        datainputs=datainputs)

    assert_response_success(resp)

    out = get_output(resp.xml)
    assert 'diagnostics' in out
    tmp_file, _ = urlretrieve(out['diagnostics'])
    tmp_content = open(tmp_file).readlines()

    # checking correctness of NSE (full period 1954-2010 with budget 50 would be NSE=0.5779910)
    assert 'DIAG_NASH_SUTCLIFFE' in tmp_content[0]
    idx_diag = tmp_content[0].split(',').index("DIAG_NASH_SUTCLIFFE")
    # BUG FIX: np.float was a deprecated alias removed in NumPy >= 1.24;
    # the builtin float is equivalent here.
    diag = float(tmp_content[1].split(',')[idx_diag])
    np.testing.assert_almost_equal(
        diag, 0.3826810, 4, err_msg='NSE is not matching expected value')

    # checking correctness of RMSE (full period 1954-2010 would be RMSE=????)
    assert 'DIAG_RMSE' in tmp_content[0]
    idx_diag = tmp_content[0].split(',').index("DIAG_RMSE")
    diag = float(tmp_content[1].split(',')[idx_diag])
    np.testing.assert_almost_equal(
        diag, 40.7086, 4, err_msg='RMSE is not matching expected value')
# Pyramid-served PyWPS application exposing the ecocloud demo processes.
from pywps import Service

from ecocloud_wps_demo.processes.anuclim_daily_extract import ANUClimDailyExtract
from ecocloud_wps_demo.processes.anuclim_daily_extract_netcdf4 import ANUClimDailyExtractNetCDF4
from ecocloud_wps_demo.processes.spatial_subset_geotiff import SpatialSubsetGeotiff
from ecocloud_wps_demo.processes.spatial_subset_netcdf import SpatialSubsetNetcdf
from ecocloud_wps_demo.processes.exploratory_data_box import ExploratoryDataBox
from ecocloud_wps_demo.processes.exploratory_data_histogram import ExploratoryDataHistogram
from ecocloud_wps_demo.processes.exploratory_data_density import ExploratoryDataDensity
from ecocloud_wps_demo.processes.exploratory_data_correlation import ExploratoryDataCorrelation

# All processes published by the service.
processes = [
    ANUClimDailyExtract(),
    ANUClimDailyExtractNetCDF4(),
    SpatialSubsetGeotiff(),
    SpatialSubsetNetcdf(),
    ExploratoryDataBox(),
    ExploratoryDataHistogram(),
    ExploratoryDataDensity(),
    ExploratoryDataCorrelation(),
]

service = Service(processes, ['/etc/ecocloud/pywps.cfg'])


@view_config(route_name='wps')
@wsgiapp2
def wps_app(environ, start_response):
    # Delegate the whole request to the PyWPS WSGI service.
    response = service(environ, start_response)
    return response
def __init__(self, executor, configuration_file):
    """Keep a PyWPS service built from *configuration_file* plus an executor.

    :param executor: object used to run jobs on behalf of the service
    :param configuration_file: path to a PyWPS configuration file
    """
    self.executor = executor
    self.service = Service(cfgfiles=[configuration_file])
def test_hindcast_evaluation_rank_histo(self):
    """Verify ESP hindcasts with climpred's rank-histogram metric."""
    client = client_for(
        Service(processes=[ClimpredHindcastVerificationProcess()],
                cfgfiles=CFG_FILE))

    # Prepare the model parameters and forecast details
    model = "GR4JCN"
    params = (0.529, -3.396, 407.29, 1.072, 16.9, 0.947)
    forecast_duration = 3
    ts = get_local_testdata(
        "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc"
    )
    rvc = get_local_testdata("gr4j_cemaneige/solution.rvc")

    # Make the hindcasts for each initialization date. Here we will extract
    # ESP forecasts for a given calendar date for the years in "included_years"
    # as hindcast dates. Each ESP hindcast uses all available data in the ts dataset,
    # so in this case we will have 56/57 members for each hindcast initialization
    # depending on the date that we start on. The "hindcasts" dataset contains
    # all of the flow data from the ESP hindcasts for the initialization dates.
    # The "qobs" dataset contains all qobs in the timeseries: Climpred will
    # sort it all out during its processing. Note that the format of these datasets
    # is tailor-made to be used in climpred, and thus has specific dimension names.
    hcasts, obs = make_ESP_hindcast_dataset(
        model_name=model,
        forecast_date=dt.datetime(1955, 6, 30),
        included_years=list(range(1957, 1959)),
        forecast_duration=forecast_duration,
        ts=ts,
        area="4250.6",
        elevation="843.0",
        latitude=54.4848,
        longitude=-123.3659,
        params=params,
        rvc=str(rvc),
    )

    # Write both datasets to temporary NetCDF files for the WPS request.
    tmpdir = tempfile.mkdtemp()
    tmpfile_hcst = tmpdir + "/hcst_wps.nc"
    tmpfile_obs = tmpdir + "/qobs_wps.nc"
    hcasts.to_netcdf(tmpfile_hcst)
    obs.to_netcdf(tmpfile_obs)
    metric = "rank_histogram"

    datainputs = ("hindcasts=files@xlink:href=file://{hindcasts};"
                  "observations=files@xlink:href=file://{observations};"
                  "metric={metric}".format(
                      hindcasts=tmpfile_hcst,
                      observations=tmpfile_obs,
                      metric=metric,
                  ))

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="climpred_hindcast_verification",
        datainputs=datainputs,
    )
    assert_response_success(resp)
    out = get_output(resp.xml)
    metrics, _ = urlretrieve(out["verification_metrics"])
def test_post_with_no_inputs(self):
    """A POSTed Execute with no data inputs returns the canned answer."""
    service = Service(processes=[create_ultimate_question()])
    doc = WPS.Execute(OWS.Identifier('ultimate_question'))
    resp = client_for(service).post_xml(doc=doc)
    assert_response_success(resp)
    assert {'outvalue': '42'} == get_output(resp.xml)
def test_parallel(self):
    """Submitting three parameter sets yields three parallel simulations."""
    client = client_for(
        Service(processes=[RavenHMETSProcess()], cfgfiles=CFG_FILE))

    # Three distinct 21-parameter HMETS sets, one per ensemble member.
    params1 = (
        "9.5019, 0.2774, 6.3942, 0.6884, 1.2875, 5.4134, 2.3641, 0.0973, 0.0464, 0.1998, 0.0222, -1.0919, "
        "2.6851, 0.3740, 1.0000, 0.4739, 0.0114, 0.0243, 0.0069, 310.7211, 916.1947"
    )
    params2 = (
        "9.5, 0.3, 6, 0.6, 1.2, 5.4, 2.31, 0.09, 0.04, 0.19, 0.02, -1.09, "
        "2.6, 0.3, 1.0000, 0.4739, 0.0114, 0.0243, 0.0069, 310.7211, 916.1947"
    )
    params3 = (
        "9.5019, 0.2774, 6.3942, 0.6884, 1.2875, 5.4134, 2.3641, 0.0973, 0.0464, 0.1998, 0.0222, -1.0919, "
        "2.0, 0.37, 1.0000, 0.4, 0.01, 0.03, 0.01, 300.0, 920.0")

    # some params in Raven input files are derived from those 21 parameters
    # pdefaults.update({'GR4J_X1_hlf': pdefaults['GR4J_X1']*1000./2.0})       --> x1 * 1000. / 2.0
    # pdefaults.update({'one_minus_CEMANEIGE_X2': 1.0 - pdefaults['CEMANEIGE_X2']}) --> 1.0 - x6

    datainputs = (
        "ts=files@xlink:href=file://{ts};"
        "params={params1};"
        "params={params2};"
        "params={params3};"
        "start_date={start_date};"
        "end_date={end_date};"
        "name={name};"
        "run_name={run_name};"
        "area={area};"
        "latitude={latitude};"
        "longitude={longitude};"
        "elevation={elevation};".format(
            ts=get_local_testdata(
                "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc",
            ),
            params1=params1,
            params2=params2,
            params3=params3,
            start_date=dt.datetime(2000, 1, 1),
            end_date=dt.datetime(2002, 1, 1),
            name="Salmon",
            run_name="test",
            area="4250.6",
            elevation="843.0",
            latitude=54.4848,
            longitude=-123.3659,
        ))

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="raven-hmets",
        datainputs=datainputs,
    )

    assert_response_success(resp)
    # One simulated hydrograph per submitted parameter set.
    tmp_file, _ = urlretrieve(get_output(resp.xml)["hydrograph"])
    ds = xr.open_dataset(tmp_file)
    assert ds.variables["q_sim"].shape[0] == 3
def test_simple_alternate_hrus_param(self):
    """Run raven-gr4j-cemaneige passing the basin as a JSON 'hrus' input."""
    client = client_for(
        Service(processes=[RavenGR4JCemaNeigeProcess()], cfgfiles=CFG_FILE))

    params = "0.529, -3.396, 407.29, 1.072, 16.9, 0.947"

    # some params in Raven input files are derived from those 21 parameters
    # pdefaults.update({'GR4J_X1_hlf': pdefaults['GR4J_X1']*1000./2.0})       --> x1 * 1000. / 2.0
    # pdefaults.update({'one_minus_CEMANEIGE_X2': 1.0 - pdefaults['CEMANEIGE_X2']}) --> 1.0 - x6

    # Single land HRU for the Salmon basin, serialized as JSON.
    salmon_land_hru_1 = dict(area=4250.6,
                             elevation=843.0,
                             latitude=54.4848,
                             longitude=-123.3659)
    hrus = (GR4JCN.LandHRU(**salmon_land_hru_1), )
    hrus = json.dumps(list(map(asdict, hrus)))

    datainputs = (
        "ts=files@xlink:href=file://{ts};"
        "params={params};"
        "start_date={start_date};"
        "end_date={end_date};"
        "name={name};"
        "run_name={run_name};"
        "hrus={hrus};".format(
            ts=get_local_testdata(
                "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc",
            ),
            params=params,
            start_date=dt.datetime(2000, 1, 1),
            end_date=dt.datetime(2002, 1, 1),
            name="Salmon",
            run_name="test",
            hrus=hrus,
        ))

    resp = client.get(
        service="WPS",
        request="Execute",
        version="1.0.0",
        identifier="raven-gr4j-cemaneige",
        datainputs=datainputs,
    )

    assert_response_success(resp)
    out = get_output(resp.xml)
    assert "diagnostics" in out
    tmp_file, _ = urlretrieve(out["diagnostics"])
    tmp_content = open(tmp_file).readlines()

    # checking correctness of NSE (full period 1954-2010 would be NSE=0.511214)
    assert "DIAG_NASH_SUTCLIFFE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_NASH_SUTCLIFFE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, -0.117301, 4, err_msg="NSE is not matching expected value")

    # checking correctness of RMSE (full period 1954-2010 would be RMSE=32.8827)
    assert "DIAG_RMSE" in tmp_content[0]
    idx_diag = tmp_content[0].split(",").index("DIAG_RMSE")
    diag = float(tmp_content[1].split(",")[idx_diag])
    np.testing.assert_almost_equal(
        diag, 37.9493, 4, err_msg="RMSE is not matching expected value")

    # The generated model configuration is returned as a zip archive.
    assert "rv_config" in out
    rv_config, _ = urlretrieve(out["rv_config"])
    z = zipfile.ZipFile(rv_config)
    assert len(z.filelist) == 5
# Flask front-end for a single-process PyWPS service, with an /outputs
# route serving result files from disk.
import os

import flask
from flask_cors import CORS, cross_origin
from pywps import Service
from wps.sayHiWps import SayHiWps

app = flask.Flask(__name__)
cors = CORS(app)

processes = [SayHiWps()]
service = Service(processes=processes, cfgfiles=['wps/pywps.cfg'])


@app.route('/wps/', methods=['GET', 'POST'])
@cross_origin(headers=['Content-Type'])
def wps():
    # A PyWPS Service is itself a WSGI application, so it can be returned.
    return service


@app.route('/outputs/<path:filename>')
def outputfile(filename):
    """Serve *filename* from the local ``outputs`` directory, or 404."""
    # BUG FIX: ``os`` was used here without being imported, which raised
    # NameError on the first request; ``import os`` added at the top.
    targetfile = os.path.join('outputs', filename)
    if os.path.isfile(targetfile):
        file_ext = os.path.splitext(targetfile)[1]
        with open(targetfile, mode='rb') as f:
            file_bytes = f.read()
        mime_type = None
        if 'xml' in file_ext:
            mime_type = 'text/xml'
        return flask.Response(file_bytes, content_type=mime_type)
    else:
        flask.abort(404)
def test_simple(self): client = client_for( Service(processes=[RavenHBVECProcess()], cfgfiles=CFG_FILE)) params = ( "0.05984519, 4.072232, 2.001574, 0.03473693, 0.09985144, 0.5060520, 3.438486, 38.32455, " "0.4606565, 0.06303738, 2.277781, 4.873686, 0.5718813, 0.04505643, 0.877607, 18.94145, " "2.036937, 0.4452843, 0.6771759, 1.141608, 1.024278") datainputs = ( "ts=files@xlink:href=file://{ts};" "params={params};" "start_date={start_date};" "end_date={end_date};" "init={init};" "name={name};" "run_name={run_name};" "area={area};" "latitude={latitude};" "longitude={longitude};" "elevation={elevation};".format( ts=get_local_testdata( "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc", ), params=params, start_date=dt.datetime(2000, 1, 1), end_date=dt.datetime(2002, 1, 1), init="155,455", name="Salmon", run_name="test-hbv-ec", area="4250.6", elevation="843.0", latitude=54.4848, longitude=-123.3659, )) resp = client.get( service="WPS", request="Execute", version="1.0.0", identifier="raven-hbv-ec", datainputs=datainputs, ) assert_response_success(resp) out = get_output(resp.xml) assert "diagnostics" in out tmp_file, _ = urlretrieve(out["diagnostics"]) tmp_content = open(tmp_file).readlines() # checking correctness of NSE (full period 1954-2011 would be NSE=0.591707 as template in Wiki) assert "DIAG_NASH_SUTCLIFFE" in tmp_content[0] idx_diag = tmp_content[0].split(",").index("DIAG_NASH_SUTCLIFFE") diag = float(tmp_content[1].split(",")[idx_diag]) np.testing.assert_almost_equal( diag, 0.0186633, 4, err_msg="NSE is not matching expected value") # checking correctness of RMSE (full period 1954-2011 would be RMSE=30.0535 as template in Wiki) assert "DIAG_RMSE" in tmp_content[0] idx_diag = tmp_content[0].split(",").index("DIAG_RMSE") diag = float(tmp_content[1].split(",")[idx_diag]) np.testing.assert_almost_equal( diag, 35.5654, 3, err_msg="RMSE is not matching expected value")
def test_get_with_no_inputs(self): client = client_for(Service(processes=[create_ultimate_question()])) resp = client.get('?service=wps&version=1.0.0&Request=Execute&identifier=ultimate_question') assert_response_success(resp) assert get_output(resp.xml) == {'outvalue': '42'}
def test_parallel(self): client = client_for( Service(processes=[RavenHBVECProcess()], cfgfiles=CFG_FILE)) params1 = ( "0.05984519, 4.072232, 2.001574, 0.03473693, 0.09985144, 0.5060520, 3.438486, 38.32455, " "0.4606565, 0.06303738, 2.277781, 4.873686, 0.5718813, 0.04505643, 0.877607, 18.94145, " "2.036937, 0.4452843, 0.6771759, 1.141608, 1.024278") params2 = ("0.05, 4.07, 2.00, 0.03, 0.099, 0.506, 3.43, 38., " "0.47, 0.06, 2.2, 4.87, 0.5, 0.0, 0.8, 18.5, " "2.0, 0.445, 0.677, 1.14, 1.02") params3 = ( "0.05984519, 4.072232, 2.001574, 0.03473693, 0.09985144, 0.5060520, 3.438486, 38.32455, " "0.4606565, 0.06303738, 2.277781, 4.873686, 0.5718813, 0.04505643, 0.877607, 18.94145, " "2.036937, 0.4452843, 0.6771759, 1.141608, 1.024278") # NOTE THAT ALL PARAMETERS IN SETS 1 and 3 ARE THE SAME. THEY SHOULD RETURN THE SAME RESULTS. datainputs = ( "ts=files@xlink:href=file://{ts};" "params={params1};" "params={params2};" "params={params3};" "start_date={start_date};" "end_date={end_date};" "name={name};" "run_name={run_name};" "area={area};" "latitude={latitude};" "longitude={longitude};" "elevation={elevation};".format( ts=get_local_testdata( "raven-gr4j-cemaneige/Salmon-River-Near-Prince-George_meteo_daily.nc", ), params1=params1, params2=params2, params3=params3, start_date=dt.datetime(2000, 1, 1), end_date=dt.datetime(2002, 1, 1), name="Salmon", run_name="test", area="4250.6", elevation="843.0", latitude=54.4848, longitude=-123.3659, )) resp = client.get( service="WPS", request="Execute", version="1.0.0", identifier="raven-hbv-ec", datainputs=datainputs, ) assert_response_success(resp) tmp_file, _ = urlretrieve(get_output(resp.xml)["hydrograph"]) ds = xr.open_dataset(tmp_file) assert ds.variables["q_sim"].shape[0] == 3 # THIS TEST FAILS. PARAMETERS ARE NOT PASSED IN ORDER...? assert all(ds.variables["q_sim"][0, :] == ds.variables["q_sim"][2, :])
def test_missing_process_error(self): client = client_for(Service(processes=[create_ultimate_question()])) resp = client.get('?Request=Execute&identifier=foo') assert resp.status_code == 400
def describe_process(self, process): client = client_for(Service(processes=[process])) resp = client.get('?service=wps&version=1.0.0&Request=DescribeProcess&identifier=%s' % process.identifier) [result] = get_describe_result(resp) return result