def create_app():
    """Create and configure the demo Flask app exposing the WPS service.

    Returns the Flask application with three routes: a home page, the
    /wps endpoint serving the pywps Service, and a /datafile/<uuid>
    endpoint serving previously produced data files.
    """
    service = Service(processes=[
        Process(say_hello,
                inputs=[LiteralInput('name', 'string')],
                outputs=[LiteralOutput('response', 'string')]),
        # BUG FIX: the output of feature_count was declared as a
        # ComplexInput; outputs must be ComplexOutput instances.
        Process(feature_count,
                inputs=[ComplexInput('layer', [Format(Formats.GML)])],
                outputs=[ComplexOutput('layer', [Format(Formats.GML)])]),
        Process(centroids,
                inputs=[ComplexInput('layer', [Format(Formats.GML)])]),
    ])

    app = flask.Flask(__name__)

    @app.route('/')
    def home():
        url = flask.url_for('wps', _external=True)
        return flask.render_template('home.html', url=url)

    @app.route('/wps', methods=['GET', 'POST'])
    def wps():
        return service

    @app.route('/datafile/<uuid>')
    def datafile(uuid):
        # Serve a recently produced data file by uuid; 404 when unknown.
        for data_file in recent_data_files:
            if data_file['uuid'] == uuid:
                return flask.Response(data_file['bytes'])
        flask.abort(404)

    return app
def esgf_api(F):
    """Decorator extending a process constructor ``F`` with the standard
    ESGF-API JSON inputs (variable, domain, operation) and JSON output,
    and tagging the process profile with 'ESGF-API'."""
    # variable and domain are mandatory; operation is optional.
    api_inputs = [
        ComplexInput(name, name,
                     abstract="",
                     supported_formats=[FORMATS.JSON],
                     min_occurs=minimum,
                     max_occurs=1,
                     mode=MODE.SIMPLE)
        for name, minimum in (('variable', 1), ('domain', 1), ('operation', 0))
    ]
    api_outputs = [
        ComplexOutput('output', 'Output',
                      as_reference=False,
                      supported_formats=[FORMATS.JSON]),
    ]

    def wrapper(self):
        F(self)
        self.profile.append('ESGF-API')
        self.inputs.extend(api_inputs)
        self.outputs.extend(api_outputs)

    return wrapper
def __init__(self):
    """Declare inputs/outputs and register the GeoTIFF spatial-subset process."""
    # GeoTIFF raster to be subset.
    datafile = ComplexInput(
        'datafile', 'GeoTIFF datafile',
        supported_formats=[Format('image/tiff')],
        min_occurs=1, max_occurs=1,
        # NOTE: Can't validate GeoTIFFs at the moment
        mode=MODE.NONE)
    # Zipped ESRI Shapefile supplying the subset geometry.
    shapefile = ComplexInput(
        'shapefile',
        '.zip file representing ESRI Shapefile of geometry to use for subset',
        supported_formats=[Format('application/zip')],
        min_occurs=1, max_occurs=1,
        # NOTE: No validator for ZIP files
        mode=MODE.NONE)
    result = ComplexOutput(
        'output', 'Output data',
        as_reference=True,
        supported_formats=[Format('text/plain')])
    super(SpatialSubsetGeotiff, self).__init__(
        self._handler,
        identifier='spatial_subset_geotiff',
        title='GeoTIFF data spatial subset',
        abstract="Subsets a given GeoTIFF file with given spatial data/geometry",
        version='1',
        metadata=[],
        inputs=[datafile, shapefile],
        outputs=[result],
        store_supported=True,
        status_supported=True)
def test_build_input_file_name(self):
    """File names derived from URLs land in the workdir, keep their
    extension, honor the data format, and never clobber existing files."""
    from pywps.inout.basic import ComplexInput
    inp = ComplexInput('ci')
    inp.workdir = workdir = tempfile.mkdtemp()
    cases = [
        ('http://path/to/test.txt', 'test.txt'),
        ('http://path/to/test', 'test'),
        ('file://path/to/.config', '.config'),
        ('https://path/to/test.txt?token=abc&expires_at=1234567', 'test.txt'),
    ]
    for url, expected in cases:
        self.assertEqual(inp._build_file_name(url),
                         os.path.join(workdir, expected))
    # With a known data format the matching extension is appended.
    inp.supported_formats = [FORMATS.TEXT]
    inp.data_format = FORMATS.TEXT
    self.assertEqual(inp._build_file_name('http://path/to/test'),
                     os.path.join(workdir, 'test.txt'))
    # A colliding file name gets a unique suffix, extension preserved.
    open(os.path.join(workdir, 'duplicate.html'), 'a').close()
    generated = inp._build_file_name('http://path/to/duplicate.html')
    self.assertTrue(generated.startswith(os.path.join(workdir, 'duplicate_')))
    self.assertTrue(generated.endswith('.html'))
def __init__(self):
    """Register the forecast-uncertainty graphing process."""
    fcst = ComplexInput(
        "fcst", "Stream flow forecasts",
        abstract="Stream flow forecast time series",
        supported_formats=[FORMATS.NETCDF],
    )
    fcst_var = LiteralInput(
        "fcst_var", "Streamflow forecast variable name",
        abstract="Name of the streamflow variable in fcst",
        data_type="string",
        min_occurs=0, max_occurs=1,
        default="q_sim",
    )
    qobs = ComplexInput(
        "qobs", "Stream flow observation",
        abstract="Stream flow observation for hindcasting",
        supported_formats=[FORMATS.NETCDF],
        min_occurs=0, max_occurs=1,
    )
    qobs_var = LiteralInput(
        "qobs_var", "Streamflow observation variable name",
        abstract="Name of the streamflow variable in qobs",
        data_type="string",
        min_occurs=0, max_occurs=1,
        default="q_obs",
    )
    graph = ComplexOutput(
        "graph_forecasts", "Figure showing the forecast hydrographs.",
        abstract="Figure showing the forecast hydrographs",
        as_reference=True,
        supported_formats=(Format(mime_type="image/png"),),
    )
    super(GraphFcstUncertaintyProcess, self).__init__(
        self._handler,
        identifier="graph_forecast_uncertainty",
        title="",
        version="1.0",
        abstract="",
        metadata=[],
        inputs=[fcst, fcst_var, qobs, qobs_var],
        outputs=[graph],
        keywords=[],
        status_supported=True,
        store_supported=True,
    )
def test_complex_input_identifier(self):
    """DescribeProcess XML for a ComplexInput carries its identifier."""
    inpt = ComplexInput('foo', 'Complex foo',
                        supported_formats=[Format('bar/baz')])
    xml = inpt.describe_xml()
    self.assertEqual(xml.tag, E.Input().tag)
    [ident] = xpath_ns(xml, './ows:Identifier')
    self.assertEqual(ident.text, 'foo')
def test_complex_input_identifier(self):
    """Identifier and keywords are both serialized by describe_xml()."""
    inpt = ComplexInput('foo', 'Complex foo',
                        keywords=['kw1', 'kw2'],
                        supported_formats=[Format('bar/baz')])
    xml = inpt.describe_xml()
    self.assertEqual(xml.tag, E.Input().tag)
    [ident] = xpath_ns(xml, './ows:Identifier')
    self.assertEqual(ident.text, 'foo')
    keyword_els = xpath_ns(xml, './ows:Keywords/ows:Keyword')
    self.assertEqual(len(keyword_els), 2)
def test_complex_input_identifier(self):
    """describe_xml() emits an Input element carrying the identifier."""
    inpt = ComplexInput('foo', 'Complex foo',
                        supported_formats=[Format('bar/baz')])
    xml = inpt.describe_xml()
    assert xml.tag == E.Input().tag
    [ident] = xpath_ns(xml, './ows:Identifier')
    assert ident.text == 'foo'
def __init__(self):
    """Register the climpred hindcast-verification process."""
    inputs = [
        ComplexInput(
            "hindcasts",
            "3-dimensional xarray dataset / netcdf with hindcasts",
            abstract="The 3D netcdf dataset that contains the init, member and lead variables",
            supported_formats=[FORMATS.NETCDF],
            min_occurs=1,
            max_occurs=1,
        ),
        ComplexInput(
            "observations",
            "1-dimensional xarray dataset / netcdf with flow observations",
            abstract="The 1D netcdf with the observed streamflow for verification",
            supported_formats=[FORMATS.NETCDF],
            min_occurs=1,
            max_occurs=1,
        ),
        LiteralInput(
            "metric",
            'Verification metric. Can be ["rank_histogram","crps" or "reliability"]',
            data_type="string",
            abstract='Name of the verification metric. Can be ["rank_histogram","crps" or "reliability"]',
            min_occurs=1,
            max_occurs=1,
        ),
    ]
    outputs = [
        ComplexOutput(
            "verification_metrics",
            "The verification_metrics dataset as computed by climpred, ready to plot.",
            supported_formats=[FORMATS.NETCDF],
            abstract="Netcdf file including the verification metrics that can be used for plotting hindcast performance. Contents vary according to input metric",
            as_reference=True,
        )
    ]
    super(ClimpredHindcastVerificationProcess, self).__init__(
        self._handler,
        identifier="climpred_hindcast_verification",
        title="",
        version="1.0",
        abstract="",
        metadata=[],
        inputs=inputs,
        outputs=outputs,
        keywords=[],
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Register the distribution-fit graphing process.

    BUG FIX: the 'variable' abstract was built from two concatenated
    string fragments with no separating space, rendering as
    "...the first data variablefound in file."
    """
    ts = ComplexInput('ts', 'Stream flow time series',
                      abstract='Stream flow time series',
                      supported_formats=[FORMATS.NETCDF])
    params = ComplexInput(
        'params', 'Distribution parameters',
        abstract='Statistical distribution parameters fitted to time series',
        supported_formats=[FORMATS.NETCDF])
    variable = LiteralInput(
        'variable', "Variable name",
        abstract="Name of time series variable. If none will default to the first data variable "
                 "found in file.",
        data_type='string',
        min_occurs=0,
        default="")
    fmt = LiteralInput('format', "Output graphic format",
                       abstract="Graphic format.",
                       data_type='string',
                       default='png',
                       min_occurs=0,
                       allowed_values=['png', 'jpeg', 'pdf'])
    graph = ComplexOutput(
        'graph_fit', 'Graphic',
        abstract="Graphic showing time series histogram and the probability density "
                 "function of the fitted distribution.",
        as_reference=True,
        supported_formats=(
            Format(mime_type='image/png'),
            Format(mime_type='image/jpeg'),
            Format(mime_type='application/pdf'),
            Format(mime_type='application/json'),
        ))
    super(GraphFitProcess, self).__init__(
        self._handler,
        identifier="graph_fit",
        title="",
        version="1.0",
        abstract="",
        metadata=[],
        inputs=[ts, params, variable, fmt],
        outputs=[graph],
        keywords=[],
        status_supported=True,
        store_supported=True)
def _make_bccaqv2_resource_input(url):
    """Build the standard BCCAQv2 'resource' NetCDF input bound to *url*.

    FIX: the local was named ``input``, shadowing the builtin.
    """
    resource_input = ComplexInput(
        "resource",
        "NetCDF resource",
        max_occurs=1000,
        supported_formats=[FORMATS.NETCDF, FORMATS.DODS],
    )
    # The source URL is attached directly so callers can resolve it later.
    resource_input.url = url
    return resource_input
def input_formats(self):
    """Complex inputs accepted by this process: a polygon plus a date range."""
    geometry = ComplexInput("geometry", "Geometry",
                            supported_formats=[FORMATS["polygon"]])
    start = ComplexInput("start", "Start Date",
                         supported_formats=[FORMATS["datetime"]])
    end = ComplexInput("end", "End date",
                       supported_formats=[FORMATS["datetime"]])
    return [geometry, start, end]
def input_formats(self):
    """Complex inputs accepted by this process: a point plus a date range."""
    location = ComplexInput("geometry", "Location (Lon, Lat)",
                            supported_formats=[FORMATS["point"]])
    start = ComplexInput("start", "Start Date",
                         supported_formats=[FORMATS["datetime"]])
    end = ComplexInput("end", "End date",
                       supported_formats=[FORMATS["datetime"]])
    return [location, start, end]
def test_build_input_file_name(self):
    """_build_file_name places files in the workdir, appends the format
    extension when known, and uniquifies colliding names."""
    from pywps.inout.basic import ComplexInput
    ci = ComplexInput('ci')
    ci.workdir = workdir = tempfile.mkdtemp()
    expectations = {
        'http://path/to/test.txt': 'test.txt',
        'http://path/to/test': 'test',
        'file://path/to/.config': '.config',
        'https://path/to/test.txt?token=abc&expires_at=1234567': 'test.txt',
    }
    for url, name in expectations.items():
        self.assertEqual(ci._build_file_name(url),
                         os.path.join(workdir, name))
    # Once a data format is set, its extension is appended.
    ci.supported_formats = [FORMATS.TEXT]
    ci.data_format = FORMATS.TEXT
    self.assertEqual(ci._build_file_name('http://path/to/test'),
                     os.path.join(workdir, 'test.txt'))
    # An already-existing target file forces a uniquified stem.
    open(os.path.join(workdir, 'duplicate.html'), 'a').close()
    result = ci._build_file_name('http://path/to/duplicate.html')
    self.assertTrue(result.startswith(os.path.join(workdir, 'duplicate_')))
    self.assertTrue(result.endswith('.html'))
def test_complex_input_default_and_supported(self):
    """First allowed format is advertised as default; all appear as supported.

    FIX: the local was named ``complex``, shadowing the builtin.
    """
    complex_in = ComplexInput('foo', 'Complex foo',
                              allowed_formats=[Format('a/b'), Format('c/d')])
    doc = complex_in.describe_xml()
    [default_format] = xpath_ns(doc, './ComplexData/Default/Format')
    [default_mime_el] = xpath_ns(default_format, './MimeType')
    assert default_mime_el.text == 'a/b'
    supported_mime_types = []
    for supported_el in xpath_ns(doc, './ComplexData/Supported/Format'):
        [mime_el] = xpath_ns(supported_el, './MimeType')
        supported_mime_types.append(mime_el.text)
    assert supported_mime_types == ['a/b', 'c/d']
def test_complex_input_default_and_supported(self):
    """The first supported format becomes the advertised default."""
    inpt = ComplexInput("foo", "Complex foo",
                        supported_formats=[Format("a/b"), Format("c/d")])
    xml = inpt.describe_xml()
    [default_fmt] = xpath_ns(xml, "./ComplexData/Default/Format")
    [default_mime] = xpath_ns(default_fmt, "./MimeType")
    assert default_mime.text == "a/b"
    found = []
    for fmt_el in xpath_ns(xml, "./ComplexData/Supported/Format"):
        [mime] = xpath_ns(fmt_el, "./MimeType")
        found.append(mime.text)
    assert found == ["a/b", "c/d"]
def test_complex_input_default_and_supported(self):
    """First format is the default and every format is listed as supported.

    FIX: the local was named ``complex``, shadowing the builtin.
    """
    complex_in = ComplexInput('foo', [Format('a/b'), Format('c/d')])
    doc = complex_in.describe_xml()
    [default_format] = xpath_ns(doc, './ComplexData/Default/Format')
    [default_mime_el] = xpath_ns(default_format, './ows:MimeType')
    assert default_mime_el.text == 'a/b'
    supported_mime_types = []
    for supported_el in xpath_ns(doc, './ComplexData/Supported/Format'):
        [mime_el] = xpath_ns(supported_el, './ows:MimeType')
        supported_mime_types.append(mime_el.text)
    assert supported_mime_types == ['a/b', 'c/d']
def __init__(self):
    """Register the polygon spatial/temporal subsetting process.

    BUG FIX: the 'variable' abstract concatenated two fragments without
    a separating space, rendering as "...NetCDF file.If not provided...".
    """
    inputs = [
        ComplexInput('resource', 'NetCDF resource',
                     abstract='NetCDF files, can be OPEnDAP urls.',
                     max_occurs=1,
                     supported_formats=[FORMATS.NETCDF, FORMATS.DODS]),
        ComplexInput('polygon', 'Contour of region of interest',
                     max_occurs=1,
                     supported_formats=[FORMATS.SHP]),
        LiteralInput('dt0', 'Initial datetime',
                     abstract='Initial datetime for temporal subsetting.',
                     data_type='dateTime',
                     default=None,
                     min_occurs=0,
                     max_occurs=1),
        LiteralInput('dt1', 'Final datetime',
                     abstract='Final datetime for temporal subsetting.',
                     data_type='dateTime',
                     default=None,
                     min_occurs=0,
                     max_occurs=1),
        LiteralInput('variable', 'Variable',
                     abstract=('Name of the variable in the NetCDF file. '
                               'If not provided, all variables will be subsetted.'),
                     data_type='string',
                     min_occurs=0)
    ]
    outputs = [
        ComplexOutput('output', 'netCDF output',
                      as_reference=True,
                      supported_formats=[FORMATS.NETCDF]),
    ]
    super(SubsetPolyProcess, self).__init__(
        self._handler,
        identifier='subset_polygon',
        title='Subset with a polygon',
        version='0.1',
        abstract=(
            'Return the data for which grid cells intersect the polygon '
            'for each input dataset as well as the time range selected.'),
        inputs=inputs,
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Register the NetCDF metadata-update process."""
    self.status_percentage_steps = {
        "start": 0,
        "process": 10,
        "build_output": 95,
        "complete": 100,
    }
    netcdf = ComplexInput(
        "netcdf", "Daily NetCDF Dataset",
        abstract="NetCDF file",
        min_occurs=1, max_occurs=1,
        supported_formats=[FORMATS.NETCDF, FORMATS.DODS],
    )
    updates_file = ComplexInput(
        "updates_file", "Updates File(yaml)",
        abstract="The filepath of an updates file that specifies what to do to the metadata it finds in the NetCDF file",
        min_occurs=0, max_occurs=1,
        supported_formats=[Format(mime_type="text/x-yaml", extension=".yaml")],
    )
    updates_string = LiteralInput(
        "updates_string", "Updates String(yaml format)",
        abstract="The string in yaml format that specifies what to do to the metadata it finds in the NetCDF file",
        min_occurs=0, max_occurs=1,
        data_type="string",
    )
    super(UpdateMetadata, self).__init__(
        self._handler,
        identifier="update_metadata",
        title="Update NetCDF Metadata",
        abstract="Update file containing missing, invalid, or incorrectly named global or variable metadata attributes",
        store_supported=True,
        status_supported=True,
        inputs=[netcdf, updates_file, updates_string, log_level],
        outputs=[nc_output],
    )
def __init__(self):
    """Register the watershed-delineation process.

    BUG FIX: the 'boundary' output declared ``supported_formats=FORMATS.GML``
    (a bare format object); pywps expects a list, as every other declaration
    in this process uses.
    """
    inputs = [
        LiteralInput('latitude', 'Outlet latitude',
                     data_type='float',
                     abstract='Latitudinal coordinate of the watershed outlet.'),
        LiteralInput('longitude', 'Outlet longitude',
                     data_type='float',
                     abstract='Longitudinal coordinate of the watershed outlet.'),
        LiteralInput('name', 'Watershed name',
                     data_type='string',
                     abstract='Name of the watershed.'),
        ComplexInput('dem', 'Digital Elevation Model',
                     abstract='An URL pointing at the DEM to be used to compute the watershed boundary. Defaults '
                              'to the HydroSheds DEM.',
                     # TODO: Include details (resolution, version).
                     metadata=[Metadata('HydroSheds Database', 'http://hydrosheds.org'),
                               Metadata(
                                   'Lehner, B., Verdin, K., Jarvis, A. (2008): New global '
                                   'hydrography derived from '
                                   'spaceborne elevation data. Eos, Transactions, AGU, 89(10): 93-94.',
                                   'https://doi.org/10.1029/2008EO100001')],
                     min_occurs=0,
                     default='',  # TODO: Enter default DEM from PAVICS
                     supported_formats=[FORMATS.GEOTIFF, FORMATS.GML, FORMATS.WCS]),
        ComplexInput('dir', 'Flow direction grid',
                     abstract='An URL pointing at the flow direction grid to be used to compute the watershed '
                              'boundary. Defaults to the HydroSheds product. If both the DEM and the flow '
                              'direction are give, the flow direction supercedes the DEM.',
                     # TODO: Include details (resolution, version).
                     metadata=[Metadata('HydroSheds Database', 'http://hydrosheds.org'),
                               Metadata(
                                   'Lehner, B., Verdin, K., Jarvis, A. (2008): New global '
                                   'hydrography derived from '
                                   'spaceborne elevation data. Eos, Transactions, AGU, 89(10): 93-94.',
                                   'https://doi.org/10.1029/2008EO100001')],
                     min_occurs=0,
                     default='',  # TODO: Enter default DIR from PAVICS
                     supported_formats=[FORMATS.GEOTIFF, FORMATS.GML, FORMATS.WCS]),
    ]
    outputs = [
        ComplexOutput('boundary', 'Watershed boundary',
                      abstract='A polygon defining the watershed boundary.',
                      as_reference=True,
                      supported_formats=[FORMATS.GML]),
    ]
    super(WatershedDelineation, self).__init__(
        self._pysheds_handler,
        identifier="watershed_delineation",
        title="Watershed delineation algorithm",
        version="1.0",
        abstract="Return the boundary of a watershed computed using a digital elevation model.",
        metadata=[],
        inputs=inputs,
        outputs=outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Register the rule-evaluation process."""
    self.status_percentage_steps = {
        "start": 0,
        "process": 10,
        "build_output": 95,
        "complete": 100,
    }
    rules = LiteralInput(
        "rules", "Rules",
        abstract="Rule expressions",
        min_occurs=1, max_occurs=100,
        data_type="string",
    )
    parse_tree = ComplexInput(
        "parse_tree", "Parse Tree Dictionary",
        abstract="File path to dictionary used for rule getter function",
        supported_formats=[FORMATS.JSON],
    )
    variables = ComplexInput(
        "variables", "Variable Dictionary",
        abstract="File path to dictionary used for variables",
        supported_formats=[FORMATS.JSON],
    )
    super(EvaluateRule, self).__init__(
        self._handler,
        identifier="evaluate_rule",
        title="Evaluate Rule",
        abstract="Evaluate parse trees to determine truth value of a rule",
        keywords=["evaluate", "rule"],
        metadata=[
            Metadata("PyWPS", "https://pywps.org/"),
            Metadata("Birdhouse", "http://bird-house.github.io/"),
            Metadata("PyWPS Demo", "https://pywps-demo.readthedocs.io/en/latest/"),
        ],
        version="0.1.0",
        inputs=[rules, parse_tree, variables, log_level],
        outputs=[json_output],
        store_supported=True,
        status_supported=True,
    )
def test_dods_validator(self):
    """Validate a live OPeNDAP resource at every validation mode."""
    dods = ComplexInput('dods', 'opendap test', [FORMATS.DODS, ])
    dods.url = ("http://test.opendap.org:80/opendap/netcdf/examples/"
                "sresa1b_ncar_ccsm3_0_run1_200001.nc")
    self.assertTrue(validatedods(dods, MODE.NONE), 'NONE validation')
    self.assertTrue(validatedods(dods, MODE.SIMPLE), 'SIMPLE validation')
    if not WITH_NC4:
        # Without netCDF4 the STRICT validator cannot succeed.
        self.assertFalse(validatedods(dods, MODE.STRICT), 'STRICT validation')
    else:
        self.assertTrue(validatedods(dods, MODE.STRICT), 'STRICT validation')
        dods.url = 'Faulty url'
        self.assertFalse(validatedods(dods, MODE.STRICT))
def __init__(self):
    """Register the SpotPy objective-function process."""
    obs = ComplexInput('obs', 'Stream flow observation',
                       abstract='Steam flow observation time series',
                       supported_formats=(FORMATS.NETCDF,))
    sim = ComplexInput('sim', 'Stream flow simulation',
                       abstract='Stream flow simulation time series',
                       supported_formats=(FORMATS.NETCDF,))
    name = LiteralInput(
        'name', 'Objective function name',
        abstract="One or multiple objective function name. If None, defaults to all.",
        data_type='string',
        allowed_values=tuple(funcs.keys()),
        default=None,
        min_occurs=0,
        max_occurs=17)
    metrics = ComplexOutput(
        'metrics', 'Objective function values',
        abstract="Returns up to 17 objective function values, depending on the user's "
                 "requests. By default all 17 are returned. JSON dictionary format.",
        supported_formats=(FORMATS.JSON,))
    super(ObjectiveFunctionProcess, self).__init__(
        self._handler,
        identifier="objective-function",
        title="Objective-function process based on SpotPy and its 17 objective functions.",
        version="1.0",
        abstract="This process takes two NETCDF files (one containing variable 'q_sim' and the other 'q_obs') "
                 "and computes objective-function metrics between them.",
        metadata=[
            Metadata("SPOTPY Documentation",
                     "http://fb09-pasig.umwelt.uni-giessen.de/spotpy/")
        ],
        inputs=[obs, sim, name],
        outputs=[metrics],
        keywords=["objective functions", "hydrological signatures", "optimization"] + list(funcs.keys()),
        status_supported=True,
        store_supported=True)
def test_complex_input_default_and_supported(self):
    """The first supported format becomes the advertised default."""
    inpt = ComplexInput(
        'foo', 'Complex foo',
        supported_formats=[Format('a/b'), Format('c/d')])
    xml = inpt.describe_xml()
    [default_fmt] = xpath_ns(xml, './ComplexData/Default/Format')
    [default_mime] = xpath_ns(default_fmt, './MimeType')
    self.assertEqual(default_mime.text, 'a/b')
    found = []
    for fmt_el in xpath_ns(xml, './ComplexData/Supported/Format'):
        [mime] = xpath_ns(fmt_el, './MimeType')
        found.append(mime.text)
    self.assertEqual(found, ['a/b', 'c/d'])
def __init__(self):
    """Register the RVIC parameter-conversion process."""
    self.status_percentage_steps = dict(
        common_status_percentages,
        **{"config_rebuild": 10},
    )
    uhs_files = ComplexInput(
        "uhs_files", "UHS_Files",
        abstract="Path to UHS file",
        min_occurs=1,
        supported_formats=[Format("text/plain", extension=".uhs_s2")],
    )
    station_file = ComplexInput(
        "station_file", "Station_FILE",
        abstract="Path to stations file",
        min_occurs=1, max_occurs=1,
        supported_formats=[FORMATS.TEXT],
    )
    config_file = ComplexInput(
        "config_file", "Convert Configuration",
        abstract="Path to input configuration file for Convert process",
        min_occurs=1, max_occurs=1,
        supported_formats=[Format("text/cfg", extension=".cfg")],
    )
    super(Convert, self).__init__(
        self._handler,
        identifier="convert",
        title="Parameter Conversion",
        abstract="A simple conversion utility to provide users with the ability to convert old routing model setups into RVIC parameters.",
        inputs=[log_level, uhs_files, station_file, domain, config_file],
        outputs=[nc_output],
        store_supported=True,
        status_supported=True,
    )
def __init__(self):
    """Register the netCDF-URL to DAP-URL conversion process."""
    resource = ComplexInput(
        'resource', "NetCDF file",
        abstract="Link to NetCDF or NcML file on this server",
        # FORMATS.NCML], to become available in PyWPS 4.2.5
        supported_formats=[FORMATS.NETCDF],
        min_occurs=1,
        max_occurs=1)
    dap = ComplexOutput('dap', 'DAP url',
                        as_reference=True,
                        supported_formats=[FORMATS.DODS])
    super(NcToDap, self).__init__(
        self._handler,
        identifier='nc_to_dap',
        title="Convert file URL to DAP URL",
        abstract="Return Data Access Protocol link to a netCDF or NcML file.",
        version="1",
        metadata=[
            MetadataUrl('User Guide',
                        'http://emu.readthedocs.io/en/latest/',
                        anonymous=True),
        ],
        inputs=[resource],
        outputs=[dap],
        store_supported=True,
        status_supported=True)
def connected_to(self, task_input, upstream_task, upstream_task_output):
    """
    Override TaskPE fct. See TaskPE.connected_to for details.
    The ReducePE uses the upstream task output format to set its own input
    format, and it is set upon connection.
    """
    # The supported input description mirrors the upstream task's output.
    up_task_out_desc = upstream_task.get_output_desc(upstream_task_output)
    params = dict(identifier=self.REDUCE_INPUT,
                  title=self.REDUCE_INPUT,
                  min_occurs=1,
                  # BUG FIX: sys.maxint exists only on Python 2;
                  # sys.maxsize is the portable equivalent.
                  max_occurs=sys.maxsize)
    if up_task_out_desc.dataType == 'ComplexData':
        params['supported_formats'] = [
            Format(mime_type=up_task_out_desc.defaultValue.mimeType,
                   schema=up_task_out_desc.defaultValue.schema,
                   encoding=up_task_out_desc.defaultValue.encoding)
        ]
        self.input_desc = Input(ComplexInput(**params).describe_xml())
    elif up_task_out_desc.dataType == 'BoundingBoxData':
        params['crss'] = up_task_out_desc.supportedValues
        self.input_desc = Input(BoundingBoxInput(**params).describe_xml())
    else:
        params['data_type'] = up_task_out_desc.dataType
        self.input_desc = Input(LiteralInput(**params).describe_xml())
def __init__(self):
    """Register the NetCDF temporal-merge process."""
    resource = ComplexInput(
        'resource', 'NetCDF resource',
        abstract='NetCDF files, can be OPEnDAP urls.',
        max_occurs=1000,
        supported_formats=[Format('application/x-netcdf'),
                           Format('application/x-tar'),
                           Format('application/zip')])
    merged = ComplexOutput(
        'output', 'Merged NetCDF files',
        abstract='Temporally merged NetCDF files.',
        as_reference=True,
        supported_formats=[Format('application/x-netcdf')])
    super(NCMergeProcess, self).__init__(
        self._handler,
        identifier='ncmerge',
        title='NetCDF merge',
        version='0.1',
        abstract=('Merge NetCDF files in the time dimension.'),
        inputs=[resource],
        outputs=[merged],
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Register a dummy process whose identifiers are not valid Python names."""
    in_literal = LiteralInput('input 1', 'Input1 number',
                              default='100', data_type='integer')
    in_json = ComplexInput('input-2', 'json input',
                           supported_formats=[FORMATS.JSON, ])
    out_literal = LiteralOutput('output.1', 'Add 1 to `input 1`.',
                                data_type='float')
    out_json = ComplexOutput('output 2', 'Same thing as input-2.',
                             supported_formats=[FORMATS.JSON, ])
    super(NonPyID, self).__init__(
        self._handler,
        identifier='non.py-id',  # TODO:fails with pywps: u'fake.process-for testing &é;'
        title="Dummy process including non-pythonic identifiers",
        abstract="Dummy process whose process, input and output identifiers include characters not allowed "
                 "in Python.",
        version="1.0",
        inputs=[in_literal, in_json],
        outputs=[out_literal, out_json],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Register the GRASS v.buffer demo process (runs in an epsg:3857 location)."""
    vector_in = ComplexInput('poly_in', 'Input1',
                             supported_formats=[Format('application/gml+xml')],
                             mode=MODE.STRICT)
    buffer_size = LiteralInput('buffer', 'Buffer',
                               data_type='float',
                               allowed_values=(0, 1, 10, (10, 10, 100),
                                               (100, 100, 1000)))
    buffered_out = ComplexOutput('buff_out', 'Buffered',
                                 supported_formats=[Format('application/gml+xml')])
    super(GrassBuffer, self).__init__(
        self._handler,
        identifier='grassbuffer',
        version='0.1',
        title="GRASS v.buffer",
        abstract='This process is using GRASS GIS v.buffer module ',
        profile='',
        inputs=[vector_in, buffer_size],
        outputs=[buffered_out],
        store_supported=True,
        status_supported=True,
        # A throw-away GRASS location in web mercator is created per run.
        grass_location="epsg:3857")
def create_sum_one():
    """Return a Process that imports a raster via GRASS, adds 1 to every
    cell, and exports the result as GeoTIFF.

    FIXES: the handler's local was named ``input`` (shadowed the builtin),
    and the mapcalc failure raised a copy-pasted "Could not set GRASS
    region." message.
    """
    def sum_one(request, response):
        raster = request.inputs['input']
        sys.path.append("/usr/lib/grass64/etc/python/")
        import grass.script as grass
        # Import the raster and set the region
        if grass.run_command("r.in.gdal", flags="o", out="input",
                             input=raster) != 0:
            raise NoApplicableCode("Could not import cost map. Please check the WCS service.")
        if grass.run_command("g.region", flags="ap", rast="input") != 0:
            raise NoApplicableCode("Could not set GRASS region.")
        # Add 1
        if grass.mapcalc("$output = $input + $value", output="output",
                         input="input", value=1.0) != 0:
            raise NoApplicableCode("Could not run GRASS mapcalc.")
        # Export the result
        out = "./output.tif"
        if grass.run_command("r.out.gdal", input="output", type="Float32",
                             output=out) != 0:
            raise NoApplicableCode("Could not export result from GRASS.")
        response.outputs['output'] = out
        return response

    return Process(handler=sum_one,
                   identifier='sum_one',
                   title='Process Sum One',
                   inputs=[ComplexInput('input', [Format('image/img')])],
                   outputs=[ComplexOutput('output', [Format('image/tiff')])])
def __init__(self):
    """Register the one-output vector buffering process."""
    vector_in = ComplexInput('poly_in', 'Input vector file',
                             supported_formats=[Format('application/gml+xml')],
                             mode=MODE.STRICT)
    buffer_size = LiteralInput('buffer', 'Buffer size',
                               data_type='float',
                               allowed_values=(0, 1, 10, (10, 10, 100),
                                               (100, 100, 1000)))
    buffered_out = ComplexOutput('buff_out', 'Buffered file',
                                 supported_formats=[Format('application/gml+xml')])
    super(ProcessOneOutput, self).__init__(
        self._handler,
        identifier='process-one-output',
        title='Process with one vector output',
        abstract='Buffers around the input features using the GDAL library',
        version='1.0',
        profile='',
        inputs=[vector_in, buffer_size],
        outputs=[buffered_out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Register the toy polygon-centroid process."""
    polygon = ComplexInput(
        'polygon', 'Region definition',
        abstract="A polygon defining a region.",
        supported_formats=[FORMATS.GML, ])
    centroid = LiteralOutput(
        'output', 'The centroid of the polygon geometry.',
        abstract="The coordinates of the polygon's approximate centroid.")
    super(PolyCentroid, self).__init__(
        self._handler,
        identifier='poly_centroid',
        title="Approximate centroid of a polygon.",
        abstract="Return the polygon's centroid coordinates. If the geometry contains multiple polygons, "
                 "only the centroid of the first one will be computed. Do not use for serious computations"
                 ", this is only a test process and uses a crude approximation. ",
        version="1.0",
        inputs=[polygon],
        outputs=[centroid],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Register the simple-plot demo process."""
    dataset = ComplexInput('dataset', 'Dataset',
                           supported_formats=[Format('application/x-netcdf')],
                           default=AIR_DS,
                           abstract='Example: {0}'.format(AIR_DS))
    variable = LiteralInput('variable', 'Variable',
                            data_type='string',
                            default=None,
                            min_occurs=0, max_occurs=1,
                            abstract='Enter the variable name (variable will be detected if not set)')
    plot = ComplexOutput('output', 'Simple Plot',
                         supported_formats=[Format('image/png')],
                         as_reference=True)
    super(SimplePlot, self).__init__(
        self._handler,
        identifier='simple_plot',
        title='Simple Plot',
        abstract='Returns a nice and simple plot.',
        version='1.0',
        inputs=[dataset, variable],
        outputs=[plot],
        store_supported=True,
        status_supported=True
    )
def load_meta(self):
    """Extract process meta data from underlying object.

    BUG FIX: ``func_doc`` is the Python-2-only alias for a function's
    docstring; ``__doc__`` works on both Python 2 and 3.
    """
    self.icclim_func = libclim._icclim_function_map[self.key]['func']
    doc = self.icclim_func.__doc__
    self.ocgis_cls = fr[self.key]
    self.identifier = self.ocgis_cls.key
    self.title = self.ocgis_cls.key.split('_')[1]
    # Second docstring line carries the one-line summary.
    self.abstract = doc.split('\n')[1].strip()
    self.has_required_variables = hasattr(self.ocgis_cls, 'required_variables')
    if self.has_required_variables:
        self.resource_inputs = []  # No more resource input.
        for key in self.ocgis_cls.required_variables:
            self.resource_inputs.append(
                ComplexInput(
                    key, key,
                    abstract='NetCDF Files or archive (tar/zip) containing netCDF files.',
                    metadata=[Metadata('Info')],
                    min_occurs=1,
                    max_occurs=1000,
                    supported_formats=[
                        Format('application/x-netcdf'),
                        Format('application/x-tar'),
                        Format('application/zip'),
                    ]))
def test_complex_input_identifier(self):
    """Identifier survives describe_xml().

    FIX: the local was named ``complex``, shadowing the builtin.
    """
    complex_in = ComplexInput('foo', 'Complex foo',
                              allowed_formats=[Format('bar/baz')])
    doc = complex_in.describe_xml()
    assert doc.tag == E.Input().tag
    [identifier_el] = xpath_ns(doc, './ows:Identifier')
    assert identifier_el.text == 'foo'
def test_complex_input_identifier(self):
    """describe_xml() emits an Input element carrying the identifier."""
    inpt = ComplexInput("foo", "Complex foo",
                        supported_formats=[Format("bar/baz")])
    xml = inpt.describe_xml()
    assert xml.tag == E.Input().tag
    [ident] = xpath_ns(xml, "./ows:Identifier")
    assert ident.text == "foo"