def __init__(self):
    """Declare the process: one string input 'name', one string output 'response'."""
    name_input = LiteralInput('name', 'Input name', data_type='string')
    response_output = LiteralOutput('response', 'Output response', data_type='string')
    super(ProcessNoOutput, self).__init__(
        self._handler,
        identifier='process-no-output',
        title='Process with no vector output',
        abstract='Returns a literal string output with Hello plus the inputed name',
        version='1',
        inputs=[name_input],
        outputs=[response_output],
        store_supported=True,
        status_supported=True)
def test_literal_output(self):
    """DescribeProcess XML for a LiteralOutput exposes identifier, data type and UOMs."""
    out = LiteralOutput('literal', 'Literal foo', uoms=['metre'])
    xml = out.describe_xml()
    [node] = xpath_ns(xml, '/Output')
    [ident] = xpath_ns(xml, '/Output/ows:Identifier')
    [dtype] = xpath_ns(xml, '/Output/LiteralOutput/ows:DataType')
    [uom_block] = xpath_ns(xml, '/Output/LiteralOutput/UOMs')
    [default_uom] = xpath_ns(uom_block, './Default/ows:UOM')
    supported = xpath_ns(uom_block, './Supported/ows:UOM')
    # Fully-qualified attribute name used for both the data-type and UOM references.
    ref_attr = '{%s}reference' % NAMESPACES['ows']
    assert node is not None
    assert ident.text == 'literal'
    assert dtype.attrib[ref_attr] == OGCTYPE['string']
    assert uom_block is not None
    assert default_uom.text == 'metre'
    assert default_uom.attrib[ref_attr] == OGCUNIT['metre']
    assert len(supported) == 1
def __init__(self):
    """Declare ultimate_question: no inputs, a single literal string output.

    Synchronous only: store_supported and status_supported are both False.
    """
    inputs = []
    outputs = [LiteralOutput('answer', 'Answer to Ultimate Question', data_type='string')]
    super(UltimateQuestion, self).__init__(
        self._handler,
        identifier='ultimate_question',
        version='1.3.3.7',
        title='Answer to the ultimate question',
        # Fix: the closing double quote of the quoted question was missing.
        abstract='This process gives the answer to the ultimate question of "What is the meaning of life?"',
        profile='',
        metadata=['Ultimate Question', 'What is the meaning of life'],
        inputs=inputs,
        outputs=outputs,
        store_supported=False,
        status_supported=False)
def create_translated_greeter():
    """Build a greeter Process whose process, input and output carry fr-CA translations."""
    def greeter(request, response):
        # Echo back a greeting using the submitted name.
        who = request.inputs['name'][0].data
        response.outputs['message'].data = "Hello {}!".format(who)
        return response

    fr_name = {"fr-CA": {"title": "Nom", "abstract": "Description"}}
    fr_message = {"fr-CA": {"title": "Message de retour", "abstract": "Description"}}
    fr_process = {"fr-CA": {"title": "Salutations", "abstract": "Dire allô"}}

    name_in = LiteralInput(
        'name', 'Input name',
        data_type='string',
        abstract='Input description',
        translations=fr_name,
    )
    message_out = LiteralOutput(
        'message', 'Output message',
        data_type='string',
        abstract='Output description',
        translations=fr_message,
    )
    return Process(
        handler=greeter,
        identifier='greeter',
        title='Greeter',
        abstract='Say hello',
        inputs=[name_in],
        outputs=[message_out],
        translations=fr_process,
    )
def __init__(self):
    """Declare say_hello: one string input 'name', one string output 'response'."""
    inputs = [LiteralInput('name', 'Input name', data_type='string')]
    outputs = [LiteralOutput('response', 'Output response', data_type='string')]
    super(SayHello, self).__init__(
        self._handler,
        identifier='say_hello',
        title='Process Say Hello',
        # Fix: the abstract previously contained a stray backslash left over
        # from a broken line continuation inside the string literal.
        abstract='Returns a literal string output with Hello plus the inputed name',
        version='1.3.2',
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare get_cloud_params: no inputs, one string output listing cloud endpoints."""
    cloud_list = LiteralOutput(
        'IaaS_deploy_execute',
        'List of (Broker, Celery Queue) available',
        data_type='string')
    super(GetCloudParams, self).__init__(
        self._handler,
        identifier='get_cloud_params',
        abstract='Get cloud parameters',
        title='Get Cloud Parameters',
        version='0.1',
        inputs=[],
        outputs=[cloud_list],
        store_supported=False,
        status_supported=False)
def __init__(self):
    """Declare sleep: a float 'delay' input and a string status output."""
    delay_in = LiteralInput('delay', 'Delay between every update', data_type='float')
    sleep_out = LiteralOutput('sleep_output', 'Sleep Output', data_type='string')
    # NOTE(review): version is the literal string 'None' — presumably a
    # placeholder; confirm whether a real version string is expected here.
    super(Sleep, self).__init__(
        self._handler,
        identifier='sleep',
        version='None',
        title='Sleep Process',
        abstract='This process will sleep for a given delay or 10 seconds if not a valid value',
        profile='',
        metadata=['Sleep', 'Wait', 'Delay'],
        inputs=[delay_in],
        outputs=[sleep_out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare feature_count: a GML layer input and an integer count output."""
    layer_in = ComplexInput('layer', 'Layer', supported_formats=[get_format('GML')])
    count_out = LiteralOutput('count', 'Count', data_type='integer')
    super(FeatureCount, self).__init__(
        self._handler,
        identifier='feature_count',
        version='None',
        title='Feature count',
        abstract='This process counts the number of features in a vector',
        profile='',
        metadata=['Feature', 'Count'],
        inputs=[layer_in],
        outputs=[count_out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare test_collect_args: two complex inputs plus one literal input."""
    nc_file = ComplexInput(
        "file",
        "File",
        max_occurs=2,
        abstract="Path to a local or online nc file",
        supported_formats=[FORMATS.NETCDF, FORMATS.DODS],
    )
    csv_doc = ComplexInput(
        "csv_input",
        "CSV",
        max_occurs=1,
        abstract="CSV document",
        supported_formats=[Format("text/csv", extension=".csv"), FORMATS.TEXT],
    )
    argc = LiteralInput(
        "argc",
        "Argument count dictionary",
        max_occurs=1,
        abstract="Number of input arguments for each input",
        data_type="string",
    )
    collected = LiteralOutput(
        "collected_argc",
        "Collected argument count",
        abstract="Number of collected arguments",
        data_type="integer",
    )
    super(TestProcessMultiInput, self).__init__(
        self._handler,
        identifier="test_collect_args",
        title="Test collect_args",
        abstract="A simple process that tests collect_args.",
        inputs=[nc_file, csv_doc, argc],
        outputs=[collected],
        store_supported=True,
        status_supported=True,
    )
def __init__(self):
    """Declare dummy: no inputs, one string output."""
    out = LiteralOutput('output', 'Output', data_type='string')
    super(Dummy, self).__init__(
        self._handler,
        identifier='dummy',
        version='None',
        title='Dummy Process',
        abstract='This is dummy process, nothing useful.',
        profile='',
        metadata=['Sleep', 'Wait', 'Delay'],
        inputs=[],
        outputs=[out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare atob_route: four float coordinates in, a route-path string out."""
    coords = [
        LiteralInput('x1', 'A point X', data_type='float'),
        LiteralInput('y1', 'A point Y', data_type='float'),
        LiteralInput('x2', 'B point X', data_type='float'),
        LiteralInput('y2', 'B point Y', data_type='float'),
    ]
    path_out = LiteralOutput('path', 'Route path from A to B', data_type='string')
    super(AtoBRoute, self).__init__(
        self._handler,
        identifier='atob_route',
        title='Define an ASTAR optimized path from A to B',
        version='0.0.1',
        inputs=coords,
        outputs=[path_out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare sleep demo: float 'delay' input (default '1'), string status output."""
    inputs = [LiteralInput('delay', 'Delay between every update',
                           default='1', data_type='float')]
    outputs = [LiteralOutput('sleep_output', 'Sleep Output', data_type='string')]
    super(Sleep, self).__init__(
        self._handler,
        identifier='sleep',
        version='1.0',
        title='Sleep Process',
        # Fix: implicit string concatenation previously produced "sleep.This"
        # with no separating space in the user-facing abstract.
        abstract='Testing a long running process, in the sleep. '
                 'This process will sleep for a given delay or 10 seconds if not a valid value.',
        profile='',
        metadata=[
            Metadata('PyWPS Demo', 'https://pywps-demo.readthedocs.io/en/latest/'),
        ],
        inputs=inputs,
        outputs=outputs,
        store_supported=False,
        status_supported=False)
def __init__(self):
    """Declare binaryoperatorfornumbers: two floats plus an operator choice."""
    input_a = LiteralInput('inputa', 'Input 1', data_type='float',
                           abstract='Enter Input 1', default="2.0")
    input_b = LiteralInput('inputb', 'Input 2', data_type='float',
                           abstract='Enter Input 2', default="3.0")
    # NOTE(review): 'substract' is misspelled but is part of the public
    # allowed-values contract; renaming it would break existing clients.
    operator = LiteralInput(
        'operator', 'Operator',
        data_type='string',
        abstract='Choose a binary Operator',
        default='add',
        allowed_values=['add', 'substract', 'divide', 'multiply'])
    result = LiteralOutput('output', 'Binary operator result', data_type='float')
    super(BinaryOperator, self).__init__(
        self._handler,
        identifier='binaryoperatorfornumbers',
        title='Binary Operator for Numbers',
        abstract='Performs operation on two numbers and returns the answer. '
                 'This example process is taken from Climate4Impact.',
        metadata=[
            Metadata('Birdhouse', 'http://bird-house.github.io/'),
            MetadataUrl('User Guide', 'http://emu.readthedocs.io/en/latest/', anonymous=True),
            Metadata('Climate4Impact', 'https://dev.climate4impact.eu'),
        ],
        version='1.0',
        inputs=[input_a, input_b, operator],
        outputs=[result],
        store_supported=True,
        status_supported=True)
def create_complex_nc_process():
    """Build a Process exercising an OPeNDAP input and literal/complex outputs."""
    def complex_proces(request, response):
        from pywps.dependencies import netCDF4 as nc
        dods_url = request.inputs['dods'][0].url
        # Read the global Conventions attribute straight from the remote dataset.
        with nc.Dataset(dods_url) as dataset:
            response.outputs['conventions'].data = dataset.Conventions
        response.outputs['outdods'].url = dods_url
        response.outputs['ncraw'].file = os.path.join(DATA_DIR, 'netcdf', 'time.nc')
        response.outputs['ncraw'].data_format = FORMATS.NETCDF
        return response

    dods_in = ComplexInput(
        'dods', 'Opendap input',
        supported_formats=[Format('DODS'), Format('NETCDF')],
        # mode=MODE.STRICT
    )
    return Process(
        handler=complex_proces,
        identifier='my_opendap_process',
        title='Opendap process',
        inputs=[dods_in],
        outputs=[
            LiteralOutput('conventions', 'NetCDF convention'),
            ComplexOutput('outdods', 'Opendap output',
                          supported_formats=[FORMATS.DODS],
                          as_reference=True),
            ComplexOutput('ncraw', 'NetCDF raw data output',
                          supported_formats=[FORMATS.NETCDF],
                          as_reference=False),
        ])
def __init__(self):
    """Declare polygonquery: find stored files intersecting a WKT polygon."""
    polygon = LiteralInput('polygon', 'WKT search polygon',
                           abstract='WKT',
                           data_type='string')
    srid = LiteralInput('srid', 'WKT CRS (EPSG ID)',
                        abstract='The CRS of the WKT polygon - use the numeric part of an EPSG ID, eg 4326 for EPSG:4326. Default is 4326',
                        data_type='string',
                        default='4326')
    data_srid = LiteralInput('data_srid', 'WKT CRS (EPSG ID)',
                             abstract='The CRS of the underlying data - use the numeric part of an EPSG ID, eg 28355 for EPSG:28355',
                             data_type='string')
    project = LiteralInput('project', 'project ID',
                           abstract='data storage project',
                           data_type='string',
                           default='all',
                           min_occurs=0)
    metadata = LiteralInput('metadata', 'metadata response (or not)',
                            abstract='A switch to determine what metadata to return, if any. Default is none (file list only). For all available metadata use "all". Otherwise Provide a comma delimited list of words, options are: "density" (approximate point density of files intersecting ROI) , "count" (# of points in each file), "schema" (data storage schema equivalent to pdal info --schema), "classes" (approximate number of points in each class), "exactboundary" (exact boundary of point data as WKT multipolygon in native data CRS)',
                            data_type='string',
                            default='no',
                            min_occurs=0)
    metadata_response = LiteralOutput('metadata_response', 'Response metadata',
                                      abstract='JSON dictionary of metadata which exist in the user-submitted polygon. Will be empty if no data exist in the ROI',
                                      data_type='string')
    super(polygonQuery, self).__init__(
        self._handler,
        identifier='polygonquery',
        version='0',
        title="Query data with a WKT polygon",
        abstract='Provide a WKT polygon, and return a list of files which contain data inside the polygon. This process does not transform input polygons, it requires a user-defined SRS string which matches the coordinate system used in the polygon.',
        profile='',
        inputs=[polygon, srid, data_srid, project, metadata],
        outputs=[metadata_response],
    )
def __init__(self):
    """Declare gdalwarp_ref: warp all tiffs found under a workflow data directory."""
    input_dir = LiteralInput('input_dir', 'Workflow data volume path',
                             data_type='string',
                             abstract=""" Path to a directory within the Workflow Data volume. The service will locate all files within this directory and warp them. """,
                             min_occurs=1, max_occurs=2)
    output_dir = LiteralOutput('output_dir', 'Workflow data volume path',
                               data_type='string',
                               abstract=""" Path to a directory within the Workflow Data volume. The service will store all outputs in this dir, then provide a reference to the directory which other services can use. """)
    # NOTE(review): unlike the other processes here, no handler is passed to
    # the superclass — confirm the base class supplies one.
    super(GdalWarpRef, self).__init__(
        identifier='gdalwarp_ref',
        abstract=""" The process warps an input raster. Locates all tiff files within the input_dir and creates a warped version in the output_dir location. <a href="http://gdal.org/gdalwarp.html">man page</a> """,
        version='0.1',
        metadata=[Metadata('Sample'), Metadata('Test')],
        title="GDAL Sample Process",
        profile='',
        inputs=[input_dir],
        outputs=[output_dir],
    )
def __init__(self):
    """Declare feature_count: count features in an uploaded GML document."""
    inputs = [ComplexInput('layer', 'Layer', [Format('application/gml+xml')])]
    outputs = [LiteralOutput('count', 'Count', data_type='integer')]
    super(FeatureCount, self).__init__(
        self._handler,
        identifier='feature_count',
        version='None',
        title='Feature count',
        # Fix: the abstract previously carried a stray backslash (broken line
        # continuation inside the string literal) and a trailing space.
        abstract='This process counts the number of features in an uploaded GML',
        profile='',
        metadata=[Metadata('Feature'), Metadata('Count')],
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare simplesttest: echo an integer back to the caller."""
    one_integer = LiteralInput('one_integer', 'Some integer input', data_type='integer')
    repeated = LiteralOutput('repeated_integer', 'Echo of the input', data_type='integer')
    super(SimplestTest, self).__init__(
        self._handler,
        identifier='simplesttest',
        title='Simplest Test',
        version='0.1',
        inputs=[one_integer],
        outputs=[repeated])
def __init__(self):
    """Declare distanciap1p2: Euclidean distance between two points (Spanish-labelled I/O)."""
    point_coords = [
        LiteralInput('x1', 'Punto1 Coordenada X', data_type='float'),
        LiteralInput('y1', 'Punto1 Coordenada Y', data_type='float'),
        LiteralInput('x2', 'Punto2 Coordenada X', data_type='float'),
        LiteralInput('y2', 'Punto2 Coordenada Y', data_type='float'),
    ]
    distance_out = LiteralOutput('resultado', 'Resultado Calculo', data_type='float')
    super(DistanciaP1P2, self).__init__(
        self._handler,
        identifier='distanciap1p2',
        version='None',
        title='Distancia entre P1 y P2',
        abstract='Calcula la distancia euclidiana entre dos puntos',
        profile='',
        inputs=point_coords,
        outputs=[distance_out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare hello: greet the caller by name, with keywords and metadata."""
    inputs = [
        LiteralInput(
            "name",
            "Your name",
            abstract="Please enter your name.",
            keywords=["name", "firstname"],
            data_type="string",
        )
    ]
    outputs = [
        LiteralOutput(
            "output",
            "Output response",
            abstract="A friendly Hello from us.",
            keywords=["output", "result", "response"],
            data_type="string",
        )
    ]
    super(SayHello, self).__init__(
        self._handler,
        identifier="hello",
        title="Say Hello",
        # Fix: implicit concatenation previously produced "Hello.Returns"
        # with no separating space in the user-facing abstract.
        abstract="Just says a friendly Hello. "
                 "Returns a literal string output with Hello plus the inputed name.",
        keywords=["hello", "demo"],
        metadata=[
            Metadata("PyWPS", "https://pywps.org/"),
            Metadata("Birdhouse", "http://bird-house.github.io/"),
            Metadata("PyWPS Demo", "https://pywps-demo.readthedocs.io/en/latest/"),
            Metadata("Emu: PyWPS examples", "https://emu.readthedocs.io/en/latest/"),
        ],
        version="1.5",
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True,
    )
def __init__(self):
    """Declare grass_length: total line length of features in a submitted GML file."""
    gml_layer = ComplexInput('layer', 'Layer', [Format('application/gml+xml')])
    length_out = LiteralOutput('length', 'Total length', data_type='string')
    super(TotalLength, self).__init__(
        self._handler,
        identifier='grass_length',
        title='Total line length',
        abstract="""Process returns the length of each feature from a submitted GML file""",
        inputs=[gml_layer],
        outputs=[length_out],
        store_supported=True,
        status_supported=True,
        grass_location="epsg:3358")

def _handler(self, request, response):
    # ogr2ogr requires gdal-bin
    from grass.script import core as grass
    # Import the GML layer into the GRASS location, then report per-feature lengths.
    grass.run_command("v.in.ogr",
                      input=request.inputs["layer"][0].file,
                      output="lines")
    report = grass.pipe_command("v.report", map="lines", option="length")
    total = 0
    # Skip the header line; the length is the last pipe-separated column.
    for row in report.stdout.readlines()[1:]:
        total += float(row.strip().split("|")[-1])
    response.outputs['length'].data = total
    return response
def create_sleep():
    """Build a sleep Process that waits the requested number of seconds in 10 steps."""
    def sleep(request, response):
        # Fix: inputs arrive as a list of values; take the first one's .data,
        # matching the access pattern of the other handlers in this file.
        seconds = request.inputs['seconds'][0].data
        # Fix: validate with an explicit raise instead of assert (assert is
        # stripped under python -O).
        if not isinstance(seconds, float):
            raise TypeError("seconds input must be a float")
        step = seconds / 10
        for _ in range(10):
            # How is status working in version 4 ?
            # self.status.set("Waiting...", i * 10)
            time.sleep(step)
        # Fix: assign to the output's .data; the original replaced the whole
        # LiteralOutput object with a bare string.
        response.outputs['finished'].data = "True"
        return response

    return Process(
        handler=sleep,
        identifier='sleep',
        title='Sleep',
        inputs=[LiteralInput('seconds', title='Seconds', data_type='float')],
        outputs=[
            LiteralOutput('finished', title='Finished', data_type='boolean')
        ])
def __init__(self):
    """Declare OPHIDIA.aggregate: variable/domain/operation in, one response out."""
    # The original assigned empty inputs/outputs lists first and immediately
    # overwrote them; the redundant pre-assignment is dropped here.
    variable = LiteralInput('variable', 'Variable',
                            abstract="Variable list",
                            data_type='string')
    domain = LiteralInput('domain', 'Domain',
                          abstract="Domain list",
                          data_type='string')
    operation = LiteralInput('operation', 'Operation list',
                             abstract="Operation list",
                             data_type='string')
    response = LiteralOutput('response', 'Response',
                             abstract="Response",
                             data_type='string')
    super(oph_esgf_aggregate, self).__init__(
        self._handler,
        identifier='OPHIDIA.aggregate',
        title='OPHIDIA.aggregate',
        abstract="An Ophidia ESGF CWT operator used to aggregate files of a dataset into only one file",
        version=_version,
        inputs=[variable, domain, operation],
        outputs=[response],
        store_supported=True,
        status_supported=True)
def create_complex_nc_process():
    """Build a Process that reads an OPeNDAP dataset and echoes its URL back."""
    def complex_proces(request, response):
        from pywps.dependencies import netCDF4 as nc
        dods_url = request.inputs['dods'][0].url
        # Read the global Conventions attribute straight from the remote dataset.
        with nc.Dataset(dods_url) as dataset:
            response.outputs['conventions'].data = dataset.Conventions
        response.outputs['outdods'].url = dods_url
        return response

    dods_in = ComplexInput(
        'dods', 'Opendap input',
        supported_formats=[Format('DODS'), Format('NETCDF')],
        # mode=MODE.STRICT
    )
    dods_out = ComplexOutput(
        'outdods', 'Opendap output',
        supported_formats=[Format('DODS'), Format('NETCDF')],
        as_reference=True)
    return Process(
        handler=complex_proces,
        identifier='my_opendap_process',
        title='Opendap process',
        inputs=[dods_in],
        outputs=[
            LiteralOutput('conventions', 'NetCDF convention'),
            dods_out,
        ])
def __init__(self):
    """Declare simple_dry_run: optional dry-run flag plus a bounded file count."""
    dry_run = LiteralInput(
        'dry_run', 'Dry run mode. Default false',
        data_type='boolean',
        default=False,
    )
    # NOTE(review): the title/abstract say the limit is 10 but the allowed
    # range goes up to 20 — confirm which bound is intended.
    count = LiteralInput(
        'count', 'Number of Files (Limit 10)',
        abstract='How many files do you want to download? The limit is 10',
        data_type='integer',
        allowed_values=[AllowedValue(minval=1, maxval=20)],
        default=1,
        mode=MODE.SIMPLE)
    result = LiteralOutput('output', 'Output response', data_type='string')
    super(SimpleDryRun, self).__init__(
        self._handler,
        identifier='simple_dry_run',
        title='Simple Dry Run',
        abstract='A dummy download as simple dry-run example.',
        metadata=[
            MetadataUrl(
                'User Guide',
                'https://emu.readthedocs.io/en/latest/processes.html',
                anonymous=True),
        ],
        version='1.0',
        inputs=[dry_run, count],
        outputs=[result],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare square: one integer argument in, its square out."""
    arg = LiteralInput(identifier='arg', title='Input number',
                       data_type='integer', abstract="Argument")
    result = LiteralOutput(identifier='response', title='Output response',
                           data_type='integer', abstract="Result")
    # NOTE(review): the handler here is self.handler (no underscore), unlike
    # the _handler convention used by the other processes.
    super(Square, self).__init__(
        self.handler,
        identifier='square',
        title='Square Process',
        abstract='Quadriert den Input',
        version='1.0.0',
        inputs=[arg],
        outputs=[result],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare hello: greet the caller by name."""
    inputs = [
        LiteralInput('name', 'Your name',
                     abstract='Please enter your name.',
                     keywords=['name', 'firstname'],
                     data_type='string')
    ]
    outputs = [
        LiteralOutput('output', 'Output response',
                      abstract='A friendly Hello from us.',
                      keywords=['output', 'result', 'response'],
                      data_type='string')
    ]
    super(SayHello, self).__init__(
        self._handler,
        identifier='hello',
        title='Say Hello',
        # Fix: implicit concatenation previously produced "Hello.Returns"
        # with no separating space in the user-facing abstract.
        abstract='Just says a friendly Hello. '
                 'Returns a literal string output with Hello plus the inputed name.',
        keywords=['hello', 'demo'],
        metadata=[
            Metadata(
                'User Guide',
                'https://emu.readthedocs.io/en/latest/processes.html'
            ),  # noqa
            Metadata('PyWPS Demo', 'https://pywps-demo.readthedocs.io/en/latest/'),
        ],
        version='1.5',
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare climdex_quantile and register its status-percentage steps."""
    # Extend the shared status percentages with this process's own phases.
    extra_steps = {"load_rdata": 10, "save_rdata": 90}
    self.status_percentage_steps = dict(common_status_percentages, **extra_steps)
    quantile_vector = LiteralOutput(
        "output_vector",
        "Output Vector",
        abstract="A vector of the quantiles in question",
        data_type="string",
    )
    super(ClimdexQuantile, self).__init__(
        self._handler,
        identifier="climdex_quantile",
        title="Climdex Quantile",
        abstract="Implements R’s type=8 in a more efficient manner",
        metadata=[
            Metadata("NetCDF processing"),
            Metadata("Climate Data Operations"),
            Metadata("PyWPS", "https://pywps.org/"),
            Metadata("Birdhouse", "http://bird-house.github.io/"),
            Metadata("PyWPS Demo", "https://pywps-demo.readthedocs.io/en/latest/"),
        ],
        inputs=quantile_inputs,
        outputs=[quantile_vector, rda_output],
        store_supported=True,
        status_supported=True,
    )
def default_outputs():
    """Return the output declarations shared by the ESMValTool metric processes."""
    success = LiteralOutput(
        'success', 'Success',
        abstract="""True if the metric has been successfully calculated. If false please check the log files""",
        data_type='string')
    recipe = ComplexOutput('recipe', 'recipe',
                           abstract='ESMValTool recipe used for processing.',
                           as_reference=True,
                           supported_formats=[Format('text/plain')])
    log = ComplexOutput('log', 'Log File',
                        abstract='Log File of ESMValTool processing.',
                        as_reference=True,
                        supported_formats=[Format('text/plain')])
    debug_log = ComplexOutput('debug_log', 'ESMValTool Debug File',
                              abstract='Debug Log File of ESMValTool processing.',
                              as_reference=True,
                              supported_formats=[Format('text/plain')])
    return (success, recipe, log, debug_log)
def __init__(self):
    """Declare modis-v1: LST statistics for a 2017 date range over Germany via GRASS."""
    date_range = [
        LiteralInput('start', 'Start date (eg. 2017-03-01)', data_type='string'),
        LiteralInput('end', 'End date (eg. 2017-04-01)', data_type='string'),
    ]
    stats_out = LiteralOutput('stats', 'Computed LST statistics', data_type='string')
    super(ModisV1, self).__init__(
        self._handler,
        identifier='modis-v1',
        version='0.1',
        title="Modis process (v1)",
        abstract='The process uses the GRASS GIS to compute LST '
                 'statistics for given period in 2017 for Germany',
        profile='',
        inputs=date_range,
        outputs=[stats_out],
        store_supported=True,
        status_supported=True,
        grass_location="/opt/grassdata/germany-modis")