def __init__(self):
    """Create a WPS process from an xclim indicator class instance.

    Subclasses must set ``self.xci`` to the xclim indicator before this
    runs; otherwise an AttributeError is raised pointing the user to the
    factory function.
    """
    if self.xci is None:
        raise AttributeError("Use the `make_xclim_indicator_process` function instead.")
    # Indicator metadata dict: identifier, long_name, abstract, parameters.
    attrs = self.xci.json()
    outputs = [
        ComplexOutput('output_netcdf', 'Function output in netCDF',
                      abstract="The indicator values computed on the original input grid.",
                      as_reference=True,
                      supported_formats=[FORMATS.NETCDF, ]
                      # To support FORMATS.DODS we need to get the URL.
                      ),
        ComplexOutput('output_log', 'Logging information',
                      abstract="Collected logs during process run.",
                      as_reference=True,
                      supported_formats=[FORMATS.TEXT]),
    ]
    identifier = attrs['identifier']
    # NOTE(review): eval() on attrs['parameters'] executes arbitrary code if the
    # indicator metadata were ever attacker-controlled — consider ast.literal_eval.
    # unidecode() strips non-ASCII characters that WPS capabilities cannot carry.
    super(_XclimIndicatorProcess, self).__init__(
        self._handler,
        identifier=identifier,
        version='0.1',
        title=unidecode(attrs['long_name']),
        abstract=unidecode(attrs['abstract']),
        inputs=self.load_inputs(eval(attrs['parameters'])),
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Declare the CVDP process: model selection inputs, trend plots, and metadata."""
    self.variables = ['tas', 'pr', 'psl', 'ts']
    self.frequency = 'mon'

    wps_inputs = [
        *model_experiment_ensemble(
            model='ACCESS1-0',
            experiment='historical',
            ensemble='r1i1p1',
            required_variables=self.variables,
            required_frequency=self.frequency,
        ),
        *year_ranges((1850, 2005)),
    ]

    # One PNG trend plot per variable, plus the full ESMValTool archive.
    wps_outputs = [
        ComplexOutput('tas_trend_ann_plot', 'Annual TAS trend',
                      abstract='Annual trend in surface air temperature.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('sst_trend_ann_plot', 'Annual SST trend',
                      abstract='Annual trend in sea surface temperature.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('psl_trend_ann_plot', 'Annual PSL trend',
                      abstract='Annual trend in sea level pressure.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('pr_trend_ann_plot', 'Annual precipitation trend',
                      abstract='Annual trend in precipitation.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('archive', 'Archive',
                      abstract='The complete output of the ESMValTool processing as an zip archive.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        *default_outputs(),
    ]

    super(CVDP, self).__init__(
        self._handler,
        identifier="cvdp",
        title="NCAR CVDPackage",
        version=runner.VERSION,
        abstract="Run the NCAR CVDPackage",
        metadata=[
            Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
            Metadata('Documentation',
                     'https://esmvaltool.readthedocs.io/en/version2_development/recipes/recipe_cvdp.html',
                     role=util.WPS_ROLE_DOC),
            Metadata('Estimated Calculation Time', '2 Minutes'),
        ],
        inputs=wps_inputs,
        outputs=wps_outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Declare the basin-descriptor test process (metalink v3/v4 outputs)."""
    # Basin name plus its boundary as GML or GeoJSON.
    basin_inputs = [
        LiteralInput('namebacino', 'Nome del bacino',
                     data_type='string',
                     abstract="Inserire il nome del bacino",
                     min_occurs=1),
        ComplexInput('vectorbacino', 'Vettoriale del bacino',
                     abstract="Vettoriale del bacino delimitato in formato GML-XML",
                     supported_formats=[Format('application/gml+xml'),
                                        Format('application/vnd.geo+json')]),
    ]
    basin_outputs = [
        ComplexOutput('output', 'METALINK v3 output',
                      abstract='Testing metalink v3 output',
                      as_reference=False,
                      supported_formats=[FORMATS.METALINK]),
        ComplexOutput('output_meta4', 'METALINK v4 output',
                      abstract='Testing metalink v4 output',
                      as_reference=False,
                      supported_formats=[FORMATS.META4]),
    ]
    super(Renerfor_descrittori, self).__init__(
        self._handler,
        identifier='renerfor_descrittori_test',
        title='renerfor_descrittori_test',
        abstract='Inserimento di uno shapefile in formato GML e ritorno di un link per visualizzazione di un pdf scelto come esempio',
        version='0.1',
        inputs=basin_inputs,
        outputs=basin_outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare the pelican subsetting process: netCDF result plus PNG preview."""
    subset_outputs = [
        ComplexOutput('nc', 'NetCDF',
                      as_reference=True,
                      supported_formats=[FORMATS.NETCDF]),
        ComplexOutput('preview', 'Preview',
                      abstract='Preview of subsetted Dataset.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
    ]
    super(PelicanSubset, self).__init__(
        self._handler,
        identifier='pelican_subset',
        title='xarray.subset',
        abstract="subset netcdf files",
        version='2.0.0',
        metadata=[
            Metadata('ESGF Compute API', 'https://github.com/ESGF/esgf-compute-api'),
            Metadata('ESGF Compute WPS', 'https://github.com/ESGF/esgf-compute-wps'),
        ],
        inputs=[],  # no inputs declared yet
        outputs=subset_outputs,
        store_supported=True,
        status_supported=True)
def __init__(self, xci):
    """Create a WPS process from an xclim indicator class instance.

    :param xci: the xclim indicator instance whose json() metadata drives the
        process identifier, title, abstract, and inputs.
    """
    self.xci = xci
    # Set later by the handler once the target variable is known.
    self.varname = None
    # Indicator metadata dict: identifier, long_name, abstract, parameters.
    attrs = xci.json()
    outputs = [
        ComplexOutput(
            'output_netcdf',
            'Function output in netCDF',
            abstract=
            "The indicator values computed on the original input grid.",
            as_reference=True,
            supported_formats=[FORMATS.NETCDF]),
        ComplexOutput('output_log', 'Logging information',
                      abstract="Collected logs during process run.",
                      as_reference=True,
                      supported_formats=[FORMATS.TEXT]),
    ]
    # NOTE(review): eval() on attrs['parameters'] executes arbitrary code if the
    # indicator metadata were ever attacker-controlled — consider ast.literal_eval.
    super(UnivariateXclimIndicatorProcess, self).__init__(
        self._handler,
        identifier=attrs['identifier'],
        version='0.1',
        title=attrs['long_name'],
        abstract=attrs['abstract'],
        inputs=self.load_inputs(eval(attrs['parameters'])),
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Declare a dummy process that exercises netCDF and JSON output formats."""
    dummy_outputs = [
        ComplexOutput('netcdf', 'netCDF dummy output file.',
                      abstract="A very small test netCDF file. ",
                      as_reference=True,
                      supported_formats=[FORMATS.NETCDF]),
        ComplexOutput('json', 'json dummy output file.',
                      abstract="A very small test json file. ",
                      as_reference=False,
                      supported_formats=[FORMATS.JSON]),
    ]
    super(OutputFormats, self).__init__(
        self._handler,
        identifier='output_formats',
        title="Return different output formats. ",
        abstract="Dummy process returning various output file formats.",
        version="2.0",
        inputs=[],
        outputs=dummy_outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare the watershed delineation process.

    Takes an outlet point (lon/lat), snaps it to the nearest stream, and
    delineates the upstream watershed with GRASS.
    """
    inputs = [
        LiteralInput('outlet_x', 'Outlet Longitude', data_type='float'),
        LiteralInput('outlet_y', 'Outlet Latitude', data_type='float'),
    ]
    outputs = [
        ComplexOutput('watershed', 'Delineated Watershed',
                      supported_formats=[Format('text/xml')]),
        ComplexOutput('snappoint', 'Snapped outlet point',
                      supported_formats=[Format('text/xml')]),
        LiteralOutput('message', 'Processing message', data_type='string'),
    ]
    super(watersheddelineationprocess, self).__init__(
        self._handler,
        identifier='watersheddelineationprocess',  # must be same, as filename
        version='1.0',
        title="Watershed delineation process",
        # Fixed grammar in the user-facing abstract ("snap" -> "snaps",
        # "perform" -> "performs").
        abstract='This process snaps a given point to the nearest stream and performs watershed delineation using GRASS within the DR country area',
        profile='',
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare the GBIF species-occurrence fetch process.

    Fetches occurrence records for a tree species from GBIF and returns a
    map graphic, a CSV table, and the run log.
    """
    inputs = [
        LiteralInput(
            'taxon_name',
            'Taxonomic name of tree species',
            abstract='Taxonomic name of tree species (e. g. Fagus sylvatica)',
            data_type='string',
            min_occurs=1,
            max_occurs=1,
            default='Fagus sylvatica'),
    ]
    # Typos fixed in user-facing strings: "occurences" -> "occurrences",
    # "occurence" -> "occurrence", "Facillity" -> "Facility".
    outputs = [
        ComplexOutput(
            'output_map',
            'Graphic of species occurrences',
            abstract="PNG graphic file showing the presence of tree species according to GBIF data fetch",
            as_reference=True,
            supported_formats=[Format('image/png')]),
        ComplexOutput(
            'output_csv',
            'Tree species table',
            abstract="Extracted CSV file containing the tree species table",
            as_reference=True,
            supported_formats=[Format('text/csv')]),
        ComplexOutput(
            'output_log',
            'Logging information',
            abstract="Collected logs during process run.",
            as_reference=True,
            supported_formats=[Format('text/plain')]),
    ]
    super(GBIFfetchProcess, self).__init__(
        self._handler,
        identifier="sdm_gbiffetch",
        title="Species distribution Model (GBIF data fetch only)",
        version="0.2",
        abstract="Species occurrence search in Global Biodiversity Infrastructure Facility (GBIF)",
        metadata=[
            Metadata('Doc', 'http://flyingpigeon.readthedocs.io/en/latest/'),
            Metadata('GBIF', 'http://gbif.org/'),
        ],
        inputs=inputs,
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Declare a demo process with two vector outputs (buffer and centroids)."""
    inputs = [
        ComplexInput('poly_in', 'Input vector file',
                     supported_formats=[Format('application/gml+xml')],
                     mode=MODE.STRICT),
        LiteralInput('buffer', 'Buffer size',
                     data_type='float',
                     allowed_values=(0, 1, 10, (10, 10, 100), (100, 100, 1000))),
    ]
    # Typo fixed: "Centroud" -> "Centroid".
    outputs = [
        ComplexOutput('buff_out', 'Buffered file',
                      supported_formats=[Format('application/gml+xml')]),
        ComplexOutput('centr_out', 'Centroid buffered file',
                      supported_formats=[Format('application/gml+xml')]),
    ]
    super(ProcessTwoOutputs, self).__init__(
        self._handler,
        identifier='process-two-outputs',
        title='Process with two vector outputs',
        # Fixed grammar: "compute" -> "computes".
        abstract='Buffers around the input features and computes centroids using the GDAL library',
        version='1.0',
        profile='',
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare the HydroBASINS watershed selection process."""
    # A point of interest, plus an option to aggregate all upstream basins.
    basin_inputs = [
        LiteralInput(
            "location",
            "Location coordinates (lon, lat)",
            abstract="Location coordinates (longitude, latitude) for point of interest.",
            data_type="string",
            min_occurs=1,
            max_occurs=2,
        ),
        # LiteralInput('level', 'Resolution level of HydroBASINS Shapes',
        #              data_type='integer',
        #              default=12,
        #              allowed_values=[7, 8, 9, 10, 11, 12],
        #              min_occurs=0,
        #              max_occurs=1),
        # LiteralInput('lakes', 'Use the HydroBASINS version that includes lake outlines',
        #              data_type='boolean',
        #              default='true',
        #              min_occurs=0,
        #              max_occurs=1),
        LiteralInput(
            "aggregate_upstream",
            "Attempt to capture both the containing basin and all tributary basins from point",
            data_type="boolean",
            default="false",
            min_occurs=0,
            max_occurs=1,
        ),
    ]
    basin_outputs = [
        ComplexOutput(
            "feature",
            "Watershed feature geometry",
            abstract="Geographic representation of shape properties.",
            supported_formats=[FORMATS.GEOJSON],
        ),
        ComplexOutput(
            "upstream_ids",
            "HydroBASINS IDs for all immediate upstream basins",
            abstract="List of all tributary sub-basins according to their HydroBASINS IDs, including the downstream basin.",
            supported_formats=[FORMATS.JSON],
        ),
    ]
    super(HydroBasinsSelectionProcess, self).__init__(
        self._handler,
        identifier="hydrobasins-select",
        title="Select a HydroBASINS watershed geometry",
        version="1.1",
        abstract="Return a watershed from the HydroSheds database as a polygon vector file.",
        metadata=[],
        inputs=basin_inputs,
        outputs=basin_outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Declare the climate fact sheet generator process."""
    fs_inputs = [
        ComplexInput('resource', 'Resource',
                     abstract="NetCDF Files or archive (tar/zip) containing netCDF files",
                     min_occurs=1,
                     max_occurs=1000,
                     # maxmegabites=5000,
                     supported_formats=[
                         Format('application/x-netcdf'),
                         Format('application/x-tar'),
                         Format('application/zip'),
                     ]),
        LiteralInput("region", "Region",
                     # abstract= countries_longname(),  # need to handle special non-ascii char in countries.
                     default='DEU',
                     data_type='string',
                     min_occurs=1,
                     max_occurs=len(countries()),
                     allowed_values=countries()),  # REGION_EUROPE #COUNTRIES
    ]

    fs_outputs = [
        ComplexOutput('output_nc', "Subsets",
                      abstract="Tar archive containing the netCDF files",
                      as_reference=True,
                      supported_formats=[Format("application/x-tar")]),
        ComplexOutput('output_factsheet', "Climate Fact Sheet",
                      abstract="Short overview of the climatological situation of the selected countries",
                      as_reference=True,
                      supported_formats=[Format('application/pdf')]),
        ComplexOutput('output_log', 'Logging information',
                      abstract="Collected logs during process run.",
                      as_reference=True,
                      supported_formats=[Format("text/plain")]),
    ]

    super(FactsheetProcess, self).__init__(
        self._handler,
        identifier="climatefactsheet",
        title="Climate Fact Sheet Generator (init)",
        version="0.2",
        abstract="Returns a pdf with a short overview of the climatological situation for the selected countries",
        metadata=[
            # {"title": "LSCE", "href": "http://www.lsce.ipsl.fr/en/index.php"},
            {"title": "Doc", "href": "http://flyingpigeon.readthedocs.io/en/latest/"},
        ],
        inputs=fs_inputs,
        outputs=fs_outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Declare the CDO remapping/subsetting process for netCDF files."""
    inputs = [
        ComplexInput(
            'netcdf_file',
            'NetCDF File',
            abstract='You may provide a URL or upload a NetCDF file.',
            metadata=[Metadata('Info')],
            min_occurs=1,
            max_occurs=100,
            supported_formats=[Format('application/x-netcdf')]),
        LiteralInput('operator', 'CDO Operator',
                     data_type='string',
                     abstract="Choose a CDO Operator",
                     default='remapbil',
                     min_occurs=0,
                     max_occurs=1,
                     allowed_values=[
                         'remapbil', 'remapbic', 'remapdis', 'remapnn',
                         'remapcon', 'remapcon2', 'remaplaf'
                     ]),
    ]
    outputs = [
        ComplexOutput(
            'tarout',
            'Result files',
            abstract="Tar archive containing the netCDF result files",
            as_reference=True,
            supported_formats=[Format('application/x-tar')]),
        ComplexOutput('output', 'Output',
                      # Typo fixed: "regrided" -> "regridded".
                      abstract="One regridded file.",
                      as_reference=True,
                      supported_formats=[Format('application/x-netcdf')]),
    ]
    super(Inter_Sub, self).__init__(
        self._handler,
        identifier="regrsub",
        title="CDO Remapping and Subsetting",
        abstract="CDO Remapping and Subsetting of NetCDF File(s)",
        version=cdo_version,
        metadata=[
            Metadata('Birdhouse', 'http://bird-house.github.io/'),
            Metadata('User Guide',
                     'http://birdhouse-hummingbird.readthedocs.io/en/latest/'),
            Metadata('CDO Homepage', 'https://code.zmaw.de/projects/cdo'),
            Metadata('CDO Documentation',
                     'https://code.zmaw.de/projects/cdo/embedded/index.html'),
        ],
        inputs=inputs,
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def test_complex_output(self):
    """A GML ComplexOutput describes itself with the GML MIME type as default."""
    out = ComplexOutput('complex', 'Complex foo', [Format('GML')])
    xml = out.describe_xml()
    [node] = xpath_ns(xml, '/Output')
    [default_mime] = xpath_ns(xml, '/Output/ComplexOutput/Default/Format/MimeType')
    supported_mimes = xpath_ns(xml, '/Output/ComplexOutput/Supported/Format/MimeType')
    assert default_mime.text == 'application/gml+xml'
    assert len(supported_mimes) == 1
def __init__(self):
    """Declare the combined climate extreme index process."""
    metric_inputs = [
        LiteralInput(
            'metric',
            'Metric',
            abstract='Choose a metric to calculate.',
            data_type='string',
            allowed_values=['t10p', 't90p', 'rx5day', 'Wx'],  # 'cdd' <- these do not work
            default='Wx'),
    ]
    self.plotlist = []
    metric_outputs = [
        ComplexOutput('plot', 'Combined Climate Extreme Index plot',
                      abstract='Combined Climate Extreme Index plot.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('data', 'Combined Climate Extreme Index data',
                      abstract='Combined Climate Extreme Index data.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        ComplexOutput('archive', 'Archive',
                      abstract='The complete output of the ESMValTool processing as an zip archive.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        *default_outputs(),
    ]
    super(ExtremeIndex, self).__init__(
        self._handler,
        identifier="extreme_index",
        title="Combined Climate Extreme Index",
        version=runner.VERSION,
        abstract="""Metric showing extreme indices relevant to the insurance industry (heat, cold, wind, flood and drought indices).""",
        metadata=[
            Metadata('Estimated Calculation Time', '2 minutes'),
            Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
            Metadata('Documentation',
                     'https://esmvaltool.readthedocs.io/en/latest/recipes/recipe_combined_climate_extreme_index.html',
                     role=util.WPS_ROLE_DOC),
            Metadata('Media',
                     util.diagdata_url() + '/risk_index/insurance_risk_indices.png',
                     role=util.WPS_ROLE_MEDIA),
        ],
        inputs=metric_inputs,
        outputs=metric_outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Declare the ESMValTool tutorial diagnostic process."""
    model_inputs = [
        LiteralInput('model', 'Model',
                     abstract='Choose a model like MPI-ESM-LR.',
                     data_type='string',
                     allowed_values=['MPI-ESM-LR', 'MPI-ESM-MR'],
                     default='MPI-ESM-LR'),
        LiteralInput('experiment', 'Experiment',
                     abstract='Choose an experiment like historical.',
                     data_type='string',
                     allowed_values=['historical', 'rcp26', 'rcp45', 'rcp85'],
                     default='historical'),
        LiteralInput('ensemble', 'Ensemble',
                     abstract='Choose an ensemble like r1i1p1.',
                     data_type='string',
                     allowed_values=['r1i1p1', 'r2i1p1', 'r3i1p1'],
                     default='r1i1p1'),
        LiteralInput('start_year', 'Start year',
                     data_type='integer',
                     abstract='Start year of model data.',
                     default="1990"),
        LiteralInput('end_year', 'End year',
                     data_type='integer',
                     abstract='End year of model data.',
                     default="2000"),
    ]
    diag_outputs = [
        ComplexOutput('namelist', 'namelist',
                      abstract='ESMValTool namelist used for processing.',
                      as_reference=True,
                      supported_formats=[Format('text/plain')]),
        ComplexOutput('log', 'Log File',
                      abstract='Log File of ESMValTool processing.',
                      as_reference=True,
                      supported_formats=[Format('text/plain')]),
        ComplexOutput('output', 'Output plot',
                      abstract='Generated output plot of ESMValTool processing.',
                      as_reference=True,
                      supported_formats=[Format('application/pdf')]),
    ]
    super(MyDiag, self).__init__(
        self._handler,
        identifier="mydiag",
        title="ESMValTool: tutorial diagnostic.",
        version="1.0",
        abstract="Tutorial diagnostic used in the doc/toy-diagnostic-tutorial.pdf."
                 " The default run uses the following CMIP5 data: "
                 "project=CMIP5, experiment=historical, ensemble=r1i1p1, variable=ta, model=MPI-ESM-LR, time_frequency=mon",  # noqa
        metadata=[
            Metadata('Birdhouse', 'http://bird-house.github.io/'),
            Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
            Metadata('ESGF Testdata',
                     'https://esgf1.dkrz.de/thredds/catalog/esgcet/7/cmip5.output1.MPI-M.MPI-ESM-LR.historical.mon.atmos.Amon.r1i1p1.v20120315.html?dataset=cmip5.output1.MPI-M.MPI-ESM-LR.historical.mon.atmos.Amon.r1i1p1.v20120315.ta_Amon_MPI-ESM-LR_historical_r1i1p1_199001-199912.nc'),  # noqa
        ],
        inputs=model_inputs,
        outputs=diag_outputs,
        status_supported=True,
        store_supported=True)
def test_complex_output(self):
    """A GML ComplexOutput describes itself with the GML MIME type as default."""
    output = ComplexOutput("complex", "Complex foo", [Format("GML")])
    described = output.describe_xml()
    [node] = xpath_ns(described, "/Output")
    [default_mime] = xpath_ns(described, "/Output/ComplexOutput/Default/Format/MimeType")
    supported_mimes = xpath_ns(described, "/Output/ComplexOutput/Supported/Format/MimeType")
    assert default_mime.text == "application/gml+xml"
    assert len(supported_mimes) == 1
def __init__(self):
    """Declare the diurnal temperature variation (DTR) indicator process."""
    self.variables = ['tasmax', 'tasmin']
    self.frequency = 'day'

    dtr_inputs = [
        *model_experiment_ensemble(
            model='MPI-ESM-MR',
            experiment='rcp85',
            ensemble='r1i1p1',
            max_occurs=1,
            required_variables=self.variables,
            required_frequency=self.frequency,
            exclude_historical=True),
        *historic_projection_year_ranges(1990, 2000, 2070, 2080),
        *region(-10, 40, 27, 70),
    ]
    self.plotlist = []
    dtr_outputs = [
        ComplexOutput('plot', 'Diurnal Temperature Variation (DTR) Indicator plot',
                      abstract='The diurnal temperature indicator to estimate energy demand.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('data', 'Diurnal Temperature Variation (DTR) Indicator data',
                      abstract='The diurnal temperature indicator data.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        ComplexOutput('archive', 'Archive',
                      abstract='The complete output of the ESMValTool processing as an zip archive.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        *default_outputs(),
    ]
    super(DiurnalTemperatureIndex, self).__init__(
        self._handler,
        identifier="diurnal_temperature_index",
        title="Diurnal Temperature Variation (DTR) Indicator",
        version=runner.VERSION,
        abstract=""" Metric showing the diurnal temperature indicator to estimate energy demand. The estimated calculation time of this process is 6 minutes for the default values supplied. """,
        metadata=[
            Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
            Metadata('Documentation',
                     'https://esmvaltool.readthedocs.io/en/v2.0a2/recipes/recipe_diurnal_temperature_index.html',  # noqa
                     role=util.WPS_ROLE_DOC),
        ],
        inputs=dtr_inputs,
        outputs=dtr_outputs,
        status_supported=True,
        store_supported=True)
def test_store(self):
    """Storing a vector output yields a (store type, path, url) triple of strings."""
    out = ComplexOutput('vector', 'Vector output',
                        supported_formats=[FORMATS.GML])
    out.file = get_vector_file()
    result = self.storage.store(out)
    assert len(result) == 3
    kind, first, second = result
    assert kind == STORE_TYPE.PATH
    assert isinstance(first, str)
    assert isinstance(second, str)
def __init__(self):
    """Declare the workflow orchestration process with provenance outputs."""
    wf_inputs = [
        ComplexInput(
            "workflow",
            "Workflow",
            min_occurs=1,
            max_occurs=1,
            supported_formats=[FORMATS.JSON],
        ),
    ]
    wf_outputs = [
        ComplexOutput(
            "output",
            "METALINK v4 output",
            abstract="Metalink v4 document with references to NetCDF files.",
            as_reference=True,
            supported_formats=[FORMATS.META4],
        ),
        ComplexOutput(
            "prov",
            "Provenance",
            abstract="Provenance document using W3C standard.",
            as_reference=True,
            supported_formats=[FORMATS.JSON],
        ),
        ComplexOutput(
            "prov_plot",
            "Provenance Diagram",
            abstract="Provenance document as diagram.",
            as_reference=True,
            supported_formats=[
                Format("image/png", extension=".png", encoding="base64")
            ],
        ),
    ]
    super(Orchestrate, self).__init__(
        self._handler,
        identifier="orchestrate",
        title="Orchestrate",
        abstract="Run a workflow with combined operations. A workflow can be build using the rooki client.",
        metadata=[
            Metadata("Rooki", "https://github.com/roocs/rooki"),
        ],
        version="1.0",
        inputs=wf_inputs,
        outputs=wf_outputs,
        store_supported=True,
        status_supported=True,
    )
def __init__(self):
    """Declare the wind power capacity factor process."""
    self.plotlist = []
    cf_outputs = [
        ComplexOutput('plot', 'Capacity Factor of Wind Power plot',
                      abstract='Ratio of average estimated power to theoretical maximum power.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('data', 'Capacity Factor of Wind Power data',
                      abstract='Ratio of average estimated power to theoretical maximum power.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        ComplexOutput('archive', 'Archive',
                      abstract='The complete output of the ESMValTool processing as an zip archive.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        *default_outputs(),
    ]
    super(CapacityFactor, self).__init__(
        self._handler,
        identifier="capacity_factor",
        title="Capacity factor of wind power",
        version=runner.VERSION,
        abstract="""Metric showing the wind capacity factor to estimate energy supply.""",
        metadata=[
            Metadata('Estimated Calculation Time', '1 minute'),
            Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
            Metadata('Documentation',
                     'https://esmvaltool.readthedocs.io/en/latest/recipes/recipe_capacity_factor.html',
                     role=util.WPS_ROLE_DOC),
            # Metadata(
            #     'Media',
            #     util.diagdata_url() + '/capacity_factor/diurnal_temperature_variation.png',
            #     role=util.WPS_ROLE_MEDIA),
        ],
        inputs=[],
        outputs=cf_outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Declare the analogues viewer process.

    Converts the text output of the analogues-of-circulation processes into
    an HTML viewer page plus a reformatted text file.
    """
    inputs = [
        ComplexInput(
            "analog_result",
            "Analogues result file",
            # Typo fixed: "Corculation" -> "Circulation".
            abstract="Analogues text file computed by Analogues of Circulation processes",
            min_occurs=1,
            max_occurs=1,
            # maxmegabites=5000,
            supported_formats=[Format('text/plain')]),
    ]
    outputs = [
        ComplexOutput(
            "output_html",
            "html viewer",
            abstract="web browser compatible html file",
            supported_formats=[Format("text/html")],
            as_reference=True,
        ),
        ComplexOutput(
            "output_txt",
            # Typo fixed: "Formated" -> "Formatted".
            "modified analogues txt file",
            abstract="Formatted analogues file for viewer",
            supported_formats=[Format("text/plain")],
            as_reference=True,
        ),
        ComplexOutput('output_log', 'Logging information',
                      abstract="Collected logs during process run.",
                      as_reference=True,
                      supported_formats=[Format('text/plain')]),
    ]
    super(AnalogsviewerProcess, self).__init__(
        self._handler,
        identifier="analogs_viewer",
        title="Analogues of circulation (visualization of analogs data)",
        abstract="Visualisation of text output of analogue process",
        version="0.10",
        metadata=[
            Metadata('LSCE', 'http://www.lsce.ipsl.fr/en/index.php'),
            Metadata('Doc', 'http://flyingpigeon.readthedocs.io/en/latest/'),
        ],
        inputs=inputs,
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Declare the resource fetching process (downloads data to local disk)."""
    fetch_inputs = [
        ComplexInput(
            'resource',
            'Resource',
            abstract="NetCDF Files or archive (tar/zip) containing netCDF files.",
            min_occurs=1,
            max_occurs=1000,
            # maxmegabites=5000,
            supported_formats=[
                Format('application/x-netcdf'),
                Format('application/x-tar'),
                Format('application/zip'),
            ]),
    ]
    fetch_outputs = [
        ComplexOutput(
            "output",
            "Fetched Files",
            abstract="File containing the local paths to downloaded files.",
            supported_formats=[Format('text/plain')],
            as_reference=True,
        ),
        ComplexOutput(
            "output_log",
            "Logging information",
            abstract="Collected logs during process run.",
            supported_formats=[Format("text/plain")],
            as_reference=True,
        ),
    ]
    super(FetchProcess, self).__init__(
        self._handler,
        identifier="fetch_resources",
        title="Fetch Resources",
        version="0.10",
        abstract="Fetch data resources (limited to 50GB) to the local filesystem of the birdhouse compute provider.",
        metadata=[
            Metadata('Documentation',
                     'http://flyingpigeon.readthedocs.io/en/latest/'),
        ],
        inputs=fetch_inputs,
        outputs=fetch_outputs,
        status_supported=True,
        store_supported=True,
    )
def test_complex_output(self):
    """A ComplexOutput with keywords describes both the format and the keywords."""
    out = ComplexOutput('complex', 'Complex foo', [Format('GML')],
                        keywords=['kw1', 'kw2'])
    xml = out.describe_xml()
    [node] = xpath_ns(xml, '/Output')
    [default_mime] = xpath_ns(xml, '/Output/ComplexOutput/Default/Format/MimeType')
    supported_mimes = xpath_ns(xml, '/Output/ComplexOutput/Supported/Format/MimeType')
    assert default_mime.text == 'application/gml+xml'
    assert len(supported_mimes) == 1
    # Keywords must survive the round trip into the DescribeProcess document.
    [kw_node] = xpath_ns(xml, '/Output/ows:Keywords')
    assert kw_node is not None
    assert len(xpath_ns(kw_node, './ows:Keyword')) == 2
def test_complex_output(self):
    """A ComplexOutput with keywords describes both the format and the keywords."""
    described = ComplexOutput(
        'complex', 'Complex foo', [Format('GML')],
        keywords=['kw1', 'kw2']).describe_xml()
    [node] = xpath_ns(described, '/Output')
    [default_mime] = xpath_ns(described, '/Output/ComplexOutput/Default/Format/MimeType')
    supported_mimes = xpath_ns(described, '/Output/ComplexOutput/Supported/Format/MimeType')
    [kw_container] = xpath_ns(described, '/Output/ows:Keywords')
    keyword_nodes = xpath_ns(kw_container, './ows:Keyword')
    assert default_mime.text == 'application/gml+xml'
    assert len(supported_mimes) == 1
    assert kw_container is not None
    assert len(keyword_nodes) == 2
def __init__(self):
    """Declare the point inspection process.

    Extracts time series at user-supplied coordinates from netCDF files and
    returns one CSV per input file, bundled in a tar archive.
    """
    inputs = [
        ComplexInput('resource', 'Resource',
                     abstract='NetCDF Files or archive (tar/zip) containing NetCDF files.',
                     metadata=[Metadata('Info')],
                     min_occurs=1,
                     max_occurs=1000,
                     supported_formats=[
                         Format('application/x-netcdf'),
                         Format('application/x-tar'),
                         Format('application/zip'),
                     ]),
        LiteralInput("coords", "Coordinates",
                     # Fixed: "WGS85" -> "WGS84" (the geodetic datum is WGS84)
                     # and "comma-seperated" -> "comma-separated".
                     abstract="A comma-separated tuple of WGS84 lon,lat decimal coordinates (e.g. 2.356138, 48.846450)",  # noqa
                     default="2.356138, 48.846450",
                     data_type='string',
                     min_occurs=1,
                     max_occurs=100,
                     ),
    ]
    outputs = [
        ComplexOutput('output_log', 'Logging information',
                      abstract="Collected logs during process run.",
                      as_reference=True,
                      supported_formats=[Format('text/plain')]
                      ),
        ComplexOutput('tarout', 'Subsets',
                      abstract="Tar archive containing one CSV file per input file, each one storing time series column-wise for all point coordinates.",
                      as_reference=True,
                      supported_formats=[Format('application/x-tar')]
                      ),
    ]
    super(PointinspectionProcess, self).__init__(
        self._handler,
        identifier="pointinspection",
        title="Point Inspection",
        abstract='Extract the timeseries at the given coordinates.',
        version="0.10",
        metadata=[
            Metadata('LSCE', 'http://www.lsce.ipsl.fr/en/index.php'),
            Metadata('Doc', 'http://flyingpigeon.readthedocs.io/en/latest/'),
        ],
        inputs=inputs,
        outputs=outputs,
        status_supported=True,
        store_supported=True,
    )
def __init__(self):
    """Declare the diurnal temperature variation (DTR) indicator process."""
    self.plotlist = []
    dtr_outputs = [
        ComplexOutput('plot', 'Diurnal Temperature Variation (DTR) Indicator plot',
                      abstract='The diurnal temperature indicator to estimate energy demand.',
                      as_reference=True,
                      supported_formats=[Format('image/png')]),
        ComplexOutput('data', 'Diurnal Temperature Variation (DTR) Indicator data',
                      abstract='The diurnal temperature indicator data.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        ComplexOutput('archive', 'Archive',
                      abstract='The complete output of the ESMValTool processing as an zip archive.',
                      as_reference=True,
                      supported_formats=[Format('application/zip')]),
        *default_outputs(),
    ]
    super(DiurnalTemperatureIndex, self).__init__(
        self._handler,
        identifier="diurnal_temperature_index",
        title="Diurnal Temperature Variation (DTR) Indicator",
        version=runner.VERSION,
        abstract="""Metric showing the diurnal temperature indicator to estimate energy demand.""",
        metadata=[
            Metadata('Estimated Calculation Time', '6 minutes'),
            Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
            Metadata('Documentation',
                     'https://esmvaltool.readthedocs.io/en/latest/recipes/recipe_diurnal_temperature_index.html',
                     role=util.WPS_ROLE_DOC),
            # Metadata(
            #     'Media',
            #     util.diagdata_url() + '/dtr/diurnal_temperature_variation.png',
            #     role=util.WPS_ROLE_MEDIA),
        ],
        inputs=[],
        outputs=dtr_outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Declare the Chomsky random text generator process."""
    text_inputs = [
        LiteralInput('times', 'Times',
                     abstract='Generates a random chomsky text.',
                     default='5',
                     data_type='integer'),
    ]
    text_outputs = [
        ComplexOutput('output', 'Chomsky text',
                      as_reference=True,
                      supported_formats=[Format('text/plain')]),
    ]
    super(Chomsky, self).__init__(
        self._handler,
        identifier="chomsky",
        title="Chomsky text generator",
        version="1.0",
        abstract="Generates a random chomsky text",
        inputs=text_inputs,
        outputs=text_outputs,
        status_supported=True,
        store_supported=True)
def __init__(self):
    """Declare the metadata process listing available model data per metric."""
    meta_inputs = [
        LiteralInput('process', 'Process for which to return the available data',
                     abstract='Process for which to return the available data.',
                     default='',
                     data_type='string',
                     min_occurs=0,
                     max_occurs=1),
    ]
    meta_outputs = [
        ComplexOutput('drs', 'CMIP DRS Tree for available data',
                      supported_formats=[Format('application/json')],
                      as_reference=False),
    ]
    super(Meta, self).__init__(
        self._handler,
        identifier='meta',
        version='1.0',
        title='Meta process',
        abstract="""This is not a Metric. This process returns the available model data for the metric processes in this WPS service.""",
        profile='',
        metadata=[
            Metadata('MAGIC WPS Metadata process',
                     'https://c3s-magic-wps.readthedocs.io/en/latest/'),
        ],
        inputs=meta_inputs,
        outputs=meta_outputs,
        store_supported=False,
        status_supported=False)
def __init__(self):
    """Declare the process converting a netCDF file URL to a DAP URL."""
    dap_inputs = [
        ComplexInput(
            'resource',
            "NetCDF file",
            abstract="Link to NetCDF or NcML file on this server",
            supported_formats=[FORMATS.NETCDF],
            # FORMATS.NCML], to become available in PyWPS 4.2.5
            min_occurs=1,
            max_occurs=1),
    ]
    dap_outputs = [
        ComplexOutput('dap', 'DAP url',
                      as_reference=True,
                      supported_formats=[FORMATS.DODS]),
    ]
    super(NcToDap, self).__init__(
        self._handler,
        identifier='nc_to_dap',
        title="Convert file URL to DAP URL",
        abstract="Return Data Access Protocol link to a netCDF or NcML file.",
        version="1",
        metadata=[
            MetadataUrl('User Guide',
                        'http://emu.readthedocs.io/en/latest/',
                        anonymous=True),
        ],
        inputs=dap_inputs,
        outputs=dap_outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare the simple plotting process (dataset in, PNG out)."""
    plot_inputs = [
        ComplexInput('dataset', 'Dataset',
                     supported_formats=[Format('application/x-netcdf')],
                     default=AIR_DS,
                     abstract='Example: {0}'.format(AIR_DS)),
        LiteralInput('variable', 'Variable',
                     data_type='string',
                     default=None,
                     min_occurs=0,
                     max_occurs=1,
                     abstract='Enter the variable name (variable will be detected if not set)'),
    ]
    plot_outputs = [
        ComplexOutput('output', 'Simple Plot',
                      supported_formats=[Format('image/png')],
                      as_reference=True),
    ]
    super(SimplePlot, self).__init__(
        self._handler,
        identifier='simple_plot',
        title='Simple Plot',
        abstract='Returns a nice and simple plot.',
        version='1.0',
        inputs=plot_inputs,
        outputs=plot_outputs,
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Declare a dummy process whose identifiers are not valid Python names."""
    odd_inputs = [
        LiteralInput('input 1', 'Input1 number',
                     default='100', data_type='integer'),
        ComplexInput('input-2', 'json input',
                     supported_formats=[FORMATS.JSON]),
    ]
    odd_outputs = [
        LiteralOutput('output.1', 'Add 1 to `input 1`.',
                      data_type='float'),
        ComplexOutput('output 2', 'Same thing as input-2.',
                      supported_formats=[FORMATS.JSON]),
    ]
    super(NonPyID, self).__init__(
        self._handler,
        identifier='non.py-id',  # TODO:fails with pywps: u'fake.process-for testing &é;'
        title="Dummy process including non-pythonic identifiers",
        abstract="Dummy process whose process, input and output identifiers include characters not allowed in Python.",
        version="1.0",
        inputs=odd_inputs,
        outputs=odd_outputs,
        store_supported=True,
        status_supported=True)
def try_connect(self, graph, linked_input, downstream_task, downstream_task_input): """ Override TaskPE fct. See TaskPE.try_connect for details. The MapPE uses the downstream task input format to set it's own output format and it's set upon connection """ # Set the supported output description which is the same as the downstream task supported input if TaskPE.try_connect(self, graph, linked_input, downstream_task, downstream_task_input): down_task_in_desc = downstream_task.get_input_desc( downstream_task_input) params = dict(identifier=self.MAP_OUTPUT, title=self.MAP_OUTPUT) if down_task_in_desc.dataType == 'ComplexData': params['supported_formats'] = [ Format(mime_type=down_task_in_desc.defaultValue.mimeType, schema=down_task_in_desc.defaultValue.schema, encoding=down_task_in_desc.defaultValue.encoding) ] params['as_reference'] = False self.output_desc = Output( ComplexOutput(**params).describe_xml()) self.output_desc.mimeType = down_task_in_desc.defaultValue.mimeType elif down_task_in_desc.dataType == 'BoundingBoxData': params['crss'] = down_task_in_desc.supportedValues params['as_reference'] = False self.output_desc = Output( BoundingBoxOutput(**params).describe_xml()) else: params['data_type'] = down_task_in_desc.dataType self.output_desc = Output( LiteralOutput(**params).describe_xml()) return True return False