def __init__(self):
    """Configure the Buffer demo process: a GML input plus a float buffer
    distance, producing a buffered GML output."""
    inputs = [
        ComplexInput('poly_in', 'Input1',
                     supported_formats=[get_format('GML')],
                     # max_occurs is an occurrence count; the original passed
                     # the string '2', which breaks numeric comparison against
                     # the number of supplied inputs — pass an int.
                     max_occurs=2),
        LiteralInput('buffer', 'Buffer', data_type='float')
    ]
    outputs = [
        ComplexOutput('buff_out', 'Buffered',
                      supported_formats=[get_format('GML')])
    ]
    super(Buffer, self).__init__(
        self._handler,
        identifier='buffer',
        version='0.1',
        title="Brauni's 1st process",
        abstract='This process is the best ever being coded',
        profile='',
        metadata=['Process', '1st', 'Hilarious'],
        inputs=inputs,
        outputs=outputs,
        store_supported=True,
        status_supported=True)
def create_feature():
    """Build a demo Process that buffers the first feature of a GML input
    and returns the result as a new GML file."""

    def feature(request, response):
        # Path of the complex input on disk.
        vector_path = request.inputs['input'][0].file
        # Open the input vector file.
        try:
            source = ogr.Open(vector_path)
        except Exception as e:
            return "Could not open given vector file: %s" % e
        layer = source.GetLayer()

        # Prepare the GML output data source in the temp directory.
        name = 'point'
        out_path = os.path.join(tempfile.gettempdir(), name)
        driver = ogr.GetDriverByName('GML')
        out_source = driver.CreateDataSource(
            out_path,
            ["XSISCHEMAURI=http://schemas.opengis.net/gml/2.1.2/feature.xsd"])
        out_layer = out_source.CreateLayer(name, None, ogr.wkbUnknown)

        # Buffer the geometry of the first feature only.
        first = layer.GetNextFeature()
        geometry = first.GetGeometryRef()
        buffered = geometry.Buffer(float(100000))

        # Write the buffered geometry into the output layer.
        out_feature = ogr.Feature(feature_def=out_layer.GetLayerDefn())
        out_feature.SetGeometryDirectly(buffered)
        out_layer.CreateFeature(out_feature)
        out_feature.Destroy()

        response.outputs['output'].output_format = Format(
            **FORMATS.GML._asdict())
        response.outputs['output'].file = out_path
        return response

    return Process(handler=feature,
                   identifier='feature',
                   title='Process Feature',
                   inputs=[
                       ComplexInput('input', 'Input',
                                    supported_formats=[get_format('GML')])
                   ],
                   outputs=[
                       ComplexOutput('output', 'Output',
                                     supported_formats=[get_format('GML')])
                   ])
def __init__(self):
    """Register the Centroids demo process: GML layer in, referenced
    JSON output."""
    super(Centroids, self).__init__(
        self._handler,
        identifier='centroids',
        title='Process Centroids',
        inputs=[ComplexInput('layer', 'Layer',
                             supported_formats=[get_format('GML')])],
        outputs=[ComplexOutput('out', 'Referenced Output',
                               supported_formats=[get_format('JSON')])],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Register the GDAL Warp process: GeoTIFF raster in, GeoTIFF out."""
    raster_in = ComplexInput('raster', 'Raster input',
                             supported_formats=[get_format('GEOTIFF')])
    raster_out = ComplexOutput('raster', 'Raster output',
                               supported_formats=[get_format('GEOTIFF')])
    super(Warp, self).__init__(
        self._handler,
        identifier='warp',
        title='GDAL Warp',
        inputs=[raster_in],
        outputs=[raster_out],
        store_supported=True,
        status_supported=True)
def create_feature():
    """Return a demo Process whose handler copies the first feature of a
    GML input, buffered, into a fresh GML file."""

    def feature(request, response):
        src_path = request.inputs["input"][0].file
        # Open the source vector file.
        try:
            data_source = ogr.Open(src_path)
        except Exception as e:
            return "Could not open given vector file: %s" % e
        src_layer = data_source.GetLayer()

        # Destination GML file in the system temp directory.
        basename = "point"
        dst_path = os.path.join(tempfile.gettempdir(), basename)
        gml_driver = ogr.GetDriverByName("GML")
        dst_source = gml_driver.CreateDataSource(
            dst_path,
            ["XSISCHEMAURI=http://schemas.opengis.net/gml/2.1.2/feature.xsd"])
        dst_layer = dst_source.CreateLayer(basename, None, ogr.wkbUnknown)

        # Take only the first source feature and buffer its geometry.
        src_feature = src_layer.GetNextFeature()
        src_geometry = src_feature.GetGeometryRef()
        buffered = src_geometry.Buffer(float(100000))

        dst_feature = ogr.Feature(feature_def=dst_layer.GetLayerDefn())
        dst_feature.SetGeometryDirectly(buffered)
        dst_layer.CreateFeature(dst_feature)
        dst_feature.Destroy()

        response.outputs["output"].output_format = Format(
            **FORMATS.GML._asdict())
        response.outputs["output"].file = dst_path
        return response

    return Process(
        handler=feature,
        identifier="feature",
        title="Process Feature",
        inputs=[ComplexInput("input", "Input",
                             supported_formats=[get_format("GML")])],
        outputs=[ComplexOutput("output", "Output",
                               supported_formats=[get_format("GML")])],
    )
def __init__(self):
    """Set up the Centroids process (GML layer in, JSON reference out)."""
    layer = ComplexInput('layer', 'Layer',
                         supported_formats=[get_format('GML')])
    result = ComplexOutput('out', 'Referenced Output',
                           supported_formats=[get_format('JSON')])
    super(Centroids, self).__init__(
        self._handler,
        identifier='centroids',
        title='Process Centroids',
        inputs=[layer],
        outputs=[result],
        store_supported=True,
        status_supported=True)
def create_sum_one():
    """Build a demo Process that adds 1 to every cell of an input raster
    via GRASS and exports the result as GeoTIFF."""

    def sum_one(request, response):
        input = request.inputs['input'][0].file

        import grass.script as grass

        # Import the raster into GRASS and align the region to it.
        if grass.run_command(
                "r.in.gdal", flags="o", out="input", input=input,
                quiet=True) != 0:
            raise NoApplicableCode("Could not import cost map. "
                                   "Please check the WCS service.")

        if grass.run_command("g.region", flags="a", rast="input") != 0:
            raise NoApplicableCode("Could not set GRASS region.")

        # Add 1 to every cell.
        # NOTE(review): unlike run_command above, a truthy return here is
        # treated as failure — confirm against grass.mapcalc's contract.
        if grass.mapcalc("$output = $input + $value", output="output",
                         input="input", value=1.0, quiet=True):
            raise NoApplicableCode("Could not use GRASS map calculator.")

        # Export the result.
        _, out = tempfile.mkstemp()
        os.environ['GRASS_VERBOSE'] = '-1'
        try:
            if grass.run_command("r.out.gdal", flags="f", input="output",
                                 type="UInt16", output=out,
                                 overwrite=True) != 0:
                raise NoApplicableCode(
                    "Could not export result from GRASS.")
        finally:
            # Always restore the environment; the original leaked
            # GRASS_VERBOSE when the export raised.
            del os.environ['GRASS_VERBOSE']

        response.outputs['output'].file = out
        return response

    return Process(handler=sum_one,
                   identifier='sum_one',
                   title='Process Sum One',
                   inputs=[
                       ComplexInput('input', title='Input',
                                    supported_formats=[Format('image/img')])
                   ],
                   outputs=[
                       ComplexOutput('output', title='Output',
                                     supported_formats=[
                                         get_format('GEOTIFF')])
                   ],
                   grass_location='epsg:4326')
def __init__(self):
    """Register the Area demo process: GML layer in, area string out."""
    layer_input = ComplexInput('layer', 'Layer', [get_format('GML')])
    area_output = LiteralOutput('area', 'Area', data_type='string')
    super(Area, self).__init__(
        self._handler,
        identifier='area',
        title='Process Area',
        inputs=[layer_input],
        outputs=[area_output],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Set up the GDAL Warp process (GeoTIFF raster in and out)."""
    super(Warp, self).__init__(
        self._handler,
        identifier='warp',
        title='GDAL Warp',
        inputs=[ComplexInput('raster', 'Raster input',
                             supported_formats=[get_format('GEOTIFF')])],
        outputs=[ComplexOutput('raster', 'Raster output',
                               supported_formats=[get_format('GEOTIFF')])],
        store_supported=True,
        status_supported=True)
def get_output_desc(self, output_name):
    """
    Implement TaskPE fct. See TaskPE.get_output_desc for details.
    """
    # Only the reduce output is described; any other name yields None.
    if output_name != self.REDUCE_OUTPUT:
        return None
    reduce_output = ComplexOutput(self.REDUCE_OUTPUT,
                                  self.REDUCE_OUTPUT,
                                  supported_formats=[get_format('JSON')],
                                  as_reference=False)
    return Output(reduce_output.describe_xml())
def get_input_desc(self, input_name):
    """
    Implement TaskPE fct. See TaskPE.get_input_desc for details.

    Returns an Input description for the map input, or None for any
    other name.
    """
    if input_name == self.MAP_INPUT:
        return Input(
            ComplexInput(self.MAP_INPUT, self.MAP_INPUT,
                         supported_formats=[get_format('JSON')],
                         min_occurs=1,
                         # sys.maxint was removed in Python 3 (AttributeError
                         # there); sys.maxsize exists on both Python 2 and 3
                         # and serves the same "effectively unbounded" role.
                         max_occurs=sys.maxsize).describe_xml())
    return None
def __init__(self):
    """Register the Buffer demo process (GML input + float distance ->
    buffered GML output)."""
    poly_in = ComplexInput('poly_in', 'Input1',
                           supported_formats=[get_format('GML')],
                           max_occurs='2')
    distance = LiteralInput('buffer', 'Buffer', data_type='float')
    buff_out = ComplexOutput('buff_out', 'Buffered',
                             supported_formats=[get_format('GML')])
    super(Buffer, self).__init__(
        self._handler,
        identifier='buffer',
        version='0.1',
        title="Brauni's 1st process",
        abstract='This process is the best ever being coded',
        profile='',
        metadata=['Process', '1st', 'Hilarious'],
        inputs=[poly_in, distance],
        outputs=[buff_out],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Set up the Area process (one GML input, one string output)."""
    super(Area, self).__init__(
        self._handler,
        identifier='area',
        title='Process Area',
        inputs=[ComplexInput('layer', 'Layer', [get_format('GML')])],
        outputs=[LiteralOutput('area', 'Area', data_type='string')],
        store_supported=True,
        status_supported=True)
def __init__(self):
    """Register the feature-count process: counts features in a GML vector."""
    super(FeatureCount, self).__init__(
        self._handler,
        identifier='feature_count',
        # NOTE(review): version is literally the string 'None' —
        # presumably a placeholder; confirm whether a real version
        # string (or the None object) was intended.
        version='None',
        title='Feature count',
        abstract='This process counts the number of features in a vector',
        profile='',
        metadata=['Feature', 'Count'],
        inputs=[ComplexInput('layer', 'Layer',
                             supported_formats=[get_format('GML')])],
        outputs=[LiteralOutput('count', 'Count', data_type='integer')],
        store_supported=True,
        status_supported=True)
def _handler(self, request, response):
    """Buffer every feature of the 'poly_in' GML input by the 'buffer'
    distance, writing a GML output file and reporting per-feature
    progress on the response."""
    from osgeo import ogr
    inSource = ogr.Open(request.inputs['poly_in'][0].file)
    inLayer = inSource.GetLayer()
    out = inLayer.GetName()
    outPath = os.path.join(tempfile.gettempdir(), out)

    # create output file
    driver = ogr.GetDriverByName('GML')
    outSource = driver.CreateDataSource(
        outPath,
        ["XSISCHEMAURI=http://schemas.opengis.net/gml/2.1.2/feature.xsd"])
    outLayer = outSource.CreateLayer(out, None, ogr.wkbUnknown)

    # for each feature
    featureCount = inLayer.GetFeatureCount()
    index = 0
    import time
    while index < featureCount:
        # get the geometry
        inFeature = inLayer.GetNextFeature()
        inGeometry = inFeature.GetGeometryRef()
        # make the buffer
        buff = inGeometry.Buffer(float(request.inputs['buffer'][0].data))
        # create output feature to the file
        outFeature = ogr.Feature(feature_def=outLayer.GetLayerDefn())
        outFeature.SetGeometryDirectly(buff)
        outLayer.CreateFeature(outFeature)
        outFeature.Destroy()  # makes it crash when using debug
        index += 1
        time.sleep(1)  # making things little bit slower
        # BUG FIX: index was already incremented to count this feature, so
        # report `index` directly; the original used `index + 1`, which
        # announced feature N+1 of N and a progress value above 100 on the
        # last iteration. The redundant `* 1` is dropped as well.
        response.update_status(
            "Calculating buffer for feature %d from %d"
            % (index, featureCount),
            100 * index / featureCount)

    response.outputs['buff_out'].data_format = get_format('GML')
    response.outputs['buff_out'].file = outPath
    return response
def _handler(self, request, response):
    """Buffer each feature of the GML input by the requested distance and
    return the result as a GML file, updating status along the way."""
    from osgeo import ogr
    source = ogr.Open(request.inputs['poly_in'][0].file)
    layer = source.GetLayer()
    name = layer.GetName()
    target_path = os.path.join(tempfile.gettempdir(), name)

    # Create the GML output data source.
    driver = ogr.GetDriverByName('GML')
    target = driver.CreateDataSource(
        target_path,
        ["XSISCHEMAURI=http://schemas.opengis.net/gml/2.1.2/feature.xsd"])
    target_layer = target.CreateLayer(name, None, ogr.wkbUnknown)

    total = layer.GetFeatureCount()
    import time
    # `processed` counts features already handled after each iteration,
    # matching the original post-increment index.
    for processed in range(1, total + 1):
        feature = layer.GetNextFeature()
        geometry = feature.GetGeometryRef()
        buffered = geometry.Buffer(float(request.inputs['buffer'][0].data))
        new_feature = ogr.Feature(feature_def=target_layer.GetLayerDefn())
        new_feature.SetGeometryDirectly(buffered)
        target_layer.CreateFeature(new_feature)
        new_feature.Destroy()  # makes it crash when using debug
        time.sleep(1)  # making things little bit slower
        # NOTE(review): reports processed+1 of total, which overshoots on
        # the final feature — kept as-is to preserve behavior; confirm
        # whether `processed` was intended.
        response.update_status(
            "Calculating buffer for feature %d from %d"
            % (processed + 1, total),
            (100 * (processed + 1) / total * 1))

    response.outputs['buff_out'].data_format = get_format('GML')
    response.outputs['buff_out'].file = target_path
    return response
def grass_file_based_location():
    """Return a Process creating a GRASS location from a georeferenced file."""

    def file_location(request, response):
        """Check whether the datum of a mapset corresponds the file one."""
        from grass.script import parse_command
        proj_info = parse_command('g.proj', flags='g')
        assert proj_info['datum'] == 'wgs84', \
            'Error in creating a GRASS location based on a file'
        return response

    return Process(
        handler=file_location,
        identifier='my_file_based_location',
        title='File location',
        inputs=[ComplexInput(identifier='input1',
                             supported_formats=[get_format('GEOTIFF')],
                             title="Name of input vector map")],
        grass_location="complexinput:input1")
from pywps import LiteralInput, ComplexOutput from pavics import nccombo # Example usage: # # localhost/pywps?service=WPS&request=execute&version=1.0.0&\ # identifier=getpoint&DataInputs=\ # opendap_url=http://132.217.140.45:8083/thredds/dodsC/birdhouse/ouranos/\ # subdaily/aev/shum/aev_shum_1962.nc;\ # opendap_url=http://132.217.140.45:8083/thredds/dodsC/birdhouse/ouranos/\ # subdaily/aev/shum/aev_shum_1963.nc;variable=SHUM;ordered_indice=0;\ # ordered_indice=0,ordered_indice=70,ordered_indice=30 json_output_path = configuration.get_config_value('server', 'outputpath') json_format = get_format('JSON') class GetPoint(Process): def __init__(self): # From pywps4 code : time_format = '%Y-%m-%dT%H:%M:%S%z' # Is that a bug? %z should be %Z # Using 'string' data_type until this is corrected. inputs = [ LiteralInput('opendap_url', 'OPeNDAP url to NetCDF file', abstract='OPeNDAP url to NetCDF file.', data_type='string', max_occurs=100000), LiteralInput('variable', 'NetCDF variable name',
# values in the Solr database. This could be an option as an input... # The list of metadata to scan should be in a config file, let's input # it manually for now: my_facets = ['experiment', 'frequency', 'institute', 'model', 'project'] # variable, variable_long_name and cf_standard_name, are not necessarily # in the global attributes, need to come back for this later... # This list of ignored thredds directories could also be a config... my_thredds_ignore = ['birdhouse/wps_outputs', 'birdhouse/workspaces'] # The user under which apache is running must be able to write to that # directory. output_path = configuration.get_config_value('server', 'outputpath') json_format = get_format('JSON') gmlxml_format = get_format('GML') text_format = get_format('TEXT') class PavicsCrawler(Process): def __init__(self): self.solr_server = os.environ.get('SOLR_HOST', None) env_thredds_host = os.environ.get('THREDDS_HOST', '') self.wms_alternate_server = os.environ.get( 'WMS_ALTERNATE_SERVER', None) self.thredds_servers = map(str.strip, env_thredds_host.split(',')) self.magpie_host = os.environ.get('MAGPIE_HOST', None) self.magpie_credentials = dict( provider_name='ziggurat', user_name=os.environ.get('MAGPIE_USER', ''),
# Example usage: # # List facets values: # localhost/pywps?service=WPS&request=execute&version=1.0.0&\ # identifier=pavicsearch&DataInputs=facets=* # # Search by facet: # localhost/pywps?service=WPS&request=execute&version=1.0.0&\ # identifier=pavicsearch&DataInputs=constraints=model:CRCM4,experiment:rcp85 # The user under which apache is running must be able to write to that # directory. json_output_path = configuration.get_config_value('server', 'outputpath') json_format = get_format('JSON') gmlxml_format = get_format('GML') class PavicsSearch(Process): def __init__(self): self.solr_server = os.environ.get('SOLR_HOST', None) self.magpie_host = os.environ.get('MAGPIE_HOST', None) svc_name = os.environ.get('THREDDS_HOST_MAGPIE_SVC_NAME', '') self.magpie_thredds_servers = { svc_name: host for svc_name, host in zip( map(str.strip, svc_name.split(',')), map(str.strip, os.environ.get('THREDDS_HOST', '').split(','))) }
import ESMF import ocgis from eggshell.log import init_process_logger from pywps import ComplexInput, ComplexOutput from pywps import Format, configuration, get_format from pywps import LiteralInput from pywps import Process from pywps.app.Common import Metadata from flyingpigeon.utils import archiveextract from flyingpigeon.utils import rename_complexinputs LOGGER = logging.getLogger("PYWPS") json_format = get_format('JSON') # Supported interpolation methods methods = list(map(str.lower, ESMF.RegridMethod.__members__.keys())) def extract_doc(): """Format the documentation about the ESMF regridding methods.""" import inspect import re source = inspect.getsource(ESMF.RegridMethod) doc = source.replace('"""', '') def title(match): [name] = match.groups()
# resource=http://x.x.x.x:8083/thredds/dodsC/birdhouse/ncep/cfsr/pr/\ # pr_1hr_cfsr_reanalysis_197901.nc;\ # typename=usa:states;featureids=states.4 env_geoserver_host = os.environ['GEOSERVER_HOST'] wfs_server = ("http://{0}/geoserver/ows?service=WFS&version=1.0.0" "&request=GetFeature&typeName=") wfs_server = wfs_server.format(env_geoserver_host) netcdf_output_path = configuration.get_config_value('server', 'outputpath') # In the context of the wps server running from docker, where the output # path comes from a docker volume, we may need to recreate this directory: if not os.path.isdir(netcdf_output_path): os.makedirs(netcdf_output_path) json_format = get_format('JSON') netcdf_format = get_format('NETCDF') # This should really be somewhere else... def conn_port_fix(conn_fn, netloc): decode_netloc = netloc.split(':') if len(decode_netloc) == 1: return conn_fn(netloc) else: return conn_fn(decode_netloc[0], decode_netloc[-1]) # This should really be somewhere else... def url_result(url): parsed_url = urlparse(url)
from pavics import catalog # Example usage: # localhost/pywps?service=WPS&request=execute&version=1.0.0&\ # identifier=pavicsupdate&DataInputs=source=source_string;url=url_string;\ # updates=subject:new_subject,units:m # Still need to perhaps validate the inputs, and consider whether we want # to do updates that involve list of entries (not tested yet) # The user under which apache is running must be able to write to that # directory. json_output_path = configuration.get_config_value('server', 'outputpath') json_format = get_format('JSON') gmlxml_format = get_format('GML') class PavicsUpdate(Process): def __init__(self): self.solr_server = os.environ.get('SOLR_HOST', None) # The combination of the 'source' and 'url' fields provide the 'id' # in the Solr database, they both must be provided. inputs = [LiteralInput('id', 'id field of the dataset or file', abstract='id field of the dataset or file.', data_type='string'), LiteralInput('type', 'Dataset or File', abstract=('The File type will update a single '