def _executeRequest(self, processid, inputs, verbose):
    """Call the Web Processing Service with the specified user inputs.

    When ``verbose`` is falsy, the status output printed while monitoring
    the request is captured and parsed, and the path of the downloaded
    result file is returned.  When ``verbose`` is truthy, status messages
    print normally and nothing is returned.
    """
    wps = WebProcessingService(WPS_URL)
    if not verbose:
        # Capture the request/status chatter instead of printing it.
        old_stdout = sys.stdout
        result = StringIO()
        sys.stdout = result
        try:
            # executes the request
            execution = wps.execute(processid, inputs, output="OUTPUT")
            monitorExecution(execution, download=True)
        finally:
            # Fix: always restore stdout, even if the request raises —
            # previously an exception left sys.stdout pointing at StringIO.
            sys.stdout = old_stdout
        # The second-to-last captured line ends with the saved file's path.
        # NOTE(review): assumes owslib's monitor log format — confirm.
        lines = result.getvalue().split('\n')
        tmp = lines[-2].split(' ')
        return tmp[-1]
    # Verbose path: run the request with status messages printed normally.
    execution = wps.execute(processid, inputs, output="OUTPUT")
    monitorExecution(execution, download=True)
def _executeRequest(self, processid, inputs, verbose):
    """Call the Web Processing Service with the specified user inputs.

    If ``verbose`` is False, stdout emitted while monitoring the request is
    captured and parsed to extract (and return) the path of the downloaded
    output file.  If ``verbose`` is True, status messages print normally
    and no value is returned.
    """
    wps = WebProcessingService(WPS_URL)
    if not verbose:
        # redirects the standard output to avoid printing request status
        old_stdout = sys.stdout
        result = StringIO()
        sys.stdout = result
        try:
            # executes the request
            execution = wps.execute(processid, inputs, output="OUTPUT")
            monitorExecution(execution, download=True)
        finally:
            # Fix: restore stdout even when the request raises; the original
            # left stdout redirected on any exception.
            sys.stdout = old_stdout
        # parses the redirected output to get the filepath of the saved file
        # NOTE(review): relies on owslib's monitor log format — confirm.
        captured = result.getvalue().split('\n')
        tmp = captured[-2].split(' ')
        return tmp[-1]
    # executes the request with live status output
    execution = wps.execute(processid, inputs, output="OUTPUT")
    monitorExecution(execution, download=True)
def test_wps_response6():
    """Parse a cached PML vector.cgi Execute response and verify its output."""
    # Build WPS object; service has been down for some time so skip caps here
    wps = WebProcessingService('http://rsg.pml.ac.uk/wps/vector.cgi', skip_caps=True)
    # Execute fake WPS invocation using cached request/response documents.
    # Fix: close the files deterministically instead of leaking handles.
    with open(resource_file('wps_PMLExecuteRequest6.xml'), 'rb') as f:
        request = f.read()
    with open(resource_file('wps_PMLExecuteResponse6.xml'), 'rb') as f:
        response = f.read()
    execution = wps.execute(None, [], request=request, response=response)
    # Check execution result
    assert execution.status == 'ProcessSucceeded'
    assert execution.url == 'http://rsg.pml.ac.uk/wps/vector.cgi'
    assert execution.statusLocation == \
        'http://rsg.pml.ac.uk/wps/wpsoutputs/pywps-132084838963.xml'
    assert execution.serviceInstance == \
        'http://rsg.pml.ac.uk/wps/vector.cgi?service=WPS&request=GetCapabilities&version=1.0.0'
    assert execution.version == '1.0.0'
    # check single output
    output = execution.processOutputs[0]
    assert output.identifier == 'output'
    assert output.title == 'Name for output vector map'
    assert output.mimeType == 'text/xml'
    assert output.dataType == 'ComplexData'
    assert output.reference is None
    response = output.data[0]
    should_return = '''<ns3:FeatureCollection xmlns:ns3="http://ogr.maptools.org/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ns0="http://www.opengis.net/wps/1.0.0" xsi:schemaLocation="http://ogr.maptools.org/ output_0n7ij9D.xsd">\n\t\t\t\t\t <gml:boundedBy xmlns:gml="http://www.opengis.net/gml">\n\t\t\t\t\t <gml:Box>\n\t\t\t\t\t <gml:coord><gml:X>-960123.1421801626</gml:X><gml:Y>4665723.56559387</gml:Y></gml:coord>\n\t\t\t\t\t <gml:coord><gml:X>-101288.6510608822</gml:X><gml:Y>5108200.011823481</gml:Y></gml:coord>\n\t\t\t\t\t </gml:Box>\n\t\t\t\t\t </gml:boundedBy> \n\t\t\t\t\t <gml:featureMember xmlns:gml="http://www.opengis.net/gml">\n\t\t\t\t\t <ns3:output fid="F0">\n\t\t\t\t\t <ns3:geometryProperty><gml:LineString><gml:coordinates>-960123.142180162365548,4665723.565593870356679,0 -960123.142180162365548,4665723.565593870356679,0 
-960123.142180162598379,4665723.565593870356679,0 -960123.142180162598379,4665723.565593870356679,0 -711230.141176006174646,4710278.48552671354264,0 -711230.141176006174646,4710278.48552671354264,0 -623656.677859728806652,4848552.374973464757204,0 -623656.677859728806652,4848552.374973464757204,0 -410100.337491964863148,4923834.82589447684586,0 -410100.337491964863148,4923834.82589447684586,0 -101288.651060882242746,5108200.011823480948806,0 -101288.651060882242746,5108200.011823480948806,0 -101288.651060882257298,5108200.011823480948806,0 -101288.651060882257298,5108200.011823480948806,0</gml:coordinates></gml:LineString></ns3:geometryProperty>\n\t\t\t\t\t <ns3:cat>1</ns3:cat>\n\t\t\t\t\t <ns3:id>1</ns3:id>\n\t\t\t\t\t <ns3:fcat>0</ns3:fcat>\n\t\t\t\t\t <ns3:tcat>0</ns3:tcat>\n\t\t\t\t\t <ns3:sp>0</ns3:sp>\n\t\t\t\t\t <ns3:cost>1002619.181</ns3:cost>\n\t\t\t\t\t <ns3:fdist>0</ns3:fdist>\n\t\t\t\t\t <ns3:tdist>0</ns3:tdist>\n\t\t\t\t\t </ns3:output>\n\t\t\t\t\t </gml:featureMember>\n\t\t\t\t\t</ns3:FeatureCollection>''' # noqa
    assert compare_xml(should_return, response) is True
def complex_input_with_content():
    """Demo: run 'wordcount' on a ComplexDataInput carrying inline content."""
    print("\ncomplex_input_with_content ...")
    wps = WebProcessingService('http://localhost:8094/wps', verbose=verbose)
    processid = 'wordcount'
    # alice in wonderland
    textdoc = ComplexDataInput("ALICE was beginning to get very tired ...")
    inputs = [("text", textdoc)]
    # list of tuple (output identifier, asReference attribute, mimeType attribute)
    # when asReference or mimeType is None - the wps service will use its default option
    outputs = [("output", True, 'some/mime-type')]
    execution = wps.execute(processid, inputs, output=outputs)
    monitorExecution(execution)
    # show status
    print('percent complete', execution.percentCompleted)
    print('status message', execution.statusMessage)
    for result in execution.processOutputs:
        print('identifier=%s, dataType=%s, data=%s, reference=%s' %
              (result.identifier, result.dataType, result.data, result.reference))
def run_wps(process_id, input, output):
    """Execute a WPS process synchronously and return its raw result.

    Replays the generated request with storeExecuteResponse forced off,
    extracts the result href from the response, fetches it, and returns
    [fetched_body, split_href_fragment].
    """
    # choose the first wps engine
    my_engine = WebProcessingService(
        'http://appsdev.hydroshare.org:8282/wps/WebProcessingService',
        verbose=False, skip_caps=True)
    my_engine.getcapabilities()
    my_process = my_engine.describeprocess(process_id)
    # getting list of input (kept for parity with the original code)
    input_names = [inp for inp in my_process.dataInputs]
    # executing the process..
    execution = my_engine.execute(process_id, input, output)
    # set store executeresponse to false so the replayed call blocks
    request = execution.request.replace('storeExecuteResponse="true"',
                                        'storeExecuteResponse="false"')
    url_wps = 'http://appsdev.hydroshare.org:8282/wps/WebProcessingService'
    wps_read = urllib2.urlopen(urllib2.Request(url_wps, request)).read()
    if 'href' in wps_read:
        # carve the href value out of the raw XML response
        tag = 'href="'
        location = wps_read.find(tag)
        new = wps_read[location + len(tag):len(wps_read)]
        tag2 = '"/>\n </wps:Output>\n </wps:ProcessOutputs>\n</wps:'
        final = new[0:new.find(tag2)]
        split = final.split()
        # fetch the referenced result document
        wps_read1 = urllib2.urlopen(urllib2.Request(split[0])).read()
    # return [final_output_url, final_data]
    # NOTE(review): if 'href' is absent this raises NameError, matching the
    # original control flow.
    return [wps_read1, split]
def call_wps(args):
    """Execute a dataminer WPS process for the parsed CLI arguments.

    Resolves the gcube token (from --token or the per-user token file),
    builds inputs/outputs from the process description, runs the process,
    downloads the results, and returns 0 on success, 1 otherwise.
    """
    if args.token:
        gcube_vre_token = args.token.encode("utf-8")
    else:
        # fall back to the per-user token file under /etc/d4science/
        user_id = args.user
        if not user_id:
            logging.error("No user id found on the call, aborting!")
            sys.exit(1)
        user_token_file = os.path.join("/etc/d4science/", user_id)
        with open(user_token_file, "r") as f:
            gcube_vre_token = f.read()
    logging.info("User: %s", args.user)
    logging.info("Token: (SHA256) %s",
                 hashlib.sha256(gcube_vre_token).hexdigest())
    gcube_vre_token_header = {"gcube-token": gcube_vre_token}
    dataminer_url = (
        "http://dataminer-prototypes.d4science.org/wps/"
        "WebProcessingService"
    )
    wps = WebProcessingService(dataminer_url, headers=gcube_vre_token_header)
    process_id = args.process
    process = wps.describeprocess(process_id)
    inputs = build_inputs(process, args.input, args.inputdata, gcube_vre_token)
    # request every declared output by reference
    outputs = [(out.identifier, True) for out in process.processOutputs]
    # execute the process and poll until done, downloading results
    execution = wps.execute(process_id, inputs, outputs)
    monitorExecution(execution, sleepSecs=5, download=True)
    logging.info("Execution status: %s", execution.status)
    exit_code = 0 if execution.status == "ProcessSucceeded" else 1
    logging.info("Exit code: %d", exit_code)
    produce_output(execution, args.output, args.outdir, gcube_vre_token_header)
    return exit_code
def multiple_outputs():
    """Demo: request several outputs from the dummyprocess and print them."""
    print("\nmultiple outputs ...")
    # get multiple outputs
    wps = WebProcessingService('http://localhost:8094/wps', verbose=verbose)
    processid = 'dummyprocess'
    inputs = [("input1", '1'), ("input2", '2')]
    # list of tuple (output identifier, asReference attribute)
    outputs = [("output1", True), ("output2", False)]
    execution = wps.execute(processid, inputs, output=outputs)
    monitorExecution(execution)
    # show status
    print('percent complete', execution.percentCompleted)
    print('status message', execution.statusMessage)
    # outputs
    for out in execution.processOutputs:
        print('identifier=%s, dataType=%s, data=%s, reference=%s' %
              (out.identifier, out.dataType, out.data, out.reference))
    # errors
    print(execution.status)
    for err in execution.errors:
        print(err.code, err.locator, err.text)
def _generateRequest(dataSetURI, algorithm, method, varID, verbose):
    """
    Takes a dataset uri, algorithm, method, and datatype. This function will
    generate a simple XML document to make the request specified. (Only works
    for ListOpendapGrids and GetGridTimeRange). Will return a list containing
    the info requested for (either data types or time range).

    Raises ValueError for an unsupported ``method``.
    """
    POST = WebProcessingService(WPS_Service, verbose=verbose)
    xmlGen = gdpXMLGenerator()
    root = xmlGen.getXMLRequestTree(dataSetURI, algorithm, method, varID, verbose)
    request = etree.tostring(root)
    execution = POST.execute(None, [], request=request)
    _execute_request._check_for_execution_errors(execution)
    # Map each supported method to the XML tag searched for in the response.
    # Fix: an unrecognised method previously fell through the if/elif chain
    # and raised NameError on 'seekterm'; now it fails fast with a clear error.
    seekterms = {
        'getDataSetTime': '{xsd/gdptime-1.0.xsd}time',
        'getDataType': '{xsd/gdpdatatypecollection-1.0.xsd}name',
        'getDataLongName': '{xsd/gdpdatatypecollection-1.0.xsd}description',
        'getDataUnits': '{xsd/gdpdatatypecollection-1.0.xsd}unitsstring',
    }
    if method not in seekterms:
        raise ValueError('Unsupported method: %s' % method)
    return _parseXMLNodesForTagText(execution.response, seekterms[method])
def complex_input_with_reference():
    """Demo: run 'wordcount' on a ComplexDataInput referencing a document URL."""
    print("\ncomplex_input_with_reference ...")
    wps = WebProcessingService('http://localhost:8094/wps', verbose=verbose)
    processid = 'wordcount'
    # alice in wonderland
    textdoc = ComplexDataInput(
        "http://www.gutenberg.org/files/28885/28885-h/28885-h.htm"
    )
    inputs = [("text", textdoc)]
    # list of tuple (output identifier, asReference attribute)
    outputs = [("output", True)]
    execution = wps.execute(processid, inputs, output=outputs)
    monitorExecution(execution)
    # show status
    print('percent complete', execution.percentCompleted)
    print('status message', execution.statusMessage)
    for result in execution.processOutputs:
        print('identifier=%s, dataType=%s, data=%s, reference=%s' %
              (result.identifier, result.dataType, result.data, result.reference))
def complex_input_with_content():
    """Demo: run 'wordcount' on inline ComplexDataInput content."""
    print("\ncomplex_input_with_content ...")
    wps = WebProcessingService('http://localhost:8094/wps', verbose=verbose)
    processid = 'wordcount'
    # alice in wonderland
    textdoc = ComplexDataInput(
        "ALICE was beginning to get very tired ...")
    inputs = [("text", textdoc)]
    # list of tuple (output identifier, asReference attribute)
    outputs = [("output", True)]
    execution = wps.execute(processid, inputs, output=outputs)
    monitorExecution(execution)
    # show status
    print('percent complete', execution.percentCompleted)
    print('status message', execution.statusMessage)
    for result in execution.processOutputs:
        print('identifier=%s, dataType=%s, data=%s, reference=%s' %
              (result.identifier, result.dataType, result.data, result.reference))
def _generateRequest(self, dataSetURI, algorithm, method, varID=None, verbose=False):
    """
    Takes a dataset uri, algorithm, method, and datatype. This function will
    generate a simple XML document to make the request specified. (Only works
    for ListOpendapGrids and GetGridTimeRange). Will return a list containing
    the info requested for (either data types or time range).
    """
    wps_Service = 'http://cida.usgs.gov/gdp/utility/WebProcessingService'
    POST = WebProcessingService(wps_Service, verbose=False)
    xmlGen = gdpXMLGenerator()
    root = xmlGen.getXMLRequestTree(dataSetURI, algorithm, method, varID, verbose)
    request = etree.tostring(root)
    old_stdout = sys.stdout
    if not verbose:
        # silence the waiting/status chatter printed during execution
        sys.stdout = StringIO()
    try:
        execution = POST.execute(None, [], request=request)
    finally:
        # Fix: always restore stdout; the original left it redirected when
        # POST.execute raised (and only restored it in the non-verbose path).
        sys.stdout = old_stdout
    # tag to search for in the response document
    seekterm = 'time' if method == 'getDataSetTime' else 'name'
    return self._parseXMLNodesForTagText(execution.response, seekterm)
async def fimex_transfer(*, user_id: str, email: EmailStr, uri: str, wps_url: str,
                         reducetime_start: str = None, reducetime_end: str = None,
                         interpolate_proj_string: str = None, interpolate_method: str = None,
                         select_variables: str,
                         interpolate_xaxis_min: str = None, interpolate_xaxis_max: str = None,
                         interpolate_yaxis_min: str = None, interpolate_yaxis_max: str = None,
                         interpolate_xaxis_units: str = None, interpolate_yaxis_units: str = None,
                         reducebox_east: str, reducebox_south: str,
                         reducebox_west: str, reducebox_north: str,
                         interpolate_hor_steps: str = None,
                         inputtype: str, outputtype: str,
                         background_tasks: BackgroundTasks):
    """Order a fimex 'transformation' WPS job for each input file in *uri*.

    Builds one Fimex parameter object per comma-separated input file,
    records the order in Solr, submits one WPS execution per file (final
    handling is deferred to a background task), and returns the transaction
    as JSON.  Raises HTTPException(502) if the WPS endpoint is unreachable.
    """
    # one Fimex configuration per comma-separated input file
    input_files = uri.split(",")
    fimex_list = []
    for input_file in input_files:
        print(input_file)
        fimex_list.append(
            Fimex(wps_url, input_file, reducetime_start, reducetime_end,
                  interpolate_proj_string, interpolate_method, select_variables,
                  interpolate_xaxis_min, interpolate_xaxis_max,
                  interpolate_yaxis_min, interpolate_yaxis_max,
                  interpolate_xaxis_units, interpolate_yaxis_units,
                  reducebox_east, reducebox_south, reducebox_west,
                  reducebox_north, interpolate_hor_steps, inputtype, outputtype))
    # Example endpoints:
    # wps=http://localhost:5000/wps?request=GetCapabilities&service=WPS
    # input_file = 'http://OpeDAP-server/thredds/dodsC/NBS/S2B/2018/02/18/S2B_MSIL1C_20180218T110109_N0206_R094_T33WWS_20180218T144023.nc'
    # wps=http://localhost:5000/cgi-bin/pywps.cgi?service=wps&version=1.0.0&request=getcapabilities
    wps = WebProcessingService(wps_url, verbose=False, skip_caps=True)
    config = confuse.Configuration('Basket', __name__)
    # record the order in the 'basket' Solr core before submitting jobs
    transaction = Transaction(str(uuid.uuid4()), user_id, email,
                              status.Status.ORDERED, "nordatanet", fimex_list)
    solr_client = SolrClient(config['solr']['endpoint'].get(), "basket")
    solr_client.update(transaction.toSolrDocument())
    try:
        for fimex in fimex_list:
            # submit one transformation per input file; completion handling
            # (doFinal) runs in a FastAPI background task
            execution = wps.execute('transformation', fimex.input_map(),
                                    output=fimex.output_map())
            background_tasks.add_task(doFinal, execution, email, transaction)
            print(execution.statusLocation)
    except requests.exceptions.ConnectionError as ce:
        # surface connectivity failures as a 502 to the API caller
        raise HTTPException(status_code=502, detail="Failed to establish a connection")
    return transaction.toJson()
def _executeRequest(self, processid, inputs, output, verbose):
    """Call the Web Processing Service with the specified user inputs.

    Monitors the execution, captures the download log output, and returns
    the path of the downloaded result file parsed from that log.  When
    ``verbose`` is falsy the status-monitoring phase is also silenced.
    """
    wps = WebProcessingService(WPS_URL)
    old_stdout = sys.stdout
    # create StringIO() for listening to print
    result = StringIO()
    try:
        if not verbose:
            # redirect standard output during status monitoring too
            sys.stdout = result
        execution = wps.execute(processid, inputs, output)
        monitorExecution(execution, download=False)  # monitors for success
        # capture the download messages so the saved filepath can be parsed
        sys.stdout = result
        monitorExecution(execution, download=True)
    finally:
        # Fix: restore stdout even when monitoring raises — the original
        # only restored it on the success path.
        sys.stdout = old_stdout
    # the second-to-last captured line ends with the saved file's path
    # NOTE(review): assumes owslib's monitor log format — confirm.
    lines = result.getvalue().split('\n')
    tmp = lines[-2].split(' ')
    return tmp[-1]
def uploadShapeFile(filePath):
    """Encode a shapefile archive and upload it onto geoserver.

    Returns "upload:<name>" where <name> is the server-side file name,
    or None when the file could not be encoded.
    """
    # encodes the file; returns a filename in form of: filename_copy.zip
    filePath = _encodeZipFolder(filePath)
    if filePath is None:
        return
    # Fix: use a context manager so the handle is closed even if read fails.
    # NOTE(review): opened in text mode ('r'); binary ('rb') is likely safer
    # for zip content on Python 3 — confirm against gdpXMLGenerator's needs.
    with open(filePath, 'r') as filehandle:
        filedata = filehandle.read()
    os.remove(filePath)  # deletes the encoded file
    # name used on geoServer: strip directories and the "_copy.zip" suffix
    filename = filePath.split("/")[-1].replace("_copy.zip", "")
    xml_gen = gdpXMLGenerator()
    root = xml_gen.getUploadXMLtree(filename, upload_URL, filedata)
    # now we have a complete XML upload request
    upload_request = etree.tostring(root)
    post = WebProcessingService(WPS_Service)
    execution = post.execute(None, [], request=upload_request)
    monitorExecution(execution)
    return "upload:" + filename
def _executeRequest(processid, inputs, output, verbose=True, outputFilePath=None, sleepSecs=10):
    """Call the Web Processing Service and download the result file.

    Polls the execution status until completion, tolerating up to
    ``WPS_attempts`` consecutive transient status failures, then downloads
    the result (again with retries).  Returns the output file path.
    Raises Exception when either phase exceeds the retry budget.
    """
    wps = WebProcessingService(WPS_URL)
    execution = wps.execute(processid, inputs, output)
    err_count = 1
    while not execution.isComplete():
        try:
            monitorExecution(execution, sleepSecs, download=False)  # monitors for success
            err_count = 1
        except Exception:
            log.warning(
                'An error occurred while checking status, checking again. Sleeping %d seconds...' % sleepSecs)
            err_count += 1
            if err_count > WPS_attempts:
                raise Exception(
                    'The status document failed to return, status checking has aborted. There has been a network or server issue preventing the status document from being retrieved, the request may still be running. For more information, check the status url %s' % execution.statusLocation)
            sleep(sleepSecs)
    if outputFilePath is None:
        # default file name: gdp_<processid>_<timestamp>
        outputFilePath = 'gdp_' + processid.replace(
            '.', '-') + '_' + strftime("%Y-%m-%dT%H-%M-%S-%Z")
    done = False
    err_count = 1
    while not done:
        try:
            monitorExecution(execution, download=True, filepath=outputFilePath)
            done = True
        except Exception:
            log.warning(
                'An error occurred while trying to download the result file, trying again.'
            )
            err_count += 1
            sleep(sleepSecs)
            if err_count > WPS_attempts:
                raise Exception(
                    "The process completed successfully, but an error occurred while downloading the result. You may be able to download the file using the link at the bottom of the status document: %s" % execution.statusLocation)
    _check_for_execution_errors(execution)
    return outputFilePath
def _executeRequest(processid, inputs, output, verbose=True, outputFilePath=None, sleepSecs=10):
    """Call the Web Processing Service and download the result file.

    Polls until the execution completes, allowing up to ``WPS_attempts``
    consecutive transient status failures, then downloads the result with
    the same retry budget.  Returns the output file path; raises Exception
    when either phase exhausts its retries.
    """
    wps = WebProcessingService(WPS_URL)
    execution = wps.execute(processid, inputs, output)
    err_count = 1
    while not execution.isComplete():
        try:
            monitorExecution(
                execution, sleepSecs, download=False)  # monitors for success
            err_count = 1
        except Exception:
            log.warning(
                'An error occurred while checking status, checking again. Sleeping %d seconds...' % sleepSecs)
            err_count += 1
            if err_count > WPS_attempts:
                raise Exception(
                    'The status document failed to return, status checking has aborted. There has been a network or server issue preventing the status document from being retrieved, the request may still be running. For more information, check the status url %s' % execution.statusLocation)
            sleep(sleepSecs)
    if outputFilePath is None:
        # default file name: gdp_<processid>_<timestamp>
        outputFilePath = 'gdp_' + processid.replace(
            '.', '-') + '_' + strftime("%Y-%m-%dT%H-%M-%S-%Z")
    done = False
    err_count = 1
    while not done:
        try:
            monitorExecution(execution, download=True, filepath=outputFilePath)
            done = True
        except Exception:
            log.warning(
                'An error occurred while trying to download the result file, trying again.'
            )
            err_count += 1
            sleep(sleepSecs)
            if err_count > WPS_attempts:
                raise Exception(
                    "The process completed successfully, but an error occurred while downloading the result. You may be able to download the file using the link at the bottom of the status document: %s" % execution.statusLocation)
    _check_for_execution_errors(execution)
    return outputFilePath
def _execute(self):
    """Run the job's process against the local WPS endpoint and store the execution."""
    endpoint = "http://localhost:{}/wps".format(self.port)
    wps_request = self.job.wps_request
    client = WPS(url=endpoint, skip_caps=True)
    self.execution = client.execute(
        wps_request.identifier,
        inputs=get_inputs(wps_request.inputs),
        output=get_output(wps_request.outputs),
        mode=self.job.process.async_)
def execute_workflow(self, userid, url, workflow):
    """Run a dispel workflow via WPS, tracking progress in the jobs collection.

    Secures the worker/service URLs for *userid*, launches the 'workflow'
    process, then polls every few seconds, mirroring the execution state
    into the job document until completion.  Returns the final status string.
    """
    registry = app.conf['PYRAMID_REGISTRY']
    db = mongodb(registry)
    # generate and run dispel workflow
    # TODO: fix owslib wps for unicode/yaml parameters
    logger.debug('workflow=%s', workflow)
    # using secure url
    workflow['worker']['url'] = secure_url(db, workflow['worker']['url'], userid)
    inputs=[('workflow', json.dumps(workflow))]
    logger.debug('inputs=%s', inputs)
    outputs=[('output', True), ('logfile', True)]
    wps = WebProcessingService(url=secure_url(db, url, userid), skip_caps=True, verify=False)
    # worker service is contacted for its metadata (title) only
    worker_wps = WebProcessingService(url=workflow['worker']['url'], skip_caps=False, verify=False)
    execution = wps.execute(identifier='workflow', inputs=inputs, output=outputs)
    job = add_job(db, userid,
                  task_id = self.request.id,
                  is_workflow = True,
                  service = worker_wps.identification.title,
                  title = workflow['worker']['identifier'],
                  abstract = '',
                  status_location = execution.statusLocation)
    while execution.isNotComplete():
        try:
            # refresh status and mirror it into the job document
            execution.checkStatus(sleepSecs=3)
            job['status'] = execution.getStatus()
            job['status_message'] = execution.statusMessage
            job['is_complete'] = execution.isComplete()
            job['is_succeded'] = execution.isSucceded()
            job['progress'] = execution.percentCompleted
            duration = datetime.now() - job.get('created', datetime.now())
            job['duration'] = str(duration).split('.')[0]
            if execution.isComplete():
                job['finished'] = datetime.now()
                if execution.isSucceded():
                    # pull the worker status location out of the yaml result
                    for output in execution.processOutputs:
                        if 'output' == output.identifier:
                            result = yaml.load(urllib.urlopen(output.reference))
                            job['worker_status_location'] = result['worker']['status_location']
                    job['progress'] = 100
                    log(job)
                else:
                    job['status_message'] = '\n'.join(error.text for error in execution.errors)
                    for error in execution.errors:
                        log_error(job, error)
            else:
                log(job)
        except:
            # NOTE(review): bare except hides all failures of the status
            # poll; persisted state is only updated on clean iterations.
            logger.exception("Could not read status xml document.")
        else:
            db.jobs.update({'identifier': job['identifier']}, job)
    return execution.getStatus()
def test_wps_pelican_subset():
    """Subset a test dataset via the pelican_subset process (sync mode)."""
    headers = {'COMPUTE-TOKEN': 'TOKEN'}
    wps = WebProcessingService(url='http://localhost:5000/wps', headers=headers, verify=True)
    d0 = Domain(dict(time=Dimension(0, 1, crs='indices')))
    v0 = Variable(uri=TEST_SU_OPENDAP, var_name='su')
    # Fix: the local was named 'exec', shadowing the builtin (and a Python 2
    # keyword); renamed to 'execution'.
    execution = wps.execute('pelican_subset',
                            inputs=[('domain', Domains([d0])),
                                    ('variable', Variables([v0]))],
                            mode=SYNC)
    assert execution.isSucceded()
def run_wps(process_id,input,output):
    """Execute a WPS process and return [fetched_result_body, split_href].

    Replays the generated request with storeExecuteResponse forced off,
    extracts the result href from the raw XML response, fetches it, and
    returns the body together with the whitespace-split href fragment.
    """
    #choose the first wps engine
    #my_engine = WebProcessingService('http://appsdev.hydroshare.org:8282/wps/WebProcessingService', verbose=False, skip_caps=True)
    my_engine = WebProcessingService('http://appsdev.hydroshare.org:8282/wps/WebProcessingService',verbose=False, skip_caps=True)
    my_engine.getcapabilities()
    #wps_engines = list_wps_service_engines()
    #my_engine = wps_engines[0]
    #choose the r.time-series-converter
    my_process = my_engine.describeprocess(process_id)
    my_inputs = my_process.dataInputs
    input_names = []
    #getting list of input
    for input1 in my_inputs:
        input_names.append(input1)
    #executing the process..
    execution = my_engine.execute(process_id, input, output)
    request = execution.request
    #set store executeresponse to false (forces a blocking response)
    request = request.replace('storeExecuteResponse="true"', 'storeExecuteResponse="false"')
    url_wps = 'http://appsdev.hydroshare.org:8282/wps/WebProcessingService'
    wps_request = urllib2.Request(url_wps,request)
    wps_open = urllib2.urlopen(wps_request)
    wps_read = wps_open.read()
    if 'href' in wps_read:
        # carve the href value out of the raw XML by string slicing
        tag = 'href="'
        location = wps_read.find(tag)
        new= wps_read[location+len(tag):len(wps_read)]
        tag2 = '"/>\n </wps:Output>\n </wps:ProcessOutputs>\n</wps:'
        location2 = new.find(tag2)
        final = new[0:location2]
        split = final.split()
        # fetch the referenced result document
        wps_request1 = urllib2.Request(split[0])
        wps_open1 = urllib2.urlopen(wps_request1)
        wps_read1 = wps_open1.read()
    #now we must use our own method to send the request1
    #we need to use the request
    #this code is for the normal wps which is not working right now
    # monitorExecution(execution)
    # output_data = execution.processOutputs
    # final_output_url = output_data[0].reference
    # final_data = read_final_data(final_output_url)
    #return [final_output_url, final_data]
    # NOTE(review): if 'href' is absent this raises NameError — confirm intent.
    return [wps_read1, split]
def _executeRequest(self, processid, inputs, output, verbose):
    """ This function makes a call to the Web Processing Service with the specified user inputs. """
    # Polls the WPS until completion (retrying transient status failures up
    # to WPS_attempts times), downloads the result with the same retries,
    # and returns the saved file's path parsed from the captured output.
    wps = WebProcessingService(WPS_URL)
    old_stdout = sys.stdout
    # create StringIO() for listening to print
    result = StringIO()
    if not verbose:
        # redirect standard output
        sys.stdout = result
    execution = wps.execute(processid, inputs, output)
    sleepSecs=10
    err_count=1
    while execution.isComplete()==False:
        try:
            monitorExecution(execution, sleepSecs, download=False)  # monitors for success
            err_count=1
        except Exception:
            print 'An error occurred while checking status, checking again.'
            print 'Sleeping %d seconds...' % sleepSecs
            err_count+=1
            if err_count > WPS_attempts:
                raise Exception('The status document failed to return, status checking has aborted. There has been a network or server issue preventing the status document from being retrieved, the request may still be running. For more information, check the status url %s' % execution.statusLocation)
            sleep(sleepSecs)
    # redirect standard output after successful execution
    # (the download log is parsed below to recover the saved filepath)
    sys.stdout = result
    done=False
    err_count=1
    while done==False:
        try:
            monitorExecution(execution, download=True)
            done=True
        except Exception:
            print 'An error occurred while trying to download the result file, trying again.'
            err_count+=1
            if err_count > WPS_attempts:
                raise Exception("The process completed successfully, but an error occurred while downloading the result. You may be able to download the file using the link at the bottom of the status document: %s" % execution.statusLocation)
            sleep(sleepSecs)
    # the second-to-last captured line ends with the saved file's path
    # NOTE(review): stdout is not restored if an exception propagates above.
    result_string = result.getvalue()
    output = result_string.split('\n')
    tmp = output[len(output) - 2].split(' ')
    sys.stdout = old_stdout
    return tmp[len(tmp)-1]
def get_usage(site, time):
    """Fetch usage and download statistics for *site* as two DataFrames."""
    site_url = URLS[site]
    wps = WebProcessingService(url=site_url)
    resp = wps.execute(identifier="usage", inputs=[("time", time)], mode=SYNC)
    # requests
    df = pd.read_csv(resp.processOutputs[0].reference,
                     parse_dates=["time_start", "time_end"])
    df["site"] = site
    df["URL"] = site_url
    # downloads
    df_downloads = pd.read_csv(resp.processOutputs[1].reference,
                               parse_dates=["datetime"])
    df_downloads["site"] = site
    df_downloads["URL"] = site_url
    return df, df_downloads
def run_wps(res_ids, gap):
    """Run the series gap filler WPS process for the given resource id(s).

    Returns an HttpResponse with the JSON result on success, otherwise a
    JsonResponse describing the failure.
    """
    if gap == '':
        gap = "linear"
    # checks if there is two resource IDs
    resources = res_ids.split("_")
    process_id = 'org.n52.wps.server.r.series_gap_filler_3'
    process_input = [('resource_id', str(resources[0])),
                     ('fill_function', str(gap))]
    # setting the WPS URL is set in app.py
    url_wps = GapFillerTool.wps_url + '/WebProcessingService'
    my_engine = WebProcessingService(url_wps, verbose=False, skip_caps=True)
    my_process = my_engine.describeprocess(process_id)
    # build the raw request so storeExecuteResponse can be forced off
    execution = WPSExecution(version=my_engine.version,
                             url=my_engine.url,
                             username=my_engine.username,
                             password=my_engine.password,
                             verbose=my_engine.verbose)
    requestElement = execution.buildRequest(process_id, process_input, 'output')
    request = etree.tostring(requestElement)
    # set store executeresponse to false
    request = request.replace('storeExecuteResponse="true"',
                              'storeExecuteResponse="false"')
    execution = my_engine.execute(process_id, process_input, 'output', request)
    monitorExecution(execution)
    status = execution.status
    # if the status is successful...
    if status == 'ProcessSucceeded':
        reference0 = execution.processOutputs[0].reference
        # retrieve the data from the reference
        output_data = requests.get(reference0)
        return HttpResponse(output_data, content_type="application/json")
    return JsonResponse({'status': 'wps request failed'})
def test_wps_execute_invalid_request():
    """An invalid Execute request produces a completed execution with errors."""
    # Initialize WPS client
    wps = WebProcessingService('http://cida.usgs.gov/gdp/process/WebProcessingService')
    # Submit fake invocation of Execute operation using cached HTTP request and response.
    # Fix: close the resource files deterministically instead of leaking handles.
    with open(resource_file('wps_USGSExecuteInvalidRequest.xml'), 'rb') as f:
        request = f.read()
    with open(resource_file('wps_USGSExecuteInvalidRequestResponse.xml'), 'rb') as f:
        response = f.read()
    execution = wps.execute(None, [], request=request, response=response)
    assert execution.isComplete() is True
    # Display errors
    ex = execution.errors[0]
    assert ex.code is None
    assert ex.locator is None
    assert ex.text == 'Attribute null not found in feature collection'
def test_wps_response_local_file(tmpdir):
    """retrieveData can read a process output that points at a local file."""
    # Build WPS object; service has been down for some time so skip caps here
    wps = WebProcessingService('http://localhost', skip_caps=True)
    # Write dummy output file
    out_fn = tmpdir / "output.txt"
    content = 'hi there'
    out_fn.write_text(content, encoding="utf8")
    # Execute fake WPS invocation.
    # Fix: close the response template file deterministically.
    with open(resource_file('wps_DummyExecuteResponseLocalFile.xml'), 'r') as f:
        response = f.read()
    execution = wps.execute(None, [], response=response.format(tmpdir=str(tmpdir)))
    # Retrieve data from local file system
    out = execution.processOutputs[0]
    txt = out.retrieveData()
    assert txt == content
def run_wps(res_ids):
    """Run the linear regression WPS process on a pair of resource ids.

    ``res_ids`` is "<x_resource_id>_<y_resource_id>".  Returns an
    HttpResponse with the JSON result on success, otherwise a JsonResponse
    describing the failure.
    """
    print"launch wps"
    print res_ids
    # checks if there is two resource IDs
    resources = res_ids.split("_")
    if len(resources) < 2:
        return JsonResponse({'status': 'wps request failed. 2 resources are required, found ' + str(len(resources))})
    process_id = 'org.n52.wps.server.r.linear_regression'
    process_input = [('x_resource_id',str(resources[0])),('y_resource_id',str(resources[1]))]
    #setting the WPS URL is set in app.py
    url_wps = CorrelationPlot.wps_url + '/WebProcessingService'
    my_engine = WebProcessingService(url_wps, verbose=False, skip_caps=True)
    my_process = my_engine.describeprocess(process_id)
    #executing the process..
    # build the raw request so storeExecuteResponse can be forced off
    execution = WPSExecution(version=my_engine.version, url=my_engine.url, username=my_engine.username, password=my_engine.password, verbose=my_engine.verbose)
    requestElement = execution.buildRequest(process_id, process_input, 'output')
    request = etree.tostring(requestElement)
    #set store executeresponse to false
    request = request.replace('storeExecuteResponse="true"', 'storeExecuteResponse="false"')
    print request
    execution = my_engine.execute(process_id, process_input, 'output', request)
    monitorExecution(execution)
    status = execution.status
    print status
    # if the status is successful...
    if status == 'ProcessSucceeded':
        outputs = execution.processOutputs
        output0 = outputs[0]
        reference0 = output0.reference
        # retrieve the data from the reference
        output_data = requests.get(reference0)
        resp = HttpResponse(output_data, content_type="application/json")
        return resp
    else:
        return JsonResponse({'status': 'wps request failed'})
def flyGssha(link,resultsFile):
    '''
    This function submits the link to the zipped GSSHA file and gets the result
    '''
    # ci-water WPS endpoint; capabilities are skipped since only 'rungssha'
    # is invoked
    wps = WebProcessingService('http://ci-water.byu.edu:9999/wps/WebProcessingService', verbose=False, skip_caps=True)
    processid = 'rungssha'
    inputs = [('url', link)]
    output = "outputfile"
    execution = wps.execute(processid, inputs, output)
    # block until the process finishes
    monitorExecution(execution)
    print "Pre-execution"
    # save the process output to the caller-supplied results file
    result = execution.getOutput(resultsFile)
    print "GSSHA has taken off!"
def execute(identifier, inputs=None, wps_host=None, wps_client=None, version='1.0.0'):
    """Execute a WPS process either against a live host or a test client.

    When ``wps_host`` is given, a WebProcessingService call is made directly.
    Otherwise ``wps_client`` issues a KVP GET request and the response is
    parsed into a WPSExecution.  Returns the execution (or owslib result).
    """
    # Fix: 'inputs=[]' was a mutable default argument shared across calls.
    if inputs is None:
        inputs = []
    if wps_host:
        wps = WebProcessingService(wps_host, version)
        return wps.execute(identifier, inputs=inputs)
    # build the DataInputs KVP value: "key1=val1;key2=val2"
    data_inputs = ';'.join(
        '{0}={1}'.format(data_input[0], data_input[1]) for data_input in inputs)
    response = wps_client.get(
        ('?service=WPS&request=execute&version={0}&'
         'identifier={1}&DataInputs={2}').format(version, identifier, data_inputs))
    wps_reader = WPSReader()
    element = wps_reader.readFromString(response.get_data())
    execution = WPSExecution()
    execution._parseExecuteResponse(element)
    return execution
def run_sc(request):
    """Django view: run the ``reservoircalculationprocess`` WPS service.

    Expects POST fields outlet_x/outlet_y/dam_height/watershed_geojson and
    returns a JsonResponse with the computed lake GeoJSON and volume.
    """
    message = ""
    # Initialize outputs up front: the JsonResponse at the bottom references
    # them, and the original raised NameError whenever an exception occurred
    # before they were assigned inside the try block.
    lake_GEOJSON = None
    lake_volume = None
    try:
        if request.POST:
            outlet_x = request.POST.get("outlet_x", None)
            outlet_y = request.POST.get("outlet_y", None)
            dam_height = request.POST.get("dam_height", None)
            watershed_geojson = request.POST.get("watershed_geojson", None)
            # Run reservoircalculation service
            #wps = WebProcessingService('https://tethys-staging.byu.edu/tethys_wps/?', verbose=False)
            wps = WebProcessingService('http://127.0.0.1:8000/tethys_wps/?', verbose=False)
            processid = 'reservoircalculationprocess'
            inputs = [("point_x", outlet_x),
                      ("point_y", outlet_y),
                      ("water_level", dam_height),
                      ("max_boundary", watershed_geojson)]
            execution = wps.execute(processid, inputs, output="lake_volume")
            monitorExecution(execution)
            # extract lake geojson and volume from the process outputs
            lake_GEOJSON = execution.processOutputs[1].data[0]
            lake_volume = execution.processOutputs[0].data[0]
            # Both branches of the original None-check appended str(status),
            # so a single append is equivalent.
            message += str(execution.status)
        else:
            raise Exception("Please call this service in a POST request.")
    except Exception as ex:
        message = ex.message  # NOTE: Python 2 idiom; use str(ex) on Python 3
    return JsonResponse({"lake_GEOJSON": lake_GEOJSON,
                         "lake_volume": lake_volume,
                         "message": message})
def esgf_logon(self, userid, url, openid, password):
    """Run the remote ``esgf_logon`` WPS process and, on success, store the
    returned proxy-credential reference and its expiry on the user's
    MongoDB record.  Returns the owslib execution status string.
    """
    registry = app.conf['PYRAMID_REGISTRY']
    inputs = []
    # NOTE(review): the password travels as a plain WPS input -- presumably
    # the transport is HTTPS; confirm.
    inputs.append( ('openid', openid.encode('ascii', 'ignore')) )
    inputs.append( ('password', password.encode('ascii', 'ignore')) )
    # 'output' is requested by reference, 'expires' inline
    outputs = [('output',True),('expires',False)]
    wps = WebProcessingService(url=url, skip_caps=True)
    execution = wps.execute(identifier="esgf_logon", inputs=inputs, output=outputs)
    monitorExecution(execution)
    # isSucceded is owslib's (sic) method name
    if execution.isSucceded():
        credentials = execution.processOutputs[0].reference
        cert_expires = execution.processOutputs[1].data[0]
        db = mongodb(registry)
        user = db.users.find_one({'identifier':userid})
        user['credentials'] = credentials
        user['cert_expires'] = cert_expires
        db.users.update({'identifier':userid}, user)
    return execution.status
def test_wps_execute():
    """Replay a cached USGS GDP Execute exchange and verify status transitions."""
    wps = WebProcessingService('http://cida.usgs.gov/gdp/process/WebProcessingService')

    # Execute fake invocation of Execute operation using cached HTTP request
    # and response.  Use context managers so the fixture file handles are
    # closed deterministically (the originals were left to the GC).
    with open(resource_file('wps_USGSExecuteRequest1.xml'), 'rb') as f:
        request = f.read()
    with open(resource_file('wps_USGSExecuteResponse1a.xml'), 'rb') as f:
        response = f.read()
    execution = wps.execute(None, [], request=request, response=response)
    assert execution.status == 'ProcessStarted'
    assert execution.isComplete() is False

    # Simulate end of process
    with open(resource_file('wps_USGSExecuteResponse1b.xml'), 'rb') as f:
        response = f.read()
    execution.checkStatus(sleepSecs=0, response=response)
    assert execution.status == 'ProcessSucceeded'
    assert execution.isComplete() is True

    # Display location of process output
    output = execution.processOutputs[0]
    assert output.reference == \
        'http://cida.usgs.gov/climate/gdp/process/RetrieveResultServlet?id=1318528582026OUTPUT.601bb3d0-547f-4eab-8642-7c7d2834459e'  # noqa
def flyGssha(link, resultsFile):
    ''' This function submits the link to the zipped GSSHA file and gets the result '''
    # Connect without fetching capabilities; the service URL is hard-coded.
    wps = WebProcessingService(
        'http://ci-water.byu.edu:9999/wps/WebProcessingService',
        verbose=False,
        skip_caps=True)
    processid = 'rungssha'
    inputs = [('url', link)]
    output = "outputfile"
    execution = wps.execute(processid, inputs, output)
    # Block until the remote process finishes.
    monitorExecution(execution)
    print "Pre-execution"
    # Download the process output into resultsFile.
    result = execution.getOutput(resultsFile)
    print "GSSHA has taken off!"
def run_wd(request): message = "" try: if request.GET: xlon = request.GET.get("xlon", None) ylat = request.GET.get("ylat", None) # Run watersheddelineationprocess service #wps = WebProcessingService('https://tethys-staging.byu.edu/tethys_wps/?', verbose=False) wps = WebProcessingService('http://127.0.0.1:8000/tethys_wps/?', verbose=False) processid = 'watersheddelineationprocess' inputs=[("outlet_x",xlon), ("outlet_y",ylat)] execution = wps.execute(processid, inputs, output="message") monitorExecution(execution) # extract watershed geojson watershed_GEOJSON = execution.processOutputs[1].data[0] snappoint_GEOJSON = execution.processOutputs[2].data[0] status = execution.status # Check results if watershed_GEOJSON is not None: message += str(status) else: message += str(status) else: raise Exception("Please call this service in a GET request.") except Exception as ex: message = ex.message print ex print ex.message return JsonResponse({"watershed_GEOJSON":watershed_GEOJSON, "snappoint_GEOJSON":snappoint_GEOJSON, "message":message})
def run(self):
    """Execute the configured WPS process, download its output to a
    temporary zip, and emit the outcome via the statusChanged signal."""
    outcome = Response()
    if self.identifier == "" or len(self.inputs) == 0:
        # nothing to run: report failure straight away
        outcome.status = 500
    else:
        try:
            service = WebProcessingService(self.url)
            print(self.inputs)
            job = service.execute(self.identifier, self.inputs)
            # TODO place in some temporary space
            target = '/tmp/out.zip'
            job.getOutput(target)
            outcome.status = 200
            outcome.filepath = target
        except Exception as err:
            outcome.status = 500
            outcome.data = str(err)
    self.statusChanged.emit(outcome)
def execute_process(self, userid, url, identifier, inputs, outputs, keywords=None):
    """Run a WPS process on behalf of *userid* and track it as a job.

    Registers the execution in MongoDB via ``add_job``, then polls the WPS
    status document every 3 seconds, mirroring progress/duration into the
    job record until the process completes.  Returns the final owslib
    status string.
    """
    registry = app.conf['PYRAMID_REGISTRY']
    db = mongodb(registry)
    wps = WebProcessingService(url=secure_url(db, url, userid), skip_caps=False, verify=False)
    execution = wps.execute(identifier, inputs=inputs, output=outputs)
    job = add_job(db, userid,
                  task_id = self.request.id,
                  is_workflow = False,
                  service = wps.identification.title,
                  title = execution.process.identifier,
                  abstract = getattr(execution.process, "abstract", ""),
                  status_location = execution.statusLocation)
    while execution.isNotComplete():
        try:
            execution.checkStatus(sleepSecs=3)
            job['status'] = execution.getStatus()
            job['status_message'] = execution.statusMessage
            job['is_complete'] = execution.isComplete()
            job['is_succeded'] = execution.isSucceded()  # owslib's (sic) spelling
            job['progress'] = execution.percentCompleted
            duration = datetime.now() - job.get('created', datetime.now())
            job['duration'] = str(duration).split('.')[0]
            if execution.isComplete():
                job['finished'] = datetime.now()
                if execution.isSucceded():
                    job['progress'] = 100
                    log(job)
                else:
                    job['status_message'] = '\n'.join(error.text for error in execution.errors)
                    for error in execution.errors:
                        log_error(job, error)
            else:
                log(job)
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrow it so those still propagate.
            logger.exception("Could not read status xml document.")
        else:
            # Persist the updated job record only when the status read succeeded.
            db.jobs.update({'identifier': job['identifier']}, job)
    return execution.getStatus()
def run(self):
    """Run the WPS job, download every process output, and emit a Response
    describing the results via the statusChanged signal."""
    reply = Response()
    if self.identifier == "" or len(self.inputs) == 0:
        # nothing to run: report failure straight away
        reply.status = 500
    else:
        try:
            service = WebProcessingService(self.url)
            job = service.execute(self.identifier, self.inputs, output=[])
            self.monitorExecution(job)
            for out in job.processOutputs:
                path = self.getFilePath(out.mimeType)
                reply.output[out.identifier] = ResponseOutput(path, out.mimeType)
                job.getOutput(path, out.identifier)
            reply.status = 200
        except Exception as err:
            reply.status = 500
            reply.data = str(err)
    self.statusChanged.emit(reply)
class RookWPS:
    """Thin convenience wrapper around an owslib WebProcessingService."""

    def __init__(self, url):
        self.wps = WebProcessingService(url, verbose=False, skip_caps=True)

    def getcapabilities(self):
        """Fetch capabilities and hand back the underlying owslib service."""
        self.wps.getcapabilities()
        return self.wps

    def describeprocess(self, identifier):
        """Proxy straight through to owslib's describeprocess."""
        return self.wps.describeprocess(identifier)

    def execute(self, identifier, inputs):
        """Run a process, wait for it, and return the download URLs parsed
        from the metalink document it produced."""
        execution = self.wps.execute(
            identifier, inputs, output=[("output", True, None)])
        monitorExecution(execution)
        print(execution.errors)
        assert execution.isSucceded() is True
        assert len(execution.processOutputs) > 0
        # the single output is a reference to a metalink file listing the results
        metalink_url = execution.processOutputs[0].reference
        metalink_xml = requests.get(metalink_url).text
        return parse_metalink(metalink_xml)
def uploadShapeFile(self, filePath): """ Given a file, this function encodes the file and uploads it onto geoserver. """ # encodes the file, opens it, reads it, and closes it # returns a filename in form of: filename_copy.zip filePath = self._encodeZipFolder(filePath) if filePath is None: return filehandle = open(filePath, 'r') filedata = filehandle.read() filehandle.close() os.remove(filePath) # deletes the encoded file # this if for naming the file on geoServer filename = filePath.split("/") # gets rid of filepath, keeps only filename eg: file.zip filename = filename[len(filename) - 1] filename = filename.replace("_copy.zip", "") # check to make sure a file with the same name does not exist fileCheckString = "upload:" + filename shapefiles = self.getShapefiles() if fileCheckString in shapefiles: print 'File exists already.' return xmlGen = gdpXMLGenerator() root = xmlGen.getUploadXMLtree(filename, upload_URL, filedata) # now we have a complete XML upload request uploadRequest = etree.tostring(root) POST = WebProcessingService(WPS_Service) execution = POST.execute(None, [], request=uploadRequest) monitorExecution(execution) return "upload:"+filename
def uploadShapeFile(self, filePath): """ Given a file, this function encodes the file and uploads it onto geoserver. """ # encodes the file, opens it, reads it, and closes it # returns a filename in form of: filename_copy.zip filePath = self._encodeZipFolder(filePath) if filePath is None: return filehandle = open(filePath, 'r') filedata = filehandle.read() filehandle.close() os.remove(filePath) # deletes the encoded file # this if for naming the file on geoServer filename = filePath.split("/") # gets rid of filepath, keeps only filename eg: file.zip filename = filename[len(filename) - 1] filename = filename.replace("_copy.zip", "") # check to make sure a file with the same name does not exist fileCheckString = "upload:" + filename shapefiles = self.getShapefiles() if fileCheckString in shapefiles: print 'File exists already.' return xmlGen = gdpXMLGenerator() root = xmlGen.getUploadXMLtree(filename, upload_URL, filedata) # now we have a complete XML upload request uploadRequest = etree.tostring(root) POST = WebProcessingService(WPS_Service) execution = POST.execute(None, [], request=uploadRequest) monitorExecution(execution) return "upload:" + filename
def execute(identifier, inputs=None, wps_host=None, wps_client=None, version='1.0.0'):
    """WPS execute response.

    Parameters
    ----------
    identifier : string
    inputs : list of tuples, optional
    wps_host : string
    wps_client : pywps.tests.WpsClient
    version : string

    Returns
    -------
    out : WPSExecution
        The parsed execute response.
    """
    # Avoid the shared mutable default argument ([] was the old default).
    if inputs is None:
        inputs = []
    if wps_host:
        wps = WebProcessingService(wps_host, version)
        return wps.execute(identifier, inputs=inputs)
    # Encode DataInputs as ';'-separated 'key=value' pairs (empty for no inputs).
    data_inputs = ';'.join('{0}={1}'.format(key, value) for key, value in inputs)
    response = wps_client.get(
        ('?service=WPS&request=execute&version={0}&'
         'identifier={1}&DataInputs={2}').format(version, identifier, data_inputs))
    wps_reader = WPSReader()
    element = wps_reader.readFromString(response.get_data())
    execution = WPSExecution()
    execution._parseExecuteResponse(element)
    return execution
def complex_input_with_reference():
    """Run the 'wordcount' process with a ComplexDataInput passed by URL
    reference, then print status and every process output."""
    print("\ncomplex_input_with_reference ...")

    service = WebProcessingService('http://localhost:8094/wps', verbose=verbose)
    # alice in wonderland
    source = ComplexDataInput("http://www.gutenberg.org/files/28885/28885-h/28885-h.htm")
    # single output requested by reference (identifier, asReference)
    execution = service.execute(
        'wordcount',
        [("text", source)],
        output=[("output", True)])
    monitorExecution(execution)

    # show status
    print('percent complete', execution.percentCompleted)
    print('status message', execution.statusMessage)

    for output in execution.processOutputs:
        print('identifier=%s, dataType=%s, data=%s, reference=%s' % (
            output.identifier, output.dataType, output.data, output.reference))
class WPSClient(object):
    """Returns a class where every public method is a WPS process available at
    the given url.

    Example:
        >>> emu = WPSClient(url='<server url>')
        >>> emu.hello('stranger')
        'Hello stranger'
    """

    def __init__(
        self,
        url,
        processes=None,
        converters=None,
        username=None,
        password=None,
        headers=None,
        auth=None,
        verify=True,
        cert=None,
        verbose=False,
        progress=False,
        version=WPS_DEFAULT_VERSION,
        caps_xml=None,
        desc_xml=None,
        language=None,
    ):
        """
        Args:
            url (str): Link to WPS provider.
            config (Config): an instance
            processes: Specify a subset of processes to bind. Defaults to all
                processes.
            converters (dict): Correspondence of {mimetype: class} to convert
                this mimetype to a python object.
            username (str): passed to :class:`owslib.wps.WebProcessingService`
            password (str): passed to :class:`owslib.wps.WebProcessingService`
            headers (str): passed to :class:`owslib.wps.WebProcessingService`
            auth (requests.auth.AuthBase): requests-style auth class to authenticate,
                see https://2.python-requests.org/en/master/user/authentication/
            verify (bool): passed to :class:`owslib.wps.WebProcessingService`
            cert (str): passed to :class:`owslib.wps.WebProcessingService`
            verbose (str): passed to :class:`owslib.wps.WebProcessingService`
            progress (bool): If True, enable interactive user mode.
            version (str): WPS version to use.
            language (str): passed to :class:`owslib.wps.WebProcessingService`
                ex: 'fr-CA', 'en_US'.
        """
        self._converters = converters
        self._interactive = progress
        self._mode = ASYNC if progress else SYNC
        self._notebook = notebook.is_notebook()
        self._inputs = {}
        self._outputs = {}

        if not verify:
            import urllib3
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        if headers is None:
            headers = {}

        if auth is not None:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = requests.auth.HTTPBasicAuth(*auth)

            # We only need some headers from the requests.auth.AuthBase implementation
            # We prepare a dummy request, call the auth object with it, and get its headers
            dummy_request = requests.Request("get", "http://localhost")
            r = auth(dummy_request.prepare())

            auth_headers = ["Authorization", "Proxy-Authorization", "Cookie"]
            headers.update(
                {h: r.headers[h] for h in auth_headers if h in r.headers})

        self._wps = WebProcessingService(url, version=version, username=username,
                                         password=password, verbose=verbose,
                                         headers=headers, verify=verify,
                                         cert=cert, skip_caps=True,
                                         language=language)

        try:
            self._wps.getcapabilities(xml=caps_xml)
        except ServiceException as e:
            if "AccessForbidden" in str(e):
                raise UnauthorizedException(
                    "You are not authorized to do a request of type: GetCapabilities"
                )
            raise

        self._processes = self._get_process_description(processes, xml=desc_xml)

        # Build the methods: one bound method per remote process identifier.
        for pid in self._processes:
            setattr(self, sanitize(pid),
                    types.MethodType(self._method_factory(pid), self))

        self.logger = logging.getLogger('WPSClient')
        if progress:
            self._setup_logging()

        self.__doc__ = utils.build_wps_client_doc(self._wps, self._processes)

    @property
    def language(self):
        # Delegate the language setting to the underlying owslib service.
        return self._wps.language

    @language.setter
    def language(self, value):
        self._wps.language = value

    @property
    def languages(self):
        return self._wps.languages

    def _get_process_description(self, processes=None, xml=None):
        """Return the description for each process.

        Sends the server a `describeProcess` request for each process.

        Parameters
        ----------
        processes: str, list, None
            A process name, a list of process names or None (for all processes).

        Returns
        -------
        OrderedDict
            A dictionary keyed by the process identifier of process descriptions.
        """
        all_wps_processes = [p.identifier for p in self._wps.processes]

        if processes is None:
            if owslib.__version__ > '0.17.0':
                # Get the description for all processes in one request.
                ps = self._wps.describeprocess('all', xml=xml)
                return OrderedDict((p.identifier, p) for p in ps)
            else:
                processes = all_wps_processes

        # Check for invalid process names, i.e. not matching the getCapabilities response.
        process_names, missing = utils.filter_case_insensitive(
            processes, all_wps_processes)
        if missing:
            message = "These process names were not found on the WPS server: {}"
            raise ValueError(message.format(", ".join(missing)))

        # Get the description for each process.
        ps = [self._wps.describeprocess(pid, xml=xml) for pid in process_names]

        return OrderedDict((p.identifier, p) for p in ps)

    def _setup_logging(self):
        # Progress mode: mirror INFO log records to stdout.
        self.logger.setLevel(logging.INFO)
        import sys
        fh = logging.StreamHandler(sys.stdout)
        fh.setFormatter(logging.Formatter('%(asctime)s: %(message)s'))
        self.logger.addHandler(fh)

    def _method_factory(self, pid):
        """Create a custom function signature with docstring, instantiate it and
        pass it to a wrapper which will actually call the process.

        Parameters
        ----------
        pid: str
            Identifier of the WPS process.

        Returns
        -------
        func
            A Python function calling the remote process, complete with docstring and signature.
        """
        process = self._processes[pid]

        required_inputs_first = sorted(process.dataInputs, key=sort_inputs_key)

        input_names = []
        # defaults will be set to the function's __defaults__:
        # A tuple containing default argument values for those arguments that have defaults,
        # or None if no arguments have a default value.
        defaults = []
        for inpt in required_inputs_first:
            input_names.append(sanitize(inpt.identifier))
            if inpt.minOccurs == 0 or inpt.defaultValue is not None:
                default = inpt.defaultValue if inpt.dataType != "ComplexData" else None
                defaults.append(utils.from_owslib(default, inpt.dataType))
        defaults = tuple(defaults) if defaults else None

        body = dedent("""
            inputs = locals()
            inputs.pop('self')
            return self._execute('{pid}', **inputs)
        """).format(pid=pid)

        func_builder = FunctionBuilder(
            name=sanitize(pid),
            doc=utils.build_process_doc(process),
            args=["self"] + input_names,
            defaults=defaults,
            body=body,
            filename=__file__,
            module=self.__module__,
        )

        self._inputs[pid] = {}
        if hasattr(process, "dataInputs"):
            self._inputs[pid] = OrderedDict(
                (i.identifier, i) for i in process.dataInputs)

        self._outputs[pid] = {}
        if hasattr(process, "processOutputs"):
            self._outputs[pid] = OrderedDict(
                (o.identifier, o) for o in process.processOutputs)

        func = func_builder.get_func()

        return func

    def _build_inputs(self, pid, **kwargs):
        """Build the input sequence from the function arguments."""
        wps_inputs = []
        for name, input_param in list(self._inputs[pid].items()):
            arg = kwargs.get(sanitize(name))
            if arg is None:
                continue

            # Accept scalars as well as lists/tuples of values.
            values = [arg, ] if not isinstance(arg, (list, tuple)) else arg
            supported_mimetypes = [v.mimeType for v in input_param.supportedValues]

            for value in values:
                #  if input_param.dataType == "ComplexData": seems simpler
                if isinstance(input_param.defaultValue, ComplexData):

                    # Guess the mimetype of the input value
                    mimetype, encoding = guess_type(value, supported_mimetypes)

                    if encoding is None:
                        encoding = input_param.defaultValue.encoding

                    if isinstance(value, ComplexData):
                        inp = value

                    # Either embed the file content or just the reference.
                    else:
                        if utils.is_embedded_in_request(self._wps.url, value):
                            # If encoding is None, this will return the actual encoding used (utf-8 or base64).
                            value, encoding = embed(value, mimetype, encoding=encoding)
                        else:
                            value = fix_url(str(value))

                        inp = utils.to_owslib(value,
                                              data_type=input_param.dataType,
                                              encoding=encoding,
                                              mimetype=mimetype)

                else:
                    inp = utils.to_owslib(value, data_type=input_param.dataType)

                wps_inputs.append((name, inp))

        return wps_inputs

    def _execute(self, pid, **kwargs):
        """Execute the process."""
        wps_inputs = self._build_inputs(pid, **kwargs)
        # Request complex outputs by reference, everything else inline.
        wps_outputs = [
            (o.identifier, "ComplexData" in o.dataType)
            for o in list(self._outputs[pid].values())]

        mode = self._mode if self._processes[pid].storeSupported else SYNC

        try:
            wps_response = self._wps.execute(
                pid, inputs=wps_inputs, output=wps_outputs, mode=mode)

            if self._interactive and self._processes[pid].statusSupported:
                if self._notebook:
                    notebook.monitor(wps_response, sleep=.2)
                else:
                    self._console_monitor(wps_response)

        except ServiceException as e:
            if "AccessForbidden" in str(e):
                raise UnauthorizedException(
                    "You are not authorized to do a request of type: Execute")
            raise

        # Add the convenience methods of WPSResult to the WPSExecution class. This adds a `get` method.
        utils.extend_instance(wps_response, WPSResult)
        wps_response.attach(wps_outputs=self._outputs[pid],
                            converters=self._converters)
        return wps_response

    def _console_monitor(self, execution, sleep=3):
        """Monitor the execution of a process.

        Parameters
        ----------
        execution : WPSExecution instance
            The execute response to monitor.
        sleep: float
            Number of seconds to wait before each status check.
        """
        import signal

        # Intercept CTRL-C
        def sigint_handler(signum, frame):
            self.cancel()
        signal.signal(signal.SIGINT, sigint_handler)

        while not execution.isComplete():
            execution.checkStatus(sleepSecs=sleep)
            self.logger.info("{} [{}/100] - {} ".format(
                execution.process.identifier,
                execution.percentCompleted,
                execution.statusMessage[:50],
            ))

        if execution.isSucceded():
            self.logger.info("{} done.".format(execution.process.identifier))
        else:
            self.logger.info("{} failed.".format(execution.process.identifier))
# Demo script exercising the rain1.fsv.cvut.cz WPS service.

# 1. test GetCapabilities query
wps = WebProcessingService('https://rain1.fsv.cvut.cz/services/wps', skip_caps=True)
wps.getcapabilities()
print("Test 1: GetCapabilities -> list of processes:")
for process in wps.processes:
    print(process.identifier)

processId = 'd-rain-csv'

# 2. test DescribeProcess query
#process = wps.describeprocess(processId)
#print ("Test 2: DescribeProcess -> list of parameters:")
# for input in process.dataInputs:
#     print (input.identifier)
# for output in process.processOutputs:
#     print (output.identifier)

# 3. test Execute query
print("Test 3: Execute")
inputs = [("input", ComplexDataInput('http://rain.fsv.cvut.cz/geodata/test.gml')),
          ("keycolumn", "HLGP_ID"),
          ("return_period", "N2,N5,N10"),
          ("rainlength", "120")]
execution = wps.execute(processId, inputs)

outputFile = '/tmp/output.csv'
execution.getOutput(outputFile)
with open(outputFile) as fd:
    print(fd.readlines())
# (removed a stray trailing `o` token that raised NameError at runtime)
def qc_wizard_yaml(request):
    """Pyramid view for the Quality Control wizard (YAML variant).

    Renders the form fields on GET; on "submit" it collects the form values,
    launches the remote ``QC_Check_YAML`` WPS process, records the job, and
    redirects to the jobs page.
    """
    title = "Quality Control Wizard"
    user_id = authenticated_userid(request)
    token = "1234"  # user_token(request, user_id)
    session_id_help = ("An identifier used to avoid processes running on the same directory." +
                       " Using an existing one will remove all data inside its work directory.")
    session_ids = get_session_ids(user_id, request)
    if session_ids == []:
        session_id_help += " There are currently no existing Session IDs."
    else:
        session_id_help += " The existing Session IDs are:<br>" + ", ".join(session_ids)
    yamllogs_help = "The comma separated list of logfile locations"
    oldprefix_help = "The data path in the provided logfiles"
    newprefix_help = "The data path on the machine"
    #a field in fields must contain text, id and value. The entry help is optional.
    #allowed_values can be used if a limited number of possibile values should be available.
    #In that case value will be used as default if it is in allowed_values.
    #For type "checkbox" the existence of the "checked" key will lead to the checkbox being True.
    fields = [
        {"id": "quality_server_address", "type": "text", "text": "URL to the Quality WPS",
         "value": DEFAULTQUALITYSERVER},
        {"id": "session_id", "type": "text", "text": "Session ID",
         "help": session_id_help, "value": "checkdone"},
        {"id": "yamllogs", "type": "text", "text": "YAML logs",
         "help": yamllogs_help, "value": ""},
        {"id": "prefix_old", "type": "text", "text": "Old prefix",
         "help": oldprefix_help, "value": ""},
        {"id": "prefix_new", "type": "text", "text": "New prefix",
         "help": newprefix_help, "value": ""},
        {"id": "project", "type": "select", "text": "Project", "value": "CORDEX",
         "allowed_values": ["CORDEX"]},
        {"id": "replica", "type": "checkbox", "text": "Replica", "value": ""},
        {"id": "latest", "type": "checkbox", "text": "Latest", "value": "",
         "checked": "checked"},
        {"id": "publish_metadata", "type": "checkbox", "text": "Publish meta-data",
         "value": "", "checked": "checked"},
        {"id": "publish_quality", "type": "checkbox", "text": "Publish quality-data",
         "value": "", "checked": "checked"},
        {"id": "clean", "type": "checkbox", "text": "Clean afterwards", "value": "",
         "help": "Removes the work data after the steps have finished"},
    ]
    html_fields = get_html_fields(fields)

    if "submit" in request.POST:
        DATA = request.POST

        #shorten the method parameters by automatically insert DATA
        def getValue(identifier):
            return getValueStatic(DATA, identifier)

        def getBool(identifier):
            return getBoolStatic(DATA, identifier)

        ##########################
        #collect input parameters#
        ##########################
        username = str(user_id.replace("@", "(at)"))
        token = token
        session_id = getValue("session_id")
        yamllogs = getValue("yamllogs")
        prefix_old = getValue("prefix_old")
        prefix_new = getValue("prefix_new")
        project = getValue("project")
        #html checkboxes are true if and only if they are in the POST (DATA variable)
        replica = getBool("replica")
        latest = getBool("latest")
        publish_metadata = getBool("publish_metadata")
        publish_quality = getBool("publish_quality")
        cleanup = getBool("clean")
        wps_address = getValue("quality_server_address")
        # NOTE(review): this service object is dead code -- it is immediately
        # overwritten by `wps = request.wps` below; confirm which endpoint is
        # intended before removing either line.
        wps = WebProcessingService(wps_address)

        ##################
        #Run the wps call#
        ##################
        wps = request.wps
        identifier = "QC_Check_YAML"
        inputs = [("username", username), ("token", token),
                  ("session_id", session_id), ("yamllogs", yamllogs),
                  ("prefix_old", prefix_old), ("prefix_new", prefix_new),
                  ("project", project), ("replica", replica),
                  ("latest", latest), ("publish_metadata", publish_metadata),
                  ("publish_quality", publish_quality), ("cleanup", cleanup)]
        outputs = [("process_log", True)]
        #wps.execute does not like empty strings as value, so filter it out
        inputs = [(x, y) for (x, y) in inputs if y != ""]
        # NOTE(review): debug trace to a hard-coded home directory; the handle
        # is never closed -- consider removing or routing through logging.
        g = open("/home/dkrz/k204205/log", "w")
        g.write(str(inputs) + "\n")
        g.write(str(outputs) + "\n")
        g.write(str(identifier) + "\n")
        g.flush()
        execution = wps.execute(identifier, inputs=inputs, output=outputs)

        models.add_job(
            request = request,
            workflow = False,
            title = execution.process.title,
            wps_url = execution.serviceInstance,
            status_location = execution.statusLocation,
            notes = "test",
            tags = "test")
        return HTTPFound(location=request.route_url('jobs'))

    return {
        "title": title,
        "html_fields": html_fields,
    }
# Fragment of a larger task body: `url`, `userid`, `identifier`, `inputs`,
# `async`, `job`, and `self` are bound earlier, outside this chunk, and the
# matching `except` clause of this outer `try` also lies outside it.
try:
    wps = WebProcessingService(url=url, skip_caps=False, verify=False,
                               headers=wps_headers(userid))
    # TODO: complex type detection is currently broken due to pywps bug.
    outputs = [('output', True)]
    try:
        # TODO: sync is non-default
        # NOTE(review): `async` became a reserved keyword in Python 3.7+,
        # so this code only parses on Python <= 3.6.
        if async is False:
            mode = SYNC
        else:
            mode = ASYNC
        execution = wps.execute(identifier=identifier, inputs=inputs,
                                output=outputs, mode=mode, lineage=True)
    except Exception:
        # fall back for servers/owslib versions that reject the `mode` kwarg
        LOGGER.warn(
            "Setting execution mode is not supported. Using default async mode."
        )
        execution = wps.execute(identifier, inputs=inputs, output=outputs)
    # job['service'] = wps.identification.title
    # job['title'] = getattr(execution.process, "title")
    job['abstract'] = getattr(execution.process, "abstract")
    job['status_location'] = execution.statusLocation
    job['request'] = execution.request
    job['response'] = etree.tostring(execution.response)
    LOGGER.debug("job init done %s ...", self.request.id)
# CLI dispatch fragment: `request`, `wps`, `identifier`, `xml`, and usage()
# are defined earlier in the enclosing script, outside this chunk.
elif request == "DescribeProcess":
    if identifier is None:
        print('\nERROR: missing mandatory "-i (or --identifier)" argument')
        usage()
        sys.exit(4)
    process = wps.describeprocess(identifier)
    print("WPS Process: identifier=%s" % process.identifier)
    print("WPS Process: title=%s" % process.title)
    print("WPS Process: abstract=%s" % process.abstract)
    # NOTE(review): `input` shadows the builtin of the same name.
    for input in process.dataInputs:
        print(
            "Process input: identifier=%s, data type=%s, minOccurs=%d, maxOccurs=%d"
            % (input.identifier, input.dataType, input.minOccurs, input.maxOccurs)
        )
    for output in process.processOutputs:
        print("Process output: identifier=%s, data type=%s" %
              (output.identifier, output.dataType))
elif request == "Execute":
    if xml is None:
        print('\nERROR: missing mandatory "-x (or --xml)" argument')
        usage()
        sys.exit(5)
    # raw XML execute: identifier/inputs are ignored when `request` is given
    execution = wps.execute(None, [], request=xml)
    monitorExecution(execution)
else:
    print("\nERROR: Unknown request type")
    usage()
    sys.exit(6)
def run(self):
    """Submit a USGS GDP statistics/subset job for this thread's ``self.job``
    and poll it to completion, mirroring status into the job record.
    (Python 2 module: uses print statements.)
    """
    # submit job
    verbose = True
    wps = WebProcessingService('http://cida.usgs.gov/climate/gdp/process/WebProcessingService', verbose=verbose)

    # formula for model data
    #dataset_id = "ensemble_%s_%s" % (self.job.dataset, self.job.index)
    # formula for observational data
    dataset_id = "gmo_%s" % self.job.index
    print 'dataset_id=%s' % dataset_id

    # map the requested climate index onto its OPeNDAP dataset
    if self.job.index=='tmin-days_below_threshold':
        dataset_uri = 'dods://cida.usgs.gov/qa/thredds/dodsC/derivatives/derivative-days_below_threshold.tmin.ncml'
    elif self.job.index=='tmax-days_above_threshold':
        dataset_uri = 'dods://cida.usgs.gov/qa/thredds/dodsC/derivatives/derivative-days_above_threshold.tmax.ncml'
    elif self.job.index=='pr-days_above_threshold':
        dataset_uri = 'dods://cida.usgs.gov/qa/thredds/dodsC/derivatives/derivative-days_above_threshold.pr.ncml'
    else:
        raise Exception("Unrecognized index choice, cannot select dataset")
    print 'dataset_uri=%s' % dataset_uri

    # datetime processing
    #startDateTime = datetime.strptime(self.job.startDateTime, "%Y-%m-%d")
    startDateTime = self.job.startDateTime
    _startDateTime = startDateTime.isoformat()+".000Z"
    # FIXME: 5 year time span
    stopDateTime = datetime(startDateTime.year+5, startDateTime.month, startDateTime.day)
    _stopDateTime = stopDateTime.isoformat()+".000Z"

    # region geometry comes from a WFS feature collection filtered by the job's region
    wfsUrl = "http://cida.usgs.gov/climate/gdp/proxy/http://igsarm-cida-gdp2.er.usgs.gov:8082/geoserver/wfs"
    query = WFSQuery("sample:CSC_Boundaries", propertyNames=["the_geom","area_name"], filters=[self.job.region])
    featureCollection = WFSFeatureCollection(wfsUrl, query)

    print 'outputFormat=%s' % self.job.outputFormat
    # CSV -> weighted grid statistics; otherwise OPeNDAP coverage intersection
    if self.job.outputFormat=='CSV':
        processid = 'gov.usgs.cida.gdp.wps.algorithm.FeatureWeightedGridStatisticsAlgorithm'
        inputs = [ ("FEATURE_ATTRIBUTE_NAME","the_geom"),
                   ("DATASET_URI", dataset_uri.encode('utf8')),
                   ("DATASET_ID", dataset_id.encode('utf8')),
                   ("TIME_START", _startDateTime),
                   ("TIME_END", _stopDateTime ),
                   ("REQUIRE_FULL_COVERAGE","false"),
                   ("DELIMITER","COMMA"),
                   ("STATISTICS","MEAN"),
                   ("GROUP_BY","STATISTIC"),
                   ("SUMMARIZE_TIMESTEP","false"),
                   ("SUMMARIZE_FEATURE_ATTRIBUTE","false"),
                   ("FEATURE_COLLECTION", featureCollection) ]
    else:
        processid = 'gov.usgs.cida.gdp.wps.algorithm.FeatureCoverageOPeNDAPIntersectionAlgorithm'
        inputs = [ ("DATASET_URI", dataset_uri.encode('utf8')),
                   ("DATASET_ID", dataset_id.encode('utf8')),
                   ("TIME_START", _startDateTime),
                   ("TIME_END", _stopDateTime ),
                   ("REQUIRE_FULL_COVERAGE","false"),
                   ("FEATURE_COLLECTION", featureCollection) ]
    output = "OUTPUT"

    # submit job
    execution = wps.execute(processid, inputs, output = "OUTPUT")
    self.job._update(execution, first=True)

    # keep monitoring till job completion
    while execution.isComplete()==False:
        execution.checkStatus(sleepSecs=4)
        self.job._update(execution)

    print 'Done'
class GenericWPS(MonitorPE):
    """Workflow PE that runs one WPS process and reports status/progress
    through the MonitorPE machinery."""

    STATUS_NAME = 'status'
    STATUS_LOCATION_NAME = 'status_location'

    # NOTE(review): `inputs=[]` is a shared mutable default, and
    # `_set_inputs` appends to `self.wps_inputs` -- instances created
    # without an explicit `inputs` share (and grow) the same list; confirm
    # whether callers always pass their own list.
    def __init__(self, url, identifier, resource='resource',
                 inputs=[], output=None, headers=None):
        MonitorPE.__init__(self)
        self._add_output(self.STATUS_NAME)
        self._add_output(self.STATUS_LOCATION_NAME)
        self.wps = WebProcessingService(url=url, skip_caps=True,
                                        verify=False, headers=headers)
        self.identifier = identifier
        self.wps_resource = resource
        self.wps_inputs = inputs
        self.wps_output = output

    def progress(self, execution):
        # scale the WPS percentCompleted into this PE's [_pstart, _pend] window
        return int(self._pstart +
                   ((self._pend - self._pstart) / 100.0 * execution.percentCompleted))

    def monitor_execution(self, execution):
        """Poll the execution every 3s until it completes, forwarding
        status messages and progress; report outputs or errors at the end."""
        progress = self.progress(execution)
        self.monitor(
            "status_location={0.statusLocation}".format(execution), progress)
        while execution.isNotComplete():
            try:
                execution.checkStatus(sleepSecs=3)
            except Exception:
                LOGGER.exception("Could not read status xml document.")
            else:
                progress = self.progress(execution)
                self.monitor(execution.statusMessage, progress)
        if execution.isSucceded():
            for output in execution.processOutputs:
                # NOTE(review): 'ouput' is a typo kept as-is (monitor label only)
                self.monitor('ouput={0.identifier}'.format(output), progress)
        else:
            self.monitor('\n'.join(
                ['ERROR: {0.text} code={0.code} locator={0.locator})'.
                 format(ex) for ex in execution.errors]), progress)

    def _build_wps_inputs(self):
        """Wrap raw input values in ComplexDataInput/BoundingBoxDataInput
        according to the process description's declared data types."""
        process = self.wps.describeprocess(self.identifier)
        complex_inpts = []
        bbox_inpts = []
        for inpt in process.dataInputs:
            if 'ComplexData' in inpt.dataType:
                complex_inpts.append(inpt.identifier)
            elif 'BoundingBoxData' in inpt.dataType:
                bbox_inpts.append(inpt.identifier)
        inputs = []
        for inpt in self.wps_inputs:
            LOGGER.debug("input=%s", inpt)
            if inpt[0] in complex_inpts:
                inputs.append((inpt[0], ComplexDataInput(inpt[1])))
            elif inpt[0] in bbox_inpts:
                inputs.append((inpt[0], BoundingBoxDataInput(inpt[1])))
            else:
                inputs.append(inpt)
        return inputs

    def _build_wps_outputs(self):
        # request the configured output by reference, if any
        outputs = []
        if self.wps_output is not None:
            outputs = [(self.wps_output, True)]
        return outputs

    def execute(self):
        """Run the process, monitor it, and return a result dict with the
        final status, status location and (optionally) the output reference.
        Raises Exception with the joined error texts on failure."""
        LOGGER.debug("execute inputs=%s", self.wps_inputs)
        execution = self.wps.execute(
            identifier=self.identifier,
            inputs=self._build_wps_inputs(),
            output=self._build_wps_outputs(),
            lineage=True)
        self.monitor_execution(execution)

        result = {self.STATUS_NAME: execution.status,
                  self.STATUS_LOCATION_NAME: execution.statusLocation}
        if execution.isSucceded():
            # NOTE: only set workflow output if specific output was requested
            if self.wps_output is not None:
                for output in execution.processOutputs:
                    if self.wps_output == output.identifier:
                        result[self.OUTPUT_NAME] = output.reference
                        break
            return result
        else:
            failure_msg = '\n'.join(['{0.text}'.
                                     format(ex) for ex in execution.errors])
            raise Exception(failure_msg)

    def _set_inputs(self, inputs):
        # append each workflow input value as a (resource, value) WPS input
        if self.INPUT_NAME in inputs:
            for value in inputs[self.INPUT_NAME]:
                self.wps_inputs.append((self.wps_resource, value))

    def process(self, inputs):
        try:
            result = self._process(inputs)
            if result is not None:
                return result
        except Exception:
            LOGGER.exception("process failed!")
            raise

    def _process(self, inputs):
        self._set_inputs(inputs)
        return self.execute()
# Example: invoke a WPS process requesting multiple outputs, then inspect
# the returned outputs and any execution errors.
# BUGFIX: the original used Python 2 print statements, which are a syntax
# error on Python 3; converted to print() calls with identical output.
from owslib.wps import WebProcessingService, monitorExecution

verbose = True

# get multiple outputs
wps = WebProcessingService('http://rsg.pml.ac.uk/wps/generic.cgi',
                           verbose=verbose)
processid = 'dummyprocess'
inputs = [("input1", '1'), ("input2", '2')]
# list of tuple (output identifier, asReference attribute)
outputs = [("output1", True), ("output2", False)]
execution = wps.execute(processid, inputs, output=outputs)
print(execution.status)

# show status
print('percent complete', execution.percentCompleted)
print('status message', execution.statusMessage)

monitorExecution(execution)

for output in execution.processOutputs:
    print('identifier=%s, dataType=%s, data=%s, reference=%s' %
          (output.identifier, output.dataType, output.data, output.reference))

# get errors
inputs = [("input1", '1'), ("input2", '3')]
execution = wps.execute(processid, inputs, output=outputs)
monitorExecution(execution)
print(execution.status)
for error in execution.errors:
    print(error.code, error.locator, error.text)
# NOTE(review): fragment — the first line below is the tail of a call whose
# head lies above this chunk; `job`, `url`, `userid`, `identifier`, `inputs`,
# SYNC/ASYNC, wps_headers() and etree come from the surrounding file.
# NOTE(review): `async` is a reserved keyword since Python 3.7, so this code
# only parses on older interpreters — TODO confirm the target Python version.
                      async=async, caption=caption)
    try:
        wps = WebProcessingService(url=url, skip_caps=False, verify=False,
                                   headers=wps_headers(userid))
        # TODO: complex type detection is currently broken due to pywps bug.
        outputs = [('output', True)]
        try:
            # TODO: sync is non-default
            # Translate the caller's `async` flag into a WPS execution mode.
            if async is False:
                mode = SYNC
            else:
                mode = ASYNC
            execution = wps.execute(
                identifier=identifier,
                inputs=inputs,
                output=outputs,
                mode=mode,
                lineage=True)
        except Exception:
            # Fallback: some services reject the mode parameter entirely.
            LOGGER.warn("Setting execution mode is not supported. Using default async mode.")
            execution = wps.execute(identifier, inputs=inputs, output=outputs
                                    )
        # job['service'] = wps.identification.title
        # job['title'] = getattr(execution.process, "title")
        # Record execution metadata on the job record — presumably a dict or
        # document object; verify against the caller.
        job['abstract'] = getattr(execution.process, "abstract")
        job['status_location'] = execution.statusLocation
        job['request'] = execution.request
        job['response'] = etree.tostring(execution.response)
# NOTE(review): fragment of a demo script; `wps`, `polygon` and the owslib
# helpers (GMLMultiPolygonFeatureCollection, monitorExecution) are defined
# outside this chunk.
featureCollection = GMLMultiPolygonFeatureCollection([polygon])
processid = 'gov.usgs.cida.gdp.wps.algorithm.FeatureWeightedGridStatisticsAlgorithm'
# (identifier, value) pairs for the USGS feature-weighted grid statistics
# algorithm; values are passed verbatim to the service.
inputs = [
    ("FEATURE_ATTRIBUTE_NAME", "the_geom"),
    ("DATASET_URI", "dods://cida.usgs.gov/qa/thredds/dodsC/derivatives/derivative-days_above_threshold.pr.ncml"),
    ("DATASET_ID", "ensemble_b1_pr-days_above_threshold"),
    ("TIME_START", "2010-01-01T00:00:00.000Z"),
    ("TIME_END", "2011-01-01T00:00:00.000Z"),
    ("REQUIRE_FULL_COVERAGE", "false"),
    ("DELIMITER", "COMMA"),
    ("STATISTICS", "MEAN"),
    ("GROUP_BY", "STATISTIC"),
    ("SUMMARIZE_TIMESTEP", "false"),
    ("SUMMARIZE_FEATURE_ATTRIBUTE", "false"),
    ("FEATURE_COLLECTION", featureCollection)
]
output = "OUTPUT"
execution = wps.execute(processid, inputs, output="OUTPUT")
# alternatively, submit a pre-made request specified in an XML file
#request = open('../tests/wps_USGSExecuteRequest1.xml','r').read()
#execution = wps.execute(None, [], request=request)
# The monitorExecution() function can be conveniently used to wait for the process termination
# It will eventually write the process output to the specified file, or to the file specified by the server.
monitorExecution(execution)
# NOTE(review): the triple-quote below is unmatched within this chunk — its
# partner lies outside the visible span; TODO confirm which section it
# comments out.
'''
# 3b) Execute
# Submits an HTTP POST "Execute" process request to the WPS service, keeps checking the status of the request,
# and retrieves the output once the request terminates successfully (displaying any errors if found).
# This request uses a FEATURE_COLLECTION input defined as a GML (lat, lon) polygon.
polygon = [(-102.8184, 39.5273), (-102.8184, 37.418), (-101.2363, 37.418), (-101.2363, 39.5273), (-102.8184, 39.5273)]
featureCollection = GMLMultiPolygonFeatureCollection( [polygon] )
def qc_wizard_check(request):
    """Pyramid view for the Quality Control Wizard "check" step.

    Renders the wizard's form fields and, on submit, launches the
    "QC_Check_Full" process on the configured quality-control WPS,
    registers the resulting job and redirects to the jobs page.

    :param request: the Pyramid request object.
    :returns: a template context dict, or an HTTPFound redirect on submit.
    :raises Exception: if no user token can be determined.
    """
    title = "Quality Control Wizard"
    user_id = authenticated_userid(request)
    # NOTE(review): token is hard-coded for testing; re-enable the
    # user_token() lookup for production use.
    token = "1234"  # user_token(request, user_id)
    if not token:
        raise Exception("Can not find token")
    session_id_help = ("An identifier used to avoid processes running on the same directory." +
                       " Using an existing one will remove all data inside its directory.")
    session_ids = get_session_ids(user_id, request)
    if session_ids == []:
        session_id_help += " There are currently no existing Session IDs."
    else:
        session_id_help += " The existing Session IDs are:<br>" + ", ".join(session_ids)
    qc_select_help = ("Comma separated list of parts of the path descriptions." +
                      " If at least one description in the list matches the path is included." +
                      " In the path description '.*' is for any character sequence. (e.g. " +
                      "AFR-44/.*/tas, EUR.*, /fx/)")
    qc_lock_help = ("Works similar to select, but prevents the given paths being added. " +
                    "Lock is stronger than select. (e.g. select tas and lock AFR-44 checks all " +
                    "paths with tas that do not contain AFR-44.)")
    # A field in fields must contain text, id and value. The entry help is optional.
    # allowed_values can be used if a limited number of possible values should be available.
    # In that case value will be used as default if it is in allowed_values.
    # For type "checkbox" the existence of the "checked" key will lead to the checkbox being True.
    fields = [
        {"id": "quality_server_address", "type": "text",
         "text": "URL to the Quality WPS", "value": DEFAULTQUALITYSERVER},
        {"id": "session_id", "type": "text", "text": "Session ID",
         "help": session_id_help, "value": "web1"},
        # {"id": "irods_home", "type": "text", "text": "iRods Home",
        #  "help": "The home directory of iRods", "value": "qc_dummy_DKRZ"},
        # {"id": "irods_collection", "type": "text", "text": "iRods collection",
        #  "help": "Name of the to analyze collection", "value": "qc_test_20140416"},
        {"id": "data_path", "type": "text",
         "text": "Root path of the to check data", "value": ""},
        {"id": "project", "type": "select", "text": "Project",
         "value": "CORDEX", "allowed_values": ["CORDEX"]},
        {"id": "select", "type": "text", "text": "QC SELECT", "value": "",
         "help": qc_select_help},
        {"id": "lock", "type": "text", "text": "QC LOCK", "value": "",
         "help": qc_lock_help},
        {"id": "replica", "type": "checkbox", "text": "Replica", "value": ""},
        {"id": "latest", "type": "checkbox", "text": "Latest", "value": "",
         "checked": "checked"},
        {"id": "publish_metadata", "type": "checkbox",
         "text": "Publish meta-data", "value": "", "checked": "checked"},
        {"id": "publish_quality", "type": "checkbox",
         "text": "Publish quality-data", "value": "", "checked": "checked"},
        {"id": "clean", "type": "checkbox", "text": "Clean afterwards",
         "value": "",
         "help": "Removes the work data after the steps have finished"},
    ]
    html_fields = get_html_fields(fields)

    if "submit" in request.POST:
        DATA = request.POST

        # shorten the method parameters by automatically inserting DATA
        def getValue(identifier):
            return getValueStatic(DATA, identifier)

        def getBool(identifier):
            return getBoolStatic(DATA, identifier)

        ##########################
        # collect input parameters
        ##########################
        wps_address = getValue("quality_server_address")
        username = str(user_id.replace("@", "(at)"))
        # BUGFIX: removed the no-op "token = token" self-assignment.
        session_id = getValue("session_id")
        # irods_home = DATA["irods_home"]
        # irods_collection = DATA["irods_collection"]
        data_path = getValue("data_path")
        project = getValue("project")
        # ensure lock and select are valid values.
        select = getValue("select")
        lock = getValue("lock")
        # html checkboxes are true if and only if they are in the POST (DATA variable)
        replica = getBool("replica")
        latest = getBool("latest")
        publish_metadata = getBool("publish_metadata")
        publish_quality = getBool("publish_quality")
        cleanup = getBool("clean")

        ###################
        # Run the wps call
        ###################
        wps = WebProcessingService(wps_address)
        identifier = "QC_Check_Full"
        inputs = [("username", username),
                  ("token", token),
                  ("session_id", session_id),
                  # ("irods_home", irods_home), ("irods_collection", irods_collection),
                  ("data_path", data_path),
                  ("project", project),
                  ("select", select),
                  ("lock", lock),
                  ("replica", replica),
                  ("latest", latest),
                  ("publish_metadata", publish_metadata),
                  ("publish_quality", publish_quality),
                  ("cleanup", cleanup)]
        # filter empty string values, because wps.execute does not like them.
        inputs = [(x, y) for (x, y) in inputs if y != ""]
        outputs = [("process_log", True)]
        execution = wps.execute(identifier, inputs=inputs, output=outputs)

        models.add_job(
            request=request,
            workflow=False,
            title=execution.process.title,
            wps_url=execution.serviceInstance,
            status_location=execution.statusLocation,
            notes="test",
            tags="test")
        return HTTPFound(location=request.route_url('jobs'))

    return {
        "title": title,
        "html_fields": html_fields,
    }