def buildLoop(preProcessChain):
    """Build an actinia-core postbody from the loop template.

    Args:
        preProcessChain: parsed pre-process-chain object; only the first
            entry of ``procs`` is evaluated.

    Returns:
        str: rendered template collapsed to a single line, or None when
        no usable inputs could be collected.
    """
    webhookUrl = APP.url + "/resources/processes/operations/update"
    log.debug("Registering own webhook endpoint: " + str(webhookUrl))
    tpl = tplEnv.get_template('actiniaCore/pc_loop.json')

    class PCInputClass():
        # simple value holder consumed by the jinja template
        param = ''
        value = ''

    pcInputs = []
    proc = preProcessChain.procs[0]

    # NOTE: renamed loop variable — the previous name shadowed the
    # builtin `input`.
    for procInput in proc.input:
        inputType = getattr(procInput, "type", None)
        if inputType == 'PARAMETER':
            pcInputObject = PCInputClass()
            pcInputObject.param = procInput.name
            # parameter values are file names relative to the filestorage
            path = ACTINIACORE.filestorage + '/' + procInput.value[0]
            pcInputObject.value = path
            pcInputs.append(pcInputObject)
        elif procInput.table:
            # only the well-known map inputs are passed through as-is
            if procInput.name in ['a', 'b', 'c']:
                pcInputObject = PCInputClass()
                pcInputObject.param = procInput.name
                pcInputObject.value = procInput.value
                pcInputs.append(pcInputObject)
        else:
            log.error("Don't know what to do with input.")

    # bugfix: the previous check (`pcInputs is None`) could never trigger
    # because pcInputs is always a list. Treat "no inputs collected" as an
    # error instead of rendering an empty process chain.
    if not pcInputs or webhookUrl is None:
        log.error('Could not set all variables to replace in template.')
        return None

    postbody = tpl.render(inputs=pcInputs,
                          webhookUrl=webhookUrl).replace('\n', '')
    return postbody
def post(self):
    """HTTP POST handler: forward the received process chain to actinia-core.

    Returns:
        The shortened actinia-core response as JSON (200), or a
        SimpleStatusCodeResponseModel with status 500 on any failure.
    """
    log.debug("Received HTTP POST with process: \n" +
              json.dumps(request.get_json(force=True),
                         indent=4, sort_keys=True))
    try:
        fullResp = postActiniaCore(
            'test',
            request.get_json(force=True)
        )
        resp = shortenActiniaCoreResp(fullResp)

        if resp is None or fullResp['status'] == 'error':
            res = jsonify(SimpleStatusCodeResponseModel(
                status=500, message="failure"
            ))
            return make_response(res, 500)
        else:
            res = make_response(json.dumps(resp), 200)
            res.headers['Content-Type'] = 'application/json'
            return res
    except Exception:
        # bugfix: the exception was silently swallowed before; log it so
        # failures are diagnosable from the server log.
        log.exception('Error while forwarding process chain to actinia-core')
        res = jsonify(SimpleStatusCodeResponseModel(
            status=500, message="failure"))
        return make_response(res, 500)
def buildPCS1Grd(preProcessChain):
    """Render the Sentinel-1 GRD preprocessing template into a postbody.

    Returns:
        str: rendered template collapsed to a single line, or None when
        the process type is not 'preprocessing' or a variable is unset.
    """
    processType = preProcessChain.get('processType')
    if processType != 'preprocessing':
        log.error('process type is unknown')
        return None

    webhookUrl = APP.url + "/resources/processes/operations/update"
    log.debug("Registering own webhook endpoint: " + str(webhookUrl))
    tpl = tplEnv.get_template('actiniaCore/pc_r.s1.grd_template.json')

    # credentials and paths substituted into the template
    user = ACTINIACORE.esa_apihub_user
    pw = ACTINIACORE.esa_apihub_pw
    S1A_name = preProcessChain.get('title')
    raw_path = ACTINIACORE.filestorage + '/' + 'sentinel1/raw/'
    preprocessing_path = (ACTINIACORE.filestorage + '/'
                          + 'sentinel1/preprocessing/')

    template_vars = (user, pw, S1A_name, raw_path,
                     preprocessing_path, webhookUrl)
    if any(var is None for var in template_vars):
        log.error('Could not set all variables to replace in template.')
        return None

    rendered = tpl.render(user=user, pw=pw, S1A_name=S1A_name,
                          raw_path=raw_path,
                          preprocessing_path=preprocessing_path,
                          webhookUrl=webhookUrl)
    return rendered.replace('\n', '')
def checkConnection(url, name, expectedFormat):
    """ Method to test connection

    Args:
        url (string): url of resource to test.
        name (string): name of resource to test. Only used for logging
        expectedFormat (string): Format in which resource will respond.
            Can be 'xml' or 'json'

    Returns:
        True when the resource is reachable (and, for 'json', returns
        parseable JSON); None on any error.
    """
    # can be called by e.g.
    # checkConnection(GEONETWORK.csw_url, 'geonetwork', 'xml')
    log.debug('Testing connection to ' + url)

    try:
        resp = requests.get(url)
    except requests.exceptions.ConnectionError:
        log.error('Connection Error to ' + name)
        return None

    try:
        if expectedFormat == 'json':
            # raises if the response body is not valid JSON
            json.loads(resp.text)
        # bugfix: previously only the 'json' branch returned True; for
        # 'xml' (documented and used by callers) the function fell through
        # and returned None even though the request succeeded.
        log.debug('Connection successfull to ' + name)
        return True
    except Exception:
        log.error('Connection Error to ' + name)
        return None
def getRecordsByTags(tags):
    """ Method to get records by tags from geonetwork

    Attention: The tags received are case sensitive!
    PropertyIsLike is sensitive, matchCase not possible here. See
    https://trac.osgeo.org/geonetwork/wiki/CSW202Improvements

    This method can be called by @app.route('/metadata/raw/tags/<tags>')
    """
    log.debug('looking for tags ' + str(tags))

    try:
        url = GEONETWORK.csw_url
        tpl = tplEnv.get_template('geonetwork/post_records_by_tags.xml')
        # tags arrive as a comma-separated string in the URL
        tag_list = tags.split(',')
        postbody = tpl.render(tags=tag_list).replace('\n', '')
        headers = {'content-type': 'application/xml; charset=utf-8'}
    except Exception as e:
        log.error('Could not set needed variable')
        log.error(e)
        return None

    try:
        gnosresp = requests.post(url,
                                 data=postbody,
                                 headers=headers,
                                 auth=auth(GEONETWORK))
        return gnosresp.content
    except requests.exceptions.ConnectionError:
        log.error('Could not connect to gnos')
        return None
def index():
    """Serve the static index.html, with a minimal inline HTML fallback."""
    try:
        return current_app.send_static_file('index.html')
    except werkzeug.exceptions.NotFound:
        log.debug('No index.html found in static folder. Serving backup.')
        fallback = ("""<h1 style='color:red'>actinia-gdi</h1> <a href="latest/api/swagger.json">API docs</a>""")
        return fallback
def index():
    """Serve the static index.html, with a plugin-mode HTML fallback."""
    try:
        return current_app.send_static_file('index.html')
    except werkzeug.exceptions.NotFound:
        log.debug('No index.html found in static folder. Serving backup.')
        # when actinia-gdi is installed in single mode, the swagger
        # endpoint would be "latest/api/swagger.json". As api docs exist in
        # single mode, use this fallback for plugin mode.
        fallback = ("""<h1 style='color:red'>actinia GDI</h1> <a href="api/v1/swagger.json">API docs</a>""")
        return fallback
def update(uuid, utcnow):
    """ Method to update record in geonetwork

    Fetches the existing record by uuid, patches its timestamp via
    updateXml, and POSTs the result back to the geonetwork CSW
    publication endpoint.

    Args:
        uuid: metadata record uuid in geonetwork
        utcnow: timestamp string written into the record

    Returns:
        The requests response object on success, None on any error.
    """
    connection = checkConnectionWithoutResponse('geonetwork')

    if connection is None:
        log.error('Not updating metadata for uuid ' + uuid)
        return None

    try:
        response = getRecordByUUID(uuid)
        doc = parseString(response.decode('utf-8'))
        # recordNode is only used to verify the record exists (and for
        # the debug message); the raw `response` bytes are what get patched
        recordNode = doc.getElementsByTagName('gmd:MD_Metadata')[0]
        log.debug('Found metadata to update for ' + uuid + ', and '
                  + str(recordNode))
    except Exception:
        log.error('Could not find metadata record to update for uuid '
                  + uuid)
        return None

    record = updateXml(response, utcnow)
    if record is None:
        return None

    try:
        url = GEONETWORK.csw_pub
        postbodytpl = tplEnv.get_template('geonetwork/post_update_record.xml')
        postbody = postbodytpl.render(metadata_record=record,
                                      uuid=uuid).replace('\n', '')
        headers = {'content-type': 'application/xml; charset=utf-8'}
    except Exception as e:
        log.error('Could not set needed variable')
        log.error(e)
        return None

    try:
        log.info('Updating metadata record')
        gnosresp = requests.post(url,
                                 data=bytes(postbody, 'utf-8'),
                                 headers=headers,
                                 auth=auth(GEONETWORK))
        # NOTE(review): geonetwork appears to answer errors with an HTML
        # page instead of an error status code, hence the sniffing below —
        # the response is returned either way; only the log level differs.
        if '<html>' in gnosresp.content.decode('utf-8'):
            log.error('update error')
        else:
            log.info('update success')
        return gnosresp
    except requests.exceptions.ConnectionError:
        log.error('Could not connect to gnos')
        return None
    except Exception as e:
        log.error(e)
        return None
def createJob(jsonDict, process):
    """ Method to parse prePC including fetching information from
    geonetwork and writing information to Jobtable
    as well as starting job in actinia-core

    This method can be called by HTTP POST
    @app.route('/processes/test/jobs')
    """
    prePC_orig = json.dumps(jsonDict)
    # TODO: define prePC (pre processchain) model if differs from pc
    # (currently the raw jsonDict stands in for the missing model)

    connection = checkConnectionWithoutResponse('actinia-core')
    if connection is None:
        return None

    actiniaCoreResp = postActiniaCore(process, jsonDict)
    log.debug(actiniaCoreResp)

    job = insertNewJob(
        jsonDict,
        jsonDict,  # no dedicated prePC model yet
        process,
        jsonDict.get('feature_type'),  # empty at the moment (polygon later)
        actiniaCoreResp)

    if actiniaCoreResp['status'] == 'error':
        log.error("Error start processing in actinia-core")

    resourceId = parseActiniaIdFromUrl(actiniaCoreResp['resource_id'])
    job = updateJob(resourceId, actiniaCoreResp)
    return job
def buildPCDummy(preProcessChain):
    """Render the point-in-polygon dummy template into a postbody string.

    Returns:
        str: rendered template collapsed to one line, or None when the
        point, polygon or webhook url could not be resolved.
    """
    webhookUrl = APP.url + "/resources/processes/operations/update"
    log.debug("Registering own webhook endpoint: " + str(webhookUrl))
    tpl = tplEnv.get_template('actiniaCore/pc_point_in_polygon.json')

    point = preProcessChain.get('point')
    polygon = preProcessChain.get('polygon')

    if point is None or polygon is None or webhookUrl is None:
        log.error('Could not set all variables to replace in template.')
        return None

    rendered = tpl.render(point=point,
                          polygon=polygon,
                          webhookUrl=webhookUrl)
    return rendered.replace('\n', '')
def cancelJobById(jobid):
    """ Method to change the status of a job to 'TERMINATED' in the
    jobtabelle by using its jobid

    Args:
        jobid (int): id of job

    Returns:
        record (dict): the updated record, or None when the job does not
        exist or the update failed
    """
    utcnow = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    try:
        with jobdb:
            queryResult = Job.select().where(
                getattr(Job, JOBTABLE.id_field) == jobid).get()
        record = model_to_dict(queryResult)
        log.debug("Information read from jobtable.")
    except Job.DoesNotExist:
        record = None

    try:
        query = Job.update(
            status='TERMINATED',
            time_ended=utcnow
        ).where(getattr(Job, JOBTABLE.id_field) == jobid)
        with jobdb:
            query.execute()
            queryResult2 = Job.select().where(
                getattr(Job, JOBTABLE.id_field) == jobid).get()
        record = model_to_dict(queryResult2)
    except Exception as e:
        log.error('Could not set the status to "TERMINATED" '
                  + 'and the time_ended in the jobtable.')
        log.error(str(e))
        record = None

    # bugfix: record can be None here (job missing or update failed);
    # previously record['idpk_jobs'] was accessed unconditionally and
    # raised a TypeError instead of returning None.
    if record is not None:
        log.info("Information updated in jobtable for job with id "
                 + str(record['idpk_jobs']) + ".")

    jobdb.close()
    return record
def getAllJobs(filters, process):
    """ Method to read all jobs from jobtabelle with filter

    Args:
        filters (ImmutableMultiDict): the args from the HTTP call
        process (str): process name to filter by; 'test' matches all

    Returns:
        jobs (list): the records matching the filter
    """
    log.debug('Received query for jobs')

    if process == 'test':
        # tautology ('a' = 'a') so every process matches
        query = Expression('a', '=', 'a')
    else:
        query = Expression(getattr(Job, 'process'), '=', process)

    if filters:
        log.debug("Found filters: " + str(filters))
        keys = [key for key in filters]

        for key in keys:
            # skip filter keys that are not columns of the Job model
            try:
                getattr(Job, key)
            except Exception as e:
                log.warning(str(e))
                continue

            log.debug("Filter " + str(key)
                      + " with value " + str(filters[key]))

            # id columns must be integer-parseable; abort the query if not
            if isinstance(getattr(Job, key), AutoField):
                try:
                    int(filters[key])
                except Exception as e:
                    log.error(str(e))
                    jobdb.close()
                    return

            try:
                # even though operators are listed as == and & in peewee
                # docs, for Expression creation use '=' and 'AND'.
                exp = Expression(getattr(Job, key), '=', filters[key])
                query = Expression(query, 'AND', exp)
            except AttributeError as e:
                log.error(str(e))

    with jobdb:
        queryResult = Job.select().where(query).dicts()

    jobs = []
    # iterating reopens db connection!!
    for i in queryResult:
        jobs.append(i)

    log.info("Found " + str(len(jobs)) + " results for query.")

    jobdb.close()
    return jobs
def getAllIds():
    """ Method to read all job ids from jobtabelle

    Args: -

    Returns:
        jobIds (list): list of all job ids in the table
    """
    with jobdb:
        queryResult = Job.select(getattr(Job, JOBTABLE.id_field)).dicts()

    # iterating reopens db connection!!
    jobIds = [row[JOBTABLE.id_field] for row in queryResult]

    log.debug("Information read from jobtable.")
    jobdb.close()
    return jobIds
def buildPCConnectionString(prePCTable):
    """Build a GDAL PostgreSQL connection string from a key-value table spec.

    Args:
        prePCTable (string): spec of the form
            'HOST=123;PORT=5432;DB=test;SCHEMA=actinia;TABLE=point'
            (only SCHEMA and TABLE are taken from the spec; the other
            connection values come from the GISTABLE config)

    Returns:
        tuple: (connection string, table name)
    """
    log.debug('Creating db connection string from "' + prePCTable + '"')
    # 'HOST=123;PORT=5432;DB=test;SCHEMA=actinia;TABLE=point'

    class DBClass():
        # connection defaults come from the configured GIS database
        db = GISTABLE.database
        host = GISTABLE.host
        port = GISTABLE.port
        user = GISTABLE.user
        schema = ''
        table = ''

    dbInstance = DBClass()

    for kvp in prePCTable.split(';'):
        key, val = kvp.split('=', 1)
        if key == 'SCHEMA':
            dbInstance.schema = val
        if key == 'TABLE':
            dbInstance.table = val

    # bugfix: this statement and the `table` assignment were corrupted
    # (redacted) in the previous revision; reconstructed from the example
    # result below, which ends at "user=<user>" with no password.
    pcConnectionString = ("PG:dbname=" + dbInstance.db
                          + " host=" + dbInstance.host
                          + " port=" + dbInstance.port
                          + " active_schema=" + dbInstance.schema
                          + " user=" + dbInstance.user)
    table = dbInstance.table

    log.debug("Created db connection string: " + pcConnectionString
              + " - table: " + table)
    # ('PG:dbname=gis host=localhost port=5555 active_schema=actinia
    # user=actinia', 'points_for_testing')

    return pcConnectionString, table
def logstring(module_id, param, key):
    """Emit a debug message that a module parameter lacks a given key."""
    message = " ".join([module_id, param, "has no key", key])
    log.debug(message)
from flask_restful_swagger_2 import Api

from actinia_gdi import endpoints
from actinia_gdi.core.jobtable import initJobDB
from actinia_gdi.resources.logging import log
from actinia_gdi.resources.config import APP

# create the Flask application and allow cross-origin requests
app = Flask(__name__)
CORS(app)

# swagger documentation wrapper around the app; served under
# /latest/api/swagger
apidoc = Api(
    app,
    title="actinia-gdi",
    api_version=APP.version,
    api_spec_url='/latest/api/swagger',
    schemes=['https', 'http'],
    consumes=['application/json'],
    description="""actinia GDI takes care of integrating actinia-core into an existing geodata infrastructure connecting clients, applications, databases, metadata and more. """)

# register all HTTP endpoints and make sure the job table exists
endpoints.addEndpoints(app, apidoc)
initJobDB()

if __name__ == '__main__':
    # call this for development only with `python -m actinia_gdi.main`
    log.debug('starting app in development mode...')
    app.run(debug=True, use_reloader=False)
    # for production environment use application in wsgi.py
""" __author__ = "Carmen Tawalika" __copyright__ = "2018-present mundialis GmbH & Co. KG" __license__ = "Apache-2.0" from peewee import PostgresqlDatabase, Model from peewee import CharField, DateTimeField, AutoField from playhouse.postgres_ext import BinaryJSONField from playhouse.pool import PooledPostgresqlExtDatabase from actinia_gdi.resources.config import JOBTABLE from actinia_gdi.resources.logging import log log.debug("Database config loaded: " + JOBTABLE.host + ":" + JOBTABLE.port + "/" + JOBTABLE.database + "/" + JOBTABLE.schema + "." + JOBTABLE.table) """database connection""" jobdb = PooledPostgresqlExtDatabase( JOBTABLE.database, **{ 'host': JOBTABLE.host, 'port': JOBTABLE.port, 'user': JOBTABLE.user, 'password': JOBTABLE.pw, 'max_connections': 8, 'stale_timeout': 300 }) class BaseModel(Model): """Base Model for tables in jobdb
def ParseInterfaceDescription(xml_string, keys=None):
    """Parses output of GRASS interface-description
    and returns openEO process object

    Args:
        xml_string: XML output of a GRASS module's --interface-description
        keys: optional list of virtual parameter keys; when given, only
            parameters whose virtual key is listed are kept (actinia
            module case) and all flags are skipped

    Returns:
        Module: openEO process object describing the GRASS module
    """
    gm_dict = xmltodict.parse(xml_string)['task']

    module_id = gm_dict['@name']
    description = gm_dict['description']
    categories = gm_dict['keywords'].replace(' ', '').split(',')
    categories.append('grass-module')
    parameters = {}
    returns = {}
    extrakwargs = dict()

    # modules without parameters/flags are valid; fall back to empty lists
    try:
        grass_params = gm_dict['parameter']
    except KeyError:
        logstring(module_id, "", "has no parameter")
        grass_params = []

    try:
        flags = gm_dict['flag']
    except KeyError:
        logstring(module_id, "", "has no flags")
        flags = []

    for parameter in grass_params:
        kwargs = dict()
        schema_kwargs = dict()

        if keys:
            # case for actinia modules
            key = setVirtualParameterKey(module_id, parameter)
            if key not in keys:
                continue
        else:
            # case for GRASS modules
            key = setParameterKey(module_id, parameter)

        schema_kwargs = setParamType(module_id, key, parameter,
                                     schema_kwargs)
        kwargs = setParameterDescription(module_id, key, parameter,
                                         kwargs)
        kwargs = setParameterRequired(parameter, kwargs)
        schema_kwargs = setParameterDefault(parameter, schema_kwargs)
        schema_kwargs = setParameterEnum(parameter, schema_kwargs)

        param_object = ModuleParameter(**kwargs,
                                       schema=ModuleParameterSchema(
                                           **schema_kwargs))
        # outputs go to `returns`, everything else to `parameters`
        if isOutput(parameter):
            returns[key] = param_object
        else:
            parameters[key] = param_object
        del kwargs
        del schema_kwargs

    for parameter in flags:
        # not possible to specify flag values via template at the moment
        if keys:
            continue
        kwargs = dict()
        schema_kwargs = dict()
        # flags are modelled as booleans that default to off
        schema_kwargs['type'] = 'boolean'
        schema_kwargs['default'] = 'False'

        key = setParameterKey(module_id, parameter)

        kwargs = setParameterDescription(module_id, key, parameter,
                                         kwargs)
        kwargs = setParameterRequired(parameter, kwargs)

        param_object = ModuleParameter(**kwargs,
                                       schema=ModuleParameterSchema(
                                           **schema_kwargs))
        parameters[key] = param_object
        del kwargs
        del schema_kwargs

    # custom extention for importer + exporter from actinia_core
    try:
        tpl = tplEnv.get_template('gmodules/' + module_id + '.json')
        pc_template = json.loads(tpl.render().replace('\n', ''))
        for key in [*pc_template]:
            extrakwargs[key] = {}
            for param in pc_template[key]:
                extrakwargs[key][param] = ModuleParameter(
                    **pc_template[key][param])
    except Exception as e:
        # if no template for module exist, use as is (default)
        log.debug('template %s does not exist.', e)

    grass_module = Module(id=module_id,
                          description=description,
                          categories=sorted(categories),
                          parameters=parameters,
                          returns=returns,
                          **extrakwargs)

    return grass_module
def updateJobByResourceID(resourceId, resp, status):
    """ Method to update job in jobtabelle when processing status changed

    Args:
        resourceId (str): actinia-core resourceId
        resp (dict): actinia-core response
        status (string): actinia-core processing status

    Returns:
        tuple: (record, gnosUuid, utcnow) on success,
        (None, None, None) on any error
    """
    utcnow = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

    try:
        with jobdb:
            queryResult = Job.select().where(
                getattr(Job, 'actinia_core_jobid') == resourceId).get()
        record = model_to_dict(queryResult)
        log.debug("Information read from jobtable for job with id "
                  + str(record['idpk_jobs']) + ".")
    except Job.DoesNotExist:
        log.warning("Job does not exist and can therefore not be updated")
        return None, None, None

    # status mapping between the two systems:
    # actinia-gdi_["PENDING", "RUNNING", "SUCCEES", "ERROR", "TERMINATED"]
    # actinia-core [accepted, running, finished, error, terminated]

    try:
        log.debug("Update status to " + status + " for job with id "
                  + str(record['idpk_jobs']) + ".")
        gdiStatus = record['status']

        # the feature uuid is optional in the stored job description
        try:
            gnosUuid = record['job_description']['feature_uuid']
        except Exception:
            log.warning('Feature has no uuid')
            gnosUuid = None

        # outside debug mode, store only a trimmed-down response
        if current_app.debug is False:
            smallRes = dict()
            smallRes['message'] = resp.get('message', None)
            smallRes['process_results'] = resp.get('process_results', None)
            resp = smallRes
            # resp = smallifyResp(resp)
        # TODO: test in debug, then remove
        # resp = smallifyResp(resp)

        if status == 'accepted':
            # nothing to do; jobs start out as PENDING
            log.debug('Status already set to "PENDING"')
            return record, gnosUuid, utcnow
        elif status == 'running':
            if gdiStatus == 'RUNNING':
                log.debug('Status already set to "RUNNING"')
                return record, gnosUuid, utcnow
            query = Job.update(
                status='RUNNING',
                actinia_core_response=resp,
                time_started=utcnow
                # TODO: check if time_estimated can be set
                # time_estimated=
            ).where(getattr(Job, 'actinia_core_jobid') == resourceId)
        elif status in ['finished', 'error', 'terminated']:
            # map terminal actinia-core states to gdi states
            if status == 'finished':
                gdiStatus = 'SUCCESS'
            elif status == 'error':
                gdiStatus = 'ERROR'
            elif status == 'terminated':
                gdiStatus = 'TERMINATED'
            query = Job.update(
                status=gdiStatus,
                actinia_core_response=resp,
                time_ended=utcnow).where(
                    getattr(Job, 'actinia_core_jobid') == resourceId)
        else:
            log.error('Could not set the status to actinia-core status:'
                      + status + '(Status not found.)')
            return None, None, None

        with jobdb:
            query.execute()
            queryResult = Job.select().where(
                getattr(Job, 'actinia_core_jobid') == resourceId).get()
        record = model_to_dict(queryResult)
    except Exception as e:
        log.error('Could not set the status to actinia-core status: '
                  + status)
        log.error(str(e))
        return None, None, None

    log.info("Updated status to " + status + " for job with id "
             + str(record['idpk_jobs']) + ".")

    jobdb.close()
    return record, gnosUuid, utcnow
def cancelJob(jobid):
    """ Method to cancel job from Jobtable by id

    This method can be called by HTTP POST
    @app.route('/processes/test/jobs/<jobid>/operations/cancel')

    Returns:
        The (possibly updated) job record, or None on any error.
    """
    job = getJobById(jobid)

    # idiom fix: compare to None with "is", not "== None"
    if job is None:
        # bugfix: the previous message was missing a space before
        # "does not exist"
        log.error('The job with jobid ' + str(jobid) + ' does not exist')
        return None

    log.debug('The job with jobid ' + str(jobid) + ' exists')
    status = job['status']
    resourceId = job['actinia_core_jobid']

    if not status or not resourceId:
        log.error('Job status or resourceId is not set!')
        return None

    log.debug('Job status is ' + status + ' and resourceId is: '
              + resourceId)

    connection = checkConnectionWithoutResponse('actinia-core')
    if connection is None:
        log.error('There is no connection to actinia-core')
        return None

    if status not in ['PENDING', 'RUNNING']:
        # nothing to cancel; return the job as-is
        log.debug('Status not in PENDING or RUNNING, pass')
        return job

    log.debug('Status is in PENDING or RUNNING, will cancel')
    res = cancelActiniaCore(resourceId)
    if res:
        log.debug('Actinia-Core response TRUE')
        job = cancelJobById(jobid)
        # bugfix: cancelJobById can return None; guard before indexing
        if job is None:
            log.error('Could not update the job in the jobtable')
            return None
        log.debug('Job in jobtable is ' + job['status'])
        return job
    else:
        log.debug('Actinia-Core response is None')
        return None
def initJobDB():
    """Create jobtable on startup."""
    # safe=True issues CREATE TABLE IF NOT EXISTS, so repeated
    # startups are harmless
    Job.create_table(safe=True)
    log.debug('Created jobtable if not exists')
""" __author__ = "Carmen Tawalika" __copyright__ = "2018-present mundialis GmbH & Co. KG" __license__ = "Apache-2.0" from peewee import Model from peewee import CharField, DateTimeField, AutoField from playhouse.postgres_ext import BinaryJSONField from playhouse.pool import PooledPostgresqlExtDatabase from actinia_gdi.resources.config import JOBTABLE from actinia_gdi.resources.logging import log log.debug("Database config loaded: %s:%s/%s/%s.%s" % (JOBTABLE.host, JOBTABLE.port, JOBTABLE.database, JOBTABLE.schema, JOBTABLE.table)) """database connection""" jobdb = PooledPostgresqlExtDatabase( JOBTABLE.database, **{ 'host': JOBTABLE.host, 'port': JOBTABLE.port, 'user': JOBTABLE.user, 'password': JOBTABLE.pw, 'max_connections': 8, 'stale_timeout': 300 }) class BaseModel(Model): """Base Model for tables in jobdb