def cache_path():
    """Return the configured cache directory, defaulting to <outputpath>/cache.

    :return: path string for the server cache directory
    """
    cache_path = configuration.get_config_value("cache", "cache_path")
    if not cache_path:
        # BUG FIX: Logger.warn() is a deprecated alias; use warning().
        LOGGER.warning("No cache path configured. Using default value.")
        cache_path = os.path.join(
            configuration.get_config_value("server", "outputpath"), "cache")
    return cache_path
def _execute_process(self, async_, wps_request, wps_response):
    """Uses :module:`pywps.processing` module for sending process to background

    BUT first, check for maxprocesses configuration value

    :param async_: run in asynchronous mode
    :param wps_request: parsed WPS Execute request
    :param wps_response: response object whose status is updated
    :return: wps_response or None
    """
    # Upper bound on concurrently running processes (-1 disables the check below).
    maxparallel = int(config.get_config_value('server', 'parallelprocesses'))

    running, stored = dblog.get_process_counts()

    # async
    if async_:
        # run immediately
        LOGGER.debug("Running processes: {} of {} allowed parallelprocesses".format(running, maxparallel))
        LOGGER.debug("Stored processes: {}".format(stored))

        # Queue-capacity check: refuse when too many requests are already stored.
        maxprocesses = int(config.get_config_value('server', 'maxprocesses'))
        if stored >= maxprocesses:
            raise ServerBusy('Maximum number of processes in queue reached. Please try later.')
        LOGGER.debug("Store process in job queue, uuid={}".format(self.uuid))
        # Persist the request so a background worker can pick it up later.
        dblog.store_request(self.uuid, wps_request, self)
        wps_response._update_status(WPS_STATUS.ACCEPTED, 'PyWPS Process stored in job queue', 0)

    # not async
    else:
        # Synchronous requests cannot be queued; refuse when saturated.
        if running >= maxparallel and maxparallel != -1:
            raise ServerBusy('Maximum number of parallel running processes reached. Please try later.')
        wps_response._update_status(WPS_STATUS.ACCEPTED, "PyWPS Request accepted", 0)
        wps_response = self.run_process(wps_request, wps_response)

    return wps_response
def start(args, kill=None):
    """Build the demo WPS server and run it, optionally under waitress."""
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "pywps.cfg")
    demo_processes = [
        FeatureCount(),
        SayHello(),
        Centroids(),
        UltimateQuestion(),
        Sleep(),
        Buffer(),
        Area(),
        Box(),
        Warp(),
    ]
    server = Server(processes=demo_processes, config_file=config_file)

    # TODO: need to spawn a different process for different server
    if not args.waitress:
        server.run()
        return

    import waitress
    from pywps import configuration

    configuration.load_configuration(config_file)
    address = configuration.get_config_value('wps', 'serveraddress')
    host = address.split('://')[1]
    port = int(configuration.get_config_value('wps', 'serverport'))
    waitress.serve(server.app, host=host, port=port)
def _execute_xml_reference(self):
    """Pick a storage backend and return the WPS Reference node for this output."""
    node = WPS.Reference()

    # chooses FileStorage, S3Storage or PgStorage based on a store_type value in cfg file
    store_type = config.get_config_value('server', 'store_type')
    self.storage = None
    if store_type == 'db' and config.get_config_value('db', 'dbname'):
        # TODO: more databases in config file
        self.storage = PgStorage()
    elif store_type == 's3' and config.get_config_value('s3', 'bucket_name'):
        self.storage = S3Storage()
    else:
        self.storage = FileStorage()

    # get_url will create the file and return the url for it
    node.attrib['{http://www.w3.org/1999/xlink}href'] = self.get_url()

    fmt = self.data_format
    if fmt:
        # Only attach the format attributes that are actually set.
        for attr, value in (('mimeType', fmt.mime_type),
                            ('encoding', fmt.encoding),
                            ('schema', fmt.schema)):
            if value:
                node.attrib[attr] = value
    return node
def generate_recipe(diag, constraints=None, options=None, start_year=2000,
                    end_year=2005, output_format='pdf', workdir=None):
    """Render config.yml and the diagnostic's recipe template into *workdir*.

    :param diag: diagnostic name selecting the recipe template
    :param constraints: mapping of data constraints (default: empty)
    :param options: options passed through to the recipe template
    :param start_year: first year of the analysed period
    :param end_year: last year of the analysed period
    :param output_format: output format written into config.yml
    :param workdir: working directory (default: current directory)
    :return: tuple (recipe_file, config_file) of absolute paths
    """
    constraints = constraints or {}
    base_dir = os.path.abspath(workdir or os.curdir)
    output_dir = os.path.join(base_dir, 'output')

    # write config.yml
    rendered_config = template_env.get_template('config.yml').render(
        archive_root=configuration.get_config_value("data", "archive_root"),
        obs_root=configuration.get_config_value("data", "obs_root"),
        output_dir=output_dir,
        output_format=output_format,
    )
    config_file = os.path.abspath(os.path.join(base_dir, "config.yml"))
    with open(config_file, 'w') as out:
        out.write(rendered_config)

    # write recipe.yml from the diagnostic-specific template
    recipe_template = template_env.get_template('recipe_{0}.yml.j2'.format(diag))
    rendered_recipe = recipe_template.render(
        diag=diag,
        workdir=base_dir,
        constraints=constraints,
        start_year=start_year,
        end_year=end_year,
        options=options,
    )
    recipe_file = os.path.abspath(os.path.join(base_dir, "recipe.yml"))
    with open(recipe_file, 'w') as out:
        out.write(rendered_recipe)

    return recipe_file, config_file
def main():
    """Command-line entry point: parse arguments and launch the demo server."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-w', '--waitress', action='store_true')
    args = parser.parse_args()

    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "pywps.cfg")
    demo_processes = [
        FeatureCount(),
        SayHello(),
        Centroids(),
        UltimateQuestion(),
        Sleep(),
        Buffer(),
        Area(),
        Viewshed(),
    ]
    server = Server(processes=demo_processes, config_file=config_file)

    # TODO: need to spawn a different process for different server
    if not args.waitress:
        server.run()
        return

    import waitress
    from pywps import configuration

    configuration.load_configuration(config_file)
    host = configuration.get_config_value('wps', 'serveraddress').split('://')[1]
    port = int(configuration.get_config_value('wps', 'serverport'))
    waitress.serve(server.app, host=host, port=port)
def _run_process(self, wps_request, wps_response):
    """Run the process handler and map any failure onto a FAILED status.

    Walks the traceback to report the location inside the user's
    ``_handler``; the next queued process is always launched afterwards.
    """
    try:
        self._set_grass(wps_request)
        # if required set HOME to the current working directory.
        if config.get_config_value('server', 'sethomedir') is True:
            os.environ['HOME'] = self.workdir
            LOGGER.info(
                'Setting HOME to current working directory: {}'.format(
                    os.environ['HOME']))
            LOGGER.debug('ProcessID={}, HOME={}'.format(
                self.uuid, os.environ.get('HOME')))
        wps_response._update_status(WPS_STATUS.STARTED, u'PyWPS Process started', 0)
        self.handler(
            wps_request,
            wps_response)  # the user must update the wps_response.
        # Ensure process termination
        if wps_response.status != WPS_STATUS.SUCCEEDED and wps_response.status != WPS_STATUS.FAILED:
            # if (not wps_response.status_percentage) or (wps_response.status_percentage != 100):
            LOGGER.debug(
                'Updating process status to 100% if everything went correctly'
            )
            wps_response._update_status(
                WPS_STATUS.SUCCEEDED,
                'PyWPS Process {} finished'.format(self.title), 100)
    except Exception as e:
        traceback.print_exc()
        LOGGER.debug(
            'Retrieving file and line number where exception occurred')
        exc_type, exc_obj, exc_tb = sys.exc_info()
        found = False
        # Walk the traceback looking for the frame of the user's _handler.
        while not found:
            # search for the _handler method
            m_name = exc_tb.tb_frame.f_code.co_name
            if m_name == '_handler':
                found = True
            else:
                if exc_tb.tb_next is not None:
                    exc_tb = exc_tb.tb_next
                else:
                    # if not found then take the first
                    exc_tb = sys.exc_info()[2]
                    break
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        method_name = exc_tb.tb_frame.f_code.co_name

        # update the process status to display process failed
        msg = 'Process error: {}.{} Line {} {}'.format(
            fname, method_name, exc_tb.tb_lineno, e)
        LOGGER.error(msg)
        # Hide internal details from clients unless running at DEBUG level.
        if config.get_config_value("logging", "level") != "DEBUG":
            msg = 'Process failed, please check server error log'
        wps_response._update_status(WPS_STATUS.FAILED, msg, 100)
    finally:
        # The run of the next pending request is triggered here, whether or
        # not this one was successful.
        self.launch_next_process()

    return wps_response
def __init__(self):
    """Read Swift storage settings from the server configuration."""
    super().__init__()
    cfg = config.get_config_value
    # URL prefix under which stored outputs are served.
    self.output_url = cfg('server', 'outputurl')
    # Key used for generating Swift temporary URLs.
    self.temp_url_key = get_temp_url_key()
    # Target Swift container name.
    self.container = cfg('SwiftStorage', 'container')
def cache(self):
    """Return the path to the server cache directory."""
    out = configuration.get_config_value("cache", "cache_path")
    if not out:
        # BUG FIX: Logger.warn() is a deprecated alias; use warning().
        LOGGER.warning("No cache path configured. Using default value.")
        out = join(configuration.get_config_value("server", "outputpath"), "cache")
    return out
def get_session():
    """Get Connection for database

    Returns an open SQLAlchemy session, reusing the cached sessionmaker
    and closing any previously opened session first.
    """
    LOGGER.debug('Initializing database connection')
    global _SESSION_MAKER
    global _LAST_SESSION

    if _LAST_SESSION:
        _LAST_SESSION.close()

    if _SESSION_MAKER:
        _SESSION_MAKER.close_all()
        _LAST_SESSION = _SESSION_MAKER()
        return _LAST_SESSION

    database = configuration.get_config_value('logging', 'database')
    # SQL echoing is only useful below INFO level.
    echo = True
    level = configuration.get_config_value('logging', 'level')
    if level in ['INFO']:
        echo = False
    try:
        engine = sqlalchemy.create_engine(database, echo=echo)
    except sqlalchemy.exc.SQLAlchemyError as e:
        # BUG FIX: exceptions have no .message attribute on Python 3;
        # str(e) is safe on both Python 2 and 3.
        raise NoApplicableCode("Could not connect to database: {}".format(str(e)))

    Session = sessionmaker(bind=engine)
    ProcessInstance.metadata.create_all(engine)
    RequestInstance.metadata.create_all(engine)

    _SESSION_MAKER = Session

    _LAST_SESSION = _SESSION_MAKER()
    return _LAST_SESSION
def __init__(self, host=None, port=None, debug=False, processes=None, config_file=None):
    """Flask-based WPS server.

    :param host: override for the configured server address
    :param port: override for the configured server port
    :param debug: enable Flask debug mode
    :param processes: list of WPS processes to expose (default: empty)
    :param config_file: optional configuration file to load
    """
    # BUG FIX: avoid a mutable default argument ([]) shared across calls.
    if processes is None:
        processes = []
    self.app = flask.Flask(__name__)
    # Load config files and override settings if any file specified
    if config_file:
        configuration.load_configuration(config_file)
        self.host = configuration.get_config_value('wps', 'serveraddress').split('://')[1]
        self.port = int(configuration.get_config_value('wps', 'serverport'))

    # Override config host and port if they are passed to the constructor
    if host:
        self.host = host
    if port:
        self.port = port
    self.debug = debug
    self.output_url = configuration.get_config_value('server', 'outputUrl')
    self.output_path = configuration.get_config_value('server', 'outputPath')
    self.temp_path = configuration.get_config_value('server', 'tempPath')

    # check if in the configuration file specified directory exists otherwise create it
    try:
        if not os.path.exists(self.temp_path):
            os.makedirs(self.temp_path)
            print('%s does not exist. Creating it.' % self.temp_path)
        if not os.path.exists(self.output_path):
            os.makedirs(self.output_path)
            print('%s does not exist. Creating it.' % self.output_path)
    except Exception as e:
        raise NoApplicableCode('File error: Could not create folder. %s' % e)

    self.processes = processes
    self.service = Service(processes=self.processes)
def get_session():
    """Get Connection for database

    Returns an open SQLAlchemy session, reusing the cached sessionmaker
    and closing any previously opened session first.
    """
    LOGGER.debug('Initializing database connection')
    global _SESSION_MAKER
    global _LAST_SESSION

    if _LAST_SESSION:
        _LAST_SESSION.close()

    if _SESSION_MAKER:
        _SESSION_MAKER.close_all()
        _LAST_SESSION = _SESSION_MAKER()
        return _LAST_SESSION

    database = configuration.get_config_value('logging', 'database')
    # SQL echoing is only useful below INFO level.
    echo = True
    level = configuration.get_config_value('logging', 'level')
    if level in ['INFO']:
        echo = False
    try:
        engine = sqlalchemy.create_engine(database, echo=echo)
    except sqlalchemy.exc.SQLAlchemyError as e:
        # BUG FIX: exceptions have no .message attribute on Python 3;
        # str(e) is safe on both Python 2 and 3.
        raise NoApplicableCode("Could not connect to database: {}".format(str(e)))

    Session = sessionmaker(bind=engine)
    ProcessInstance.metadata.create_all(engine)
    RequestInstance.metadata.create_all(engine)

    _SESSION_MAKER = Session

    _LAST_SESSION = _SESSION_MAKER()
    return _LAST_SESSION
def start(args, kill=None):
    """Assemble the demo WPS server and run it, under waitress if requested."""
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "pywps.cfg")
    process_list = [
        FeatureCount(),
        SayHello(),
        Centroids(),
        UltimateQuestion(),
        Sleep(),
        Buffer(),
        Area(),
        Box(),
        Warp(),
    ]
    server = Server(processes=process_list, config_file=config_file)

    # TODO: need to spawn a different process for different server
    if args.waitress:
        import waitress
        from pywps import configuration

        configuration.load_configuration(config_file)
        serveraddress = configuration.get_config_value('wps', 'serveraddress')
        waitress.serve(
            server.app,
            host=serveraddress.split('://')[1],
            port=int(configuration.get_config_value('wps', 'serverport')))
    else:
        server.run()
def get_connstr():
    """Build an OGR/PostgreSQL connection string from the [db] config section."""
    db = lambda key: get_config_value("db", key)
    return "PG:dbname={} user={} password={} host={}".format(
        db("dbname"), db("user"), db("password"), db("host"))
def __init__(self):
    """Read Google Drive storage settings from the configuration."""
    cfg = config.get_config_value
    # OAuth client secret file used to authenticate against Drive.
    self.drive_secret_file = cfg('remote-storage', 'drive_secret_file')
    # Local directory where outputs are written before upload.
    self.target = cfg('server', 'outputpath')
    # Public base URL for stored outputs.
    self.output_url = '%s%s' % (cfg('server', 'url'), cfg('server', 'outputurl'))
def __init__(self):
    """Derive the output directory and URL from the server configuration."""
    cfg = config.get_config_value
    self.target = cfg('server', 'outputPath')
    # URL is assembled as <address>:<port><outputUrl>.
    self.output_url = '%s:%s%s' % (
        cfg('wps', 'serveraddress'),
        cfg('wps', 'serverport'),
        cfg('server', 'outputUrl'))
def test_s3_html_chart_upload():
    """Create the configured S3 bucket and upload the test chart to it."""
    config.load_configuration(TEST_CFG)
    bucket_name = config.get_config_value("s3", "bucket")
    region_name = config.get_config_value("s3", "region")
    s3_client = boto3.client("s3", region_name=region_name)
    s3_client.create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={'LocationConstraint': region_name})
    upload_chart_html_to_S3(TEST_CHART, "abcd")
def build(self):
    """Construct an S3Storage from the [s3] configuration section."""
    s3 = lambda key: wpsConfig.get_config_value('s3', key)
    # Positional order: bucket, prefix, public access flag, encrypt, region.
    return S3Storage(s3('bucket'), s3('prefix'), s3('public'),
                     s3('encrypt'), s3('region'))
def __init__(self):
    """Read Dropbox storage settings from the configuration."""
    cfg = config.get_config_value
    # Access token used to authenticate against the Dropbox API.
    self.dropbox_app_key = cfg('remote-storage', 'dropbox_access_token')
    # Local directory where outputs are written before upload.
    self.target = cfg('server', 'outputpath')
    # Public base URL for stored outputs.
    self.output_url = '%s%s' % (cfg('server', 'url'), cfg('server', 'outputurl'))
def __init__(self):
    """Read the output directory and URL from the server configuration."""
    cfg = config.get_config_value
    self.target = cfg('server', 'outputpath')
    # Base URL of served outputs: <server url> + <output url path>.
    self.output_url = '%s%s' % (cfg('server', 'url'), cfg('server', 'outputurl'))
def __init__(self):
    """Configure Dropbox storage from the server settings."""
    get = config.get_config_value
    # Dropbox API access token.
    self.dropbox_app_key = get('remote-storage', 'dropbox_access_token')
    # Directory outputs are staged in before upload.
    self.target = get('server', 'outputpath')
    # Base URL for stored outputs.
    self.output_url = '%s%s' % (get('server', 'url'), get('server', 'outputurl'))
def __init__(self):
    """Configure Google Drive storage from the server settings."""
    get = config.get_config_value
    # OAuth client secret file for the Drive API.
    self.drive_secret_file = get('remote-storage', 'drive_secret_file')
    # Directory outputs are staged in before upload.
    self.target = get('server', 'outputpath')
    # Base URL for stored outputs.
    self.output_url = '%s%s' % (get('server', 'url'), get('server', 'outputurl'))
def __init__(self):
    """Configure the output directory and URL from server settings."""
    get = config.get_config_value
    self.target = get('server', 'outputPath')
    # Output URL takes the form <address>:<port><outputUrl>.
    self.output_url = '%s:%s%s' % (
        get('wps', 'serveraddress'),
        get('wps', 'serverport'),
        get('server', 'outputUrl'))
def _assign_port():
    """Find a free TCP port within the configured [processing] port range.

    :return: first port in [port_min, port_max) that refuses a connection
    :raises NoAvailablePort: when every port in the range is in use
    """
    port_min = int(config.get_config_value("processing", "port_min"))
    port_max = int(config.get_config_value("processing", "port_max"))
    for port in range(port_min, port_max):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            # connect_ex returns a non-zero errno when nothing is listening,
            # i.e. the port is free for us to use.
            res = sock.connect_ex(('127.0.0.1', port))
        finally:
            # BUG FIX: the socket was never closed, leaking one file
            # descriptor per probed port.
            sock.close()
        # TODO find better solution for errno
        if res != 0:
            return port
    raise NoAvailablePort("No port from range {}-{} available.".format(
        port_min, port_max))
def _run_process(self, wps_request, wps_response):
    """Run the process handler, mapping any failure to a FAILED status.

    ProcessError messages are passed through to the client; other
    exceptions are masked unless the logging level is DEBUG. The next
    queued process is always launched afterwards.
    """
    LOGGER.debug("Started processing request: {}".format(self.uuid))
    try:
        self._set_grass(wps_request)
        # if required set HOME to the current working directory.
        if config.get_config_value('server', 'sethomedir') is True:
            os.environ['HOME'] = self.workdir
            LOGGER.info('Setting HOME to current working directory: {}'.format(os.environ['HOME']))
            LOGGER.debug('ProcessID={}, HOME={}'.format(self.uuid, os.environ.get('HOME')))
        wps_response._update_status(WPS_STATUS.STARTED, u'PyWPS Process started', 0)
        self.handler(wps_request, wps_response)  # the user must update the wps_response.
        # Ensure process termination
        if wps_response.status != WPS_STATUS.SUCCEEDED and wps_response.status != WPS_STATUS.FAILED:
            # if (not wps_response.status_percentage) or (wps_response.status_percentage != 100):
            LOGGER.debug('Updating process status to 100% if everything went correctly')
            wps_response._update_status(WPS_STATUS.SUCCEEDED, 'PyWPS Process {} finished'.format(self.title), 100)
    except Exception as e:
        traceback.print_exc()
        LOGGER.debug('Retrieving file and line number where exception occurred')
        exc_type, exc_obj, exc_tb = sys.exc_info()
        found = False
        # Walk the traceback looking for the frame of the user's _handler.
        while not found:
            # search for the _handler method
            m_name = exc_tb.tb_frame.f_code.co_name
            if m_name == '_handler':
                found = True
            else:
                if exc_tb.tb_next is not None:
                    exc_tb = exc_tb.tb_next
                else:
                    # if not found then take the first
                    exc_tb = sys.exc_info()[2]
                    break
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        method_name = exc_tb.tb_frame.f_code.co_name

        # update the process status to display process failed
        msg = 'Process error: method={}.{}, line={}, msg={}'.format(fname, method_name, exc_tb.tb_lineno, e)
        LOGGER.error(msg)

        # In case of a ProcessError use the validated exception message.
        if isinstance(e, ProcessError):
            msg = "Process error: {}".format(e)
        # Only in debug mode we use the log message including the traceback ...
        elif config.get_config_value("logging", "level") != "DEBUG":
            # ... otherwise we use a sparse common error message.
            msg = 'Process failed, please check server error log'
        wps_response._update_status(WPS_STATUS.FAILED, msg, 100)
    finally:
        # The run of the next pending request is triggered here, whether or
        # not this one was successful.
        self.launch_next_process()

    return wps_response
def _set_uuid(self, uuid):
    """Set uuid and derive the status location path and url from it."""
    self.uuid = uuid

    file_path = config.get_config_value('server', 'outputpath')
    file_url = config.get_config_value('server', 'outputurl')

    # The status document lives at <outputpath>/<uuid>.xml and is served
    # from the matching location under <outputurl>.
    self.status_location = os.path.join(file_path, str(self.uuid)) + '.xml'
    self.status_url = os.path.join(file_url, str(self.uuid)) + '.xml'
def __init__(self):
    """Build a libpq connection string from the [db] config section."""
    # TODO: more databases in config file
    section = "db"
    db = lambda key: config.get_config_value(section, key)
    self.dbname = db("dbname")
    # create connection string
    self.target = "dbname={} user={} password={} host={}".format(
        self.dbname, db("user"), db("password"), db("host"))
    self.schema_name = self._create_schema()
def __init__(self, cfgfiles=None):
    """Load configuration, set up logging and read job-queue limits."""
    global LOGGER
    config.load_configuration(cfgfiles)
    log_file = config.get_config_value('logging', 'file')
    log_level = config.get_config_value('logging', 'level')
    log_format = config.get_config_value('logging', 'format')
    LOGGER = get_logger(file=log_file, level=log_level, format=log_format)
    # Seconds to pause between job-queue polls.
    self.max_time = int(config.get_config_value('jobqueue', 'pause'))
    # Maximum number of processes allowed to run in parallel.
    self.maxparallel = int(config.get_config_value('server', 'parallelprocesses'))
def __init__(self, processes=None, cfgfiles=None):
    """Service mapping process identifiers to process objects.

    :param processes: iterable of WPS processes (default: none)
    :param cfgfiles: optional configuration file(s) to load
    """
    # BUG FIX: avoid the mutable default argument ([]).
    self.processes = {p.identifier: p for p in (processes or [])}

    if cfgfiles:
        config.load_configuration(cfgfiles)

    # Attach a file handler only when both logfile and loglevel are set.
    if config.get_config_value('server', 'logfile') and config.get_config_value('server', 'loglevel'):
        LOGGER.setLevel(getattr(logging, config.get_config_value('server', 'loglevel')))
        msg_fmt = '%(asctime)s] [%(levelname)s] file=%(pathname)s line=%(lineno)s module=%(module)s function=%(funcName)s %(message)s'
        fh = logging.FileHandler(config.get_config_value('server', 'logfile'))
        fh.setFormatter(logging.Formatter(msg_fmt))
        LOGGER.addHandler(fh)
    else:  # NullHandler
        LOGGER.addHandler(logging.NullHandler())
def __init__(self, processes=None, cfgfile=None):
    """Service mapping process identifiers to process objects.

    :param processes: iterable of WPS processes (default: none)
    :param cfgfile: optional configuration file to load
    """
    # BUG FIX: avoid the mutable default argument ([]).
    self.processes = {p.identifier: p for p in (processes or [])}

    if cfgfile:
        config.load_configuration(cfgfile)

    # Attach a file handler only when both logfile and loglevel are set.
    if config.get_config_value('server', 'logfile') and config.get_config_value('server', 'loglevel'):
        LOGGER.setLevel(getattr(logging, config.get_config_value('server', 'loglevel')))
        msg_fmt = '%(asctime)s] [%(levelname)s] file=%(pathname)s line=%(lineno)s module=%(module)s function=%(funcName)s %(message)s'
        fh = logging.FileHandler(config.get_config_value('server', 'logfile'))
        fh.setFormatter(logging.Formatter(msg_fmt))
        LOGGER.addHandler(fh)
    else:  # NullHandler
        LOGGER.addHandler(logging.NullHandler())
def run_process(self, wps_request, wps_response):
    """Run the process handler, mapping any failure to a FAILED status.

    ProcessError messages are passed through to the client; other
    exceptions are masked unless the logging level is DEBUG.
    """
    LOGGER.debug("Started processing request: {}".format(self.uuid))
    try:
        self._set_grass(wps_request)
        # if required set HOME to the current working directory.
        if config.get_config_value('server', 'sethomedir') is True:
            os.environ['HOME'] = self.workdir
            LOGGER.info('Setting HOME to current working directory: {}'.format(os.environ['HOME']))
            LOGGER.debug('ProcessID={}, HOME={}'.format(self.uuid, os.environ.get('HOME')))
        wps_response._update_status(WPS_STATUS.STARTED, 'PyWPS Process started', 0)
        self.handler(wps_request, wps_response)  # the user must update the wps_response.
        # Ensure process termination
        if wps_response.status != WPS_STATUS.SUCCEEDED and wps_response.status != WPS_STATUS.FAILED:
            # if (not wps_response.status_percentage) or (wps_response.status_percentage != 100):
            LOGGER.debug('Updating process status to 100% if everything went correctly')
            wps_response._update_status(WPS_STATUS.SUCCEEDED, 'PyWPS Process {} finished'.format(self.title), 100)
    except Exception as e:
        traceback.print_exc()
        LOGGER.debug('Retrieving file and line number where exception occurred')
        exc_type, exc_obj, exc_tb = sys.exc_info()
        found = False
        # Walk the traceback looking for the frame of the user's _handler.
        while not found:
            # search for the _handler method
            m_name = exc_tb.tb_frame.f_code.co_name
            if m_name == '_handler':
                found = True
            else:
                if exc_tb.tb_next is not None:
                    exc_tb = exc_tb.tb_next
                else:
                    # if not found then take the first
                    exc_tb = sys.exc_info()[2]
                    break
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        method_name = exc_tb.tb_frame.f_code.co_name

        # update the process status to display process failed
        msg = 'Process error: method={}.{}, line={}, msg={}'.format(fname, method_name, exc_tb.tb_lineno, e)
        LOGGER.error(msg)

        # In case of a ProcessError use the validated exception message.
        if isinstance(e, ProcessError):
            msg = "Process error: {}".format(e)
        # Only in debug mode we use the log message including the traceback ...
        elif config.get_config_value("logging", "level") != "DEBUG":
            # ... otherwise we use a sparse common error message.
            msg = 'Process failed, please check server error log'
        wps_response._update_status(WPS_STATUS.FAILED, msg, 100)

    return wps_response
def _post_request(self):
    """HTTP POST request parser

    Validates the request body against 'maxrequestsize', parses the XML
    payload and dispatches to the operation-specific parser.

    :raises FileSizeExceeded: when the request body is too large
    :raises NoApplicableCode: when the XML payload cannot be parsed
    """
    # check if input file size was not exceeded
    maxsize = configuration.get_config_value('server', 'maxrequestsize')
    maxsize = configuration.get_size_mb(maxsize) * 1024 * 1024
    if self.http_request.content_length > maxsize:
        raise FileSizeExceeded(
            'File size for input exceeded.'
            ' Maximum request size allowed: {} megabytes'.format(
                maxsize / 1024 / 1024))

    try:
        doc = lxml.etree.fromstring(self.http_request.get_data())
    except Exception as e:
        # BUG FIX: a generic Exception has no .message attribute on
        # Python 3 and no reliable .msg attribute either; str(e) is
        # always safe on both interpreters.
        raise NoApplicableCode(str(e))

    operation = doc.tag
    version = get_version_from_ns(doc.nsmap[doc.prefix])
    self.set_version(version)

    request_parser = self._post_request_parser(operation)
    request_parser(doc)
def test_get_opendap_datasets_bccaqv2(mock_tdscatalog):
    """Filter BCCAQv2 THREDDS datasets by variable and RCP scenario.

    Only the 'tasmin' + 'rcp45' entries should match: two of the seven
    dataset names below.
    """
    names = [
        "tasmin_day_BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp85_r1i1p1_19500101-21001231.nc",
        "tasmin_day_BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp45_r1i1p1_19500101-21001231.nc",
        "tasmin_day_BCCAQv2+ANUSPLIN300_CanESM2_historical+rcp45_r1i1p1_19500101-21001231.nc",
        "tasmax_day_BCCAQv2+ANUSPLIN300_CanESM2_historical+rcp45_r1i1p1_19500101-21001231.nc",
        "tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-M_historical+rcp26_r1i1p1_19500101-21001231.nc",
        "tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp85_r1i1p1_19500101-21001231.nc",
        "tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp45_r1i1p1_19500101-21001231.nc",
    ]
    catalog_url = configuration.get_config_value("finch", "dataset_bccaqv2")
    variable = "tasmin"
    rcp = "rcp45"
    # Replace the THREDDS catalog with a mock returning fabricated datasets.
    mock_catalog = mock.MagicMock()
    mock_tdscatalog.return_value = mock_catalog

    def make_dataset(name):
        # Minimal stand-in for a catalog dataset: only the attributes the
        # code under test reads (access_urls and name).
        dataset = mock.MagicMock()
        dataset.access_urls = {"OPENDAP": "url"}
        dataset.name = name
        return dataset

    mock_catalog.datasets = {name: make_dataset(name) for name in names}
    urls = get_bccaqv2_opendap_datasets(catalog_url, [variable], rcp)
    assert len(urls) == 2
def execute(self, identifier, wps_request, uuid):
    """Parse and perform Execute WPS request call

    :param identifier: process identifier string
    :param wps_request: pywps.WPSRequest structure with parsed inputs, still in memory
    :param uuid: string identifier of the request
    :raises InvalidParameterValue: when the identifier is unknown
    :return: response produced by :meth:`_parse_and_execute`
    """
    self._set_grass()
    response = None
    try:
        process = self.processes[identifier]

        # make deep copy of the process instance
        # so that processes are not overriding each other
        # just for execute
        process = copy.deepcopy(process)

        workdir = os.path.abspath(config.get_config_value('server', 'workdir'))
        tempdir = tempfile.mkdtemp(prefix='pywps_process_', dir=workdir)
        process.set_workdir(tempdir)
    except KeyError:
        raise InvalidParameterValue("Unknown process '%r'" % identifier, 'Identifier')

    # Run inside the process workdir; always restore the previous cwd.
    olddir = os.path.abspath(os.curdir)
    try:
        os.chdir(process.workdir)
        response = self._parse_and_execute(process, wps_request, uuid)
    finally:
        os.chdir(olddir)
    return response
def _execute_xml_reference(self):
    """Choose the configured remote-storage backend and build the Reference node."""
    node = WPS.Reference()

    # Map the 'storage_option' config value onto a storage backend class;
    # anything unrecognised falls back to local file storage.
    storage_option = config.get_config_value('remote-storage', 'storage_option')
    backends = {
        'ftp': FTPStorage,
        'dropbox': DropboxStorage,
        'googledrive': GoogleDriveStorage,
    }
    self.storage = backends.get(storage_option, FileStorage)()

    # get_url will create the file and return the url for it
    node.attrib['{http://www.w3.org/1999/xlink}href'] = self.get_url()

    fmt = self.data_format
    if fmt:
        if fmt.mime_type:
            node.attrib['mimeType'] = fmt.mime_type
        if fmt.encoding:
            node.attrib['encoding'] = fmt.encoding
        if fmt.schema:
            node.attrib['schema'] = fmt.schema
    return node
def esgfsearch_url():
    """Return the server configuration value for the ESGF search node URL."""
    url = configuration.get_config_value("extra", "esgfsearch_url")
    if not url:
        # BUG FIX: Logger.warn() is a deprecated alias; use warning().
        LOGGER.warning("No ESGF Search URL configured. Using default value.")
        url = 'https://esgf-data.dkrz.de/esg-search'
    return url
def execute(self, identifier, wps_request, uuid):
    """Parse and perform Execute WPS request call

    :param identifier: process identifier string
    :param wps_request: pywps.WPSRequest structure with parsed inputs, still in memory
    :param uuid: string identifier of the request
    :raises InvalidParameterValue: when the identifier is unknown
    :return: response produced by :meth:`_parse_and_execute`
    """
    self._set_grass()
    response = None
    try:
        process = self.processes[identifier]
        # Give the process its own temporary working directory.
        workdir = os.path.abspath(config.get_config_value("server", "workdir"))
        tempdir = tempfile.mkdtemp(prefix="pywps_process_", dir=workdir)
        process.set_workdir(tempdir)
    except KeyError:
        raise InvalidParameterValue("Unknown process '%r'" % identifier, "Identifier")

    # Run inside the process workdir; always restore the previous cwd.
    olddir = os.path.abspath(os.curdir)
    try:
        os.chdir(process.workdir)
        response = self._parse_and_execute(process, wps_request, uuid)
    finally:
        os.chdir(olddir)
    return response
def _set_grass(self):
    """Set environment variables needed for GRASS GIS support
    """
    if not PY2:
        LOGGER.debug("Python3 is not supported by GRASS")
        return

    gisbase = config.get_config_value("grass", "gisbase")
    if gisbase and os.path.isdir(gisbase):
        LOGGER.debug("GRASS GISBASE set to %s" % gisbase)

        os.environ["GISBASE"] = gisbase

        # Both os.environ and os.putenv are set so that spawned child
        # processes inherit the updated values too.
        os.environ["LD_LIBRARY_PATH"] = "{}:{}".format(
            os.environ.get("LD_LIBRARY_PATH"), os.path.join(gisbase, "lib")
        )
        os.putenv("LD_LIBRARY_PATH", os.environ.get("LD_LIBRARY_PATH"))

        os.environ["PATH"] = "{}:{}:{}".format(
            os.environ.get("PATH"),
            os.path.join(gisbase, "bin"),
            os.path.join(gisbase, "scripts")
        )
        os.putenv("PATH", os.environ.get("PATH"))

        # Make the GRASS python package importable from this interpreter.
        python_path = os.path.join(gisbase, "etc", "python")
        os.environ["PYTHONPATH"] = "{}:{}".format(os.environ.get("PYTHONPATH"), python_path)
        os.putenv("PYTHONPATH", os.environ.get("PYTHONPATH"))
        sys.path.insert(0, python_path)
def href_handler(complexinput, datain):
    """<wps:Reference /> handler

    Downloads the referenced document into the input's workdir, enforcing
    the maximum input size, and marks the input as a reference.

    NOTE(review): ``href`` is not defined in this function; it presumably
    runs as a closure with ``href`` bound in the enclosing scope -- verify
    against the caller.
    """
    # save the reference input in workdir
    tmp_file = tempfile.mkstemp(dir=complexinput.workdir)[1]

    try:
        (reference_file, reference_file_data) = _openurl(href)
        data_size = reference_file.headers.get('Content-Length', 0)
    except Exception as e:
        raise NoApplicableCode('File reference error: %s' % e)

    # if the response did not return a 'Content-Length' header then calculate the size
    if data_size == 0:
        data_size = _get_datasize(reference_file_data)

    # check if input file size was not exceeded
    complexinput.calculate_max_input_size()
    byte_size = complexinput.max_size * 1024 * 1024
    if int(data_size) > int(byte_size):
        raise FileSizeExceeded('File size for input exceeded.'
                               ' Maximum allowed: %i megabytes' % complexinput.max_size,
                               complexinput.get('identifier'))

    try:
        # IMPROVEMENT: removed the redundant f.close() inside the with-block
        # (the context manager already closes the file) and the unused
        # 'tmp_dir' local that was never referenced.
        with open(tmp_file, 'w') as f:
            f.write(reference_file_data)
    except Exception as e:
        raise NoApplicableCode(e)

    complexinput.file = tmp_file
    complexinput.url = href
    complexinput.as_reference = True
def execute(self, identifier, wps_request):
    """Parse and perform Execute WPS request call

    :param identifier: process identifier string
    :param wps_request: pywps.WPSRequest structure with parsed inputs, still in memory
    :raises InvalidParameterValue: when the identifier is unknown
    :return: response produced by :meth:`_parse_and_execute`
    """
    response = None
    try:
        process = self.processes[identifier]
        workdir = config.get_config_value('server', 'workdir')
        tempdir = tempfile.mkdtemp(prefix='pypws_process_', dir=workdir)
        process.set_workdir(tempdir)
    except KeyError:
        raise InvalidParameterValue("Unknown process '%r'" % identifier)

    olddir = os.path.abspath(os.curdir)
    try:
        os.chdir(process.workdir)
        response = self._parse_and_execute(process, wps_request)
    finally:
        # IMPROVEMENT: the success and failure paths both duplicated the
        # cleanup (chdir back + rmtree); try/finally runs it exactly once
        # either way and re-raises automatically.
        os.chdir(olddir)
        shutil.rmtree(process.workdir)
    return response
def _set_grass(self):
    """Set environment variables needed for GRASS GIS support
    """
    if not PY2:
        LOGGER.debug('Python3 is not supported by GRASS')
        return

    gisbase = config.get_config_value('grass', 'gisbase')
    if gisbase and os.path.isdir(gisbase):
        LOGGER.debug('GRASS GISBASE set to %s' % gisbase)

        os.environ['GISBASE'] = gisbase

        # Both os.environ and os.putenv are set so that spawned child
        # processes inherit the updated values too.
        os.environ['LD_LIBRARY_PATH'] = '{}:{}'.format(
            os.environ.get('LD_LIBRARY_PATH'),
            os.path.join(gisbase, 'lib'))
        os.putenv('LD_LIBRARY_PATH', os.environ.get('LD_LIBRARY_PATH'))

        os.environ['PATH'] = '{}:{}:{}'.format(
            os.environ.get('PATH'),
            os.path.join(gisbase, 'bin'),
            os.path.join(gisbase, 'scripts'))
        os.putenv('PATH', os.environ.get('PATH'))

        # Make the GRASS python package importable from this interpreter.
        python_path = os.path.join(gisbase, 'etc', 'python')
        os.environ['PYTHONPATH'] = '{}:{}'.format(os.environ.get('PYTHONPATH'), python_path)
        os.putenv('PYTHONPATH', os.environ.get('PYTHONPATH'))
        sys.path.insert(0, python_path)
def __init__(self, processes=None, cfgfiles=None):
    """Service holding an ordered mapping of process identifiers to processes.

    :param processes: iterable of WPS processes (default: none)
    :param cfgfiles: optional configuration file(s) to load
    """
    # BUG FIX: avoid the mutable default argument ([]).
    # ordered dict of processes
    self.processes = OrderedDict((p.identifier, p) for p in (processes or []))

    if cfgfiles:
        config.load_configuration(cfgfiles)

    # Attach a file handler only when both logging file and level are set.
    if config.get_config_value('logging', 'file') and config.get_config_value('logging', 'level'):
        LOGGER.setLevel(getattr(logging, config.get_config_value('logging', 'level')))
        if not LOGGER.handlers:  # hasHandlers in Python 3.x
            fh = logging.FileHandler(config.get_config_value('logging', 'file'))
            fh.setFormatter(logging.Formatter(config.get_config_value('logging', 'format')))
            LOGGER.addHandler(fh)
    else:  # NullHandler | StreamHandler
        if not LOGGER.handlers:
            LOGGER.addHandler(logging.NullHandler())
def execute(self, identifier, wps_request, uuid):
    """Parse and perform Execute WPS request call

    :param identifier: process identifier string
    :param wps_request: pywps.WPSRequest structure with parsed inputs, still in memory
    :param uuid: string identifier of the request
    :raises InvalidParameterValue: when the identifier is unknown
    :return: response produced by :meth:`_parse_and_execute`
    """
    self._set_grass()
    response = None
    try:
        process = self.processes[identifier]

        # make deep copy of the process instance
        # so that processes are not overriding each other
        # just for execute
        process = copy.deepcopy(process)

        workdir = os.path.abspath(
            config.get_config_value('server', 'workdir'))
        tempdir = tempfile.mkdtemp(prefix='pywps_process_', dir=workdir)
        process.set_workdir(tempdir)
    except KeyError:
        raise InvalidParameterValue("Unknown process '%r'" % identifier,
                                    'Identifier')

    # Run inside the process workdir; always restore the previous cwd.
    olddir = os.path.abspath(os.curdir)
    try:
        os.chdir(process.workdir)
        response = self._parse_and_execute(process, wps_request, uuid)
    finally:
        os.chdir(olddir)
    return response
def _set_grass(self):
    """Set environment variables needed for GRASS GIS support
    """
    if not PY2:
        LOGGER.debug('Python3 is not supported by GRASS')
        return

    gisbase = config.get_config_value('grass', 'gisbase')
    if gisbase and os.path.isdir(gisbase):
        LOGGER.debug('GRASS GISBASE set to %s' % gisbase)

        os.environ['GISBASE'] = gisbase

        # Both os.environ and os.putenv are set so that spawned child
        # processes inherit the updated values too.
        os.environ['LD_LIBRARY_PATH'] = '{}:{}'.format(
            os.environ.get('LD_LIBRARY_PATH'),
            os.path.join(gisbase, 'lib'))
        os.putenv('LD_LIBRARY_PATH', os.environ.get('LD_LIBRARY_PATH'))

        os.environ['PATH'] = '{}:{}:{}'.format(
            os.environ.get('PATH'),
            os.path.join(gisbase, 'bin'),
            os.path.join(gisbase, 'scripts'))
        os.putenv('PATH', os.environ.get('PATH'))

        # Make the GRASS python package importable from this interpreter.
        python_path = os.path.join(gisbase, 'etc', 'python')
        os.environ['PYTHONPATH'] = '{}:{}'.format(
            os.environ.get('PYTHONPATH'), python_path)
        os.putenv('PYTHONPATH', os.environ.get('PYTHONPATH'))
        sys.path.insert(0, python_path)
def archive_root():
    """Return the configured archive root directories as a list.

    Reads ``data/archive_root`` from the configuration, interpreted as a
    ':'-separated list of paths. Returns an empty list when unset.
    """
    raw = configuration.get_config_value("data", "archive_root")
    if not raw:
        return []
    roots = [entry.strip() for entry in raw.split(':')]
    LOGGER.debug("using archive root %s", roots)
    return roots
def calculate_max_input_size(self):
    """Compute the maximal input file size from configuration.

    Reads ``server/maxsingleinputsize`` and stores the converted value
    on ``self.max_size``. Returns ``None`` — the old docstring's
    ":return: maximum file size bytes" was wrong on both counts.

    NOTE(review): ``get_size_mb`` suggests the stored value is in
    megabytes rather than bytes — confirm against
    ``configuration.get_size_mb``.
    """
    max_size = configuration.get_config_value('server', 'maxsingleinputsize')
    self.max_size = configuration.get_size_mb(max_size)
def max_input_size():
    """Calculates maximal size for input file based on configuration and units.

    :return: maximum file size in bytes
    """
    configured = config.get_config_value('server', 'maxsingleinputsize')
    megabytes = config.get_size_mb(configured)
    return megabytes * 1024 * 1024
def _construct_doc(self):
    """Render the describe-process XML response document.

    :raises MissingParameterValue: when no process identifiers were given
    :return: rendered XML document as a string
    """
    if not self.identifiers:
        raise MissingParameterValue('Missing parameter value "identifier"', 'identifier')

    template_name = self.version + '/describe/main.xml'
    template = self.template_env.get_template(template_name)

    configured_size = config.get_config_value('server', 'maxsingleinputsize')
    max_size = int(config.get_size_mb(configured_size))

    return template.render(max_size=max_size, **self.json)
def test_default_mode(self):
    """Default processing mode yields a MultiProcessing backend."""
    self.assertEqual(
        configuration.get_config_value('processing', 'mode'), 'default')
    process = pywps.processing.Process(
        process=self.dummy_process,
        wps_request=self.wps_request,
        wps_response=self.wps_response)
    # process.start() is deliberately not called: this test only checks
    # which processing backend gets constructed.
    # assertIsInstance (not assertTrue(isinstance(...))) reports the
    # actual type on failure.
    self.assertIsInstance(process, MultiProcessing)
def get_connection():
    """Get a cached sqlite3 connection for the logging database.

    The connection is memoized in the module-level ``_CONNECTION``.
    When the tables already exist their column layout is validated;
    otherwise the ``pywps_requests`` and ``pywps_stored_requests``
    tables are created.

    :raises NoApplicableCode: when existing tables have conflicting columns
    :return: open sqlite3 connection
    """
    LOGGER.debug('Initializing database connection')
    global _CONNECTION

    if _CONNECTION:
        return _CONNECTION

    database = configuration.get_config_value('server', 'logdatabase')
    if not database:
        database = ':memory:'

    # check_same_thread=False on the single shared connection: previously
    # only the table-creation branch set it, and that branch opened a
    # second connection while leaking the first one.
    connection = sqlite3.connect(database, check_same_thread=False)

    if check_db_table(connection):
        if check_db_columns(connection):
            _CONNECTION = connection
        else:
            raise NoApplicableCode("""
                Columns in the table 'pywps_requests' or
                'pywps_stored_requests' in database '%s' are in
                conflict
            """ % database)
    else:
        _CONNECTION = connection
        cursor = _CONNECTION.cursor()
        createsql = """
            CREATE TABLE pywps_requests(
                uuid VARCHAR(255) not null primary key,
                pid INTEGER not null,
                operation varchar(30) not null,
                version varchar(5) not null,
                time_start text not null,
                time_end text,
                identifier text,
                message text,
                percent_done float,
                status varchar(30)
            )
        """
        cursor.execute(createsql)

        createsql = """
            CREATE TABLE pywps_stored_requests(
                uuid VARCHAR(255) not null primary key,
                request BLOB not null
            )
        """
        cursor.execute(createsql)
        _CONNECTION.commit()

    return _CONNECTION
def _get_serviceinstance(self):
    """Build the XML-escaped GetCapabilities URL for this service.

    :return: the configured server URL with ``request=GetCapabilities``
        and ``service=WPS`` query parameters, ampersands escaped for
        embedding in an XML document
    """
    url = config.get_config_value("server", "url")
    params = {'request': 'GetCapabilities', 'service': 'WPS'}

    url_parts = list(urlparse.urlparse(url))
    query = dict(urlparse.parse_qsl(url_parts[4]))
    query.update(params)

    url_parts[4] = urlencode(query)
    # Escape '&' so the URL is valid inside XML; the previous
    # replace("&", "&") was a no-op (lost "&amp;" escaping).
    return urlparse.urlunparse(url_parts).replace("&", "&amp;")
def get_session():
    """Get a database session, caching the sqlalchemy session maker.

    The session maker is memoized in module-level ``_SESSION_MAKER``;
    the previously handed-out session (``_LAST_SESSION``) is closed
    before a new one is created.

    :raises NoApplicableCode: when the database engine cannot be created
    :return: a new sqlalchemy session
    """
    LOGGER.debug('Initializing database connection')
    global _SESSION_MAKER
    global _LAST_SESSION

    if _LAST_SESSION:
        _LAST_SESSION.close()

    if _SESSION_MAKER:
        _SESSION_MAKER.close_all()
        _LAST_SESSION = _SESSION_MAKER()
        return _LAST_SESSION

    database = configuration.get_config_value('logging', 'database')

    # SQL echo only when logging is more verbose than INFO.
    echo = True
    level = configuration.get_config_value('logging', 'level')
    level_name = logging.getLevelName(level)
    if isinstance(level_name, int) and level_name >= logging.INFO:
        echo = False

    try:
        if database.startswith("sqlite") or database.startswith("memory"):
            # StaticPool + check_same_thread=False: one shared in-process
            # sqlite connection usable from multiple threads.
            engine = sqlalchemy.create_engine(database,
                                              connect_args={'check_same_thread': False},
                                              poolclass=StaticPool,
                                              echo=echo)
        else:
            engine = sqlalchemy.create_engine(database, echo=echo, poolclass=NullPool)
    except sqlalchemy.exc.SQLAlchemyError as e:
        # str(e), not e.message: BaseException.message does not exist on
        # Python 3, so the old code raised AttributeError instead of
        # reporting the real connection error.
        raise NoApplicableCode("Could not connect to database: {}".format(str(e)))

    Session = sessionmaker(bind=engine)
    ProcessInstance.metadata.create_all(engine)
    RequestInstance.metadata.create_all(engine)
    _SESSION_MAKER = Session

    _LAST_SESSION = _SESSION_MAKER()
    return _LAST_SESSION
def _execute_process(self, async_, wps_request, wps_response):
    """Uses :module:`pywps.processing` module for sending process to background

    BUT first, check for maxprocesses configuration value

    :param async_: run in asynchronous mode
    :param wps_request: parsed Execute request
    :param wps_response: response object whose status is updated in place
    :raises ServerBusy: when the parallel limit (sync) or the stored-job
        queue limit (async) is already reached
    :return: wps_response or None
    """
    # -1 means "unlimited" for parallelprocesses (see checks below).
    maxparallel = int(config.get_config_value('server', 'parallelprocesses'))

    running, stored = dblog.get_process_counts()

    # async request
    if async_:

        LOGGER.debug("Running processes: {} of {} allowed parallelprocesses".format(running, maxparallel))
        LOGGER.debug("Stored processes: {}".format(stored))

        # run immediately when there is a free parallel slot
        if running < maxparallel or maxparallel == -1:
            wps_response._update_status(WPS_STATUS.ACCEPTED, u"PyWPS Request accepted", 0)
            LOGGER.debug("Accepted request {}".format(self.uuid))
            self._run_async(wps_request, wps_response)

        # otherwise try to queue the request for later execution
        else:
            maxprocesses = int(config.get_config_value('server', 'maxprocesses'))

            if stored >= maxprocesses:
                raise ServerBusy('Maximum number of processes in queue reached. Please try later.')
            LOGGER.debug("Store process in job queue, uuid={}".format(self.uuid))
            dblog.store_process(self.uuid, wps_request)
            # status update AFTER storing, so an ACCEPTED status always
            # corresponds to a persisted job
            wps_response._update_status(WPS_STATUS.ACCEPTED, u'PyWPS Process stored in job queue', 0)

    # sync request: run in-process, or refuse when all slots are taken
    else:
        if running >= maxparallel and maxparallel != -1:
            raise ServerBusy('Maximum number of parallel running processes reached. Please try later.')

        wps_response._update_status(WPS_STATUS.ACCEPTED, u"PyWPS Request accepted", 0)
        wps_response = self._run_process(wps_request, wps_response)

    return wps_response
def _store_process(self, stored, wps_request, wps_response):
    """Try to store the given request in the job queue for later execution.

    :param stored: number of requests currently stored in the queue
    :param wps_request: request to persist
    :raises ServerBusy: when the queue already holds ``maxprocesses`` entries
    :return: wps_response (unchanged)
    """
    maxprocesses = int(config.get_config_value('server', 'maxprocesses'))

    if stored >= maxprocesses:
        # Message fixed: this limit is the stored-job queue, not the
        # number of parallel running processes (matches _execute_process).
        raise ServerBusy('Maximum number of processes in queue reached. Please try later.')

    dblog.store_process(self.uuid, wps_request)
    return wps_response
def execute(self, wps_request):
    """Execute the process for the given WPS request.

    Depending on the request's store/status flags, the process either
    runs synchronously in this process or is spawned as a separate
    ``multiprocessing.Process`` that updates a stored status document.

    :param wps_request: parsed WPS Execute request
    :raises StorageNotSupported: storing requested but unsupported
    :raises OperationNotSupported: status updates requested but unsupported
    :return: WPSResponse instance
    """
    import multiprocessing
    self.uuid = str(uuid4())
    # Renamed from 'async': reserved keyword since Python 3.7, so the
    # original was a SyntaxError on modern interpreters.
    async_ = False
    wps_response = WPSResponse(self, wps_request)

    # check if status storage and updating are supported by this process
    if wps_request.store_execute == 'true':
        if self.store_supported != 'true':
            raise StorageNotSupported('Process does not support the storing of the execute response')

        file_path = config.get_config_value('server', 'outputPath')
        file_url = '%s:%s%s' % (
            config.get_config_value('wps', 'serveraddress'),
            config.get_config_value('wps', 'serverport'),
            config.get_config_value('server', 'outputUrl')
        )

        self.status_location = os.path.join(file_path, self.uuid) + '.xml'
        self.status_url = os.path.join(file_url, self.uuid) + '.xml'

        if wps_request.status == 'true':
            if self.status_supported != 'true':
                raise OperationNotSupported('Process does not support the updating of status')

            wps_response.status = WPSResponse.STORE_AND_UPDATE_STATUS
            async_ = True
        else:
            wps_response.status = WPSResponse.STORE_STATUS

    # spawn a background process only when asynchronous status updating
    # was requested; otherwise run inline
    if async_:
        process = multiprocessing.Process(target=self._run_process, args=(wps_request, wps_response))
        process.start()
    else:
        wps_response = self._run_process(wps_request, wps_response)

    return wps_response