def get_data_path(layer, fh, mr):
    """
    Find the data path for a layer based on either the layer metadata or the
    WMS time parameters supplied by the user.

    :param layer: layer name
    :param fh: forecast time (WMS TIME value)
    :param mr: model run reference time (WMS DIM_REFERENCE_TIME value)

    :returns: `list` of [filepath, url]
    """

    model_run = re.sub("[^0-9]", "", mr)
    forecast = re.sub("[^0-9]", "", fh)

    if model_run not in [None, '']:
        id_ = '{}-{}-{}'.format(layer, model_run, forecast)
    else:
        id_ = '{}-{}'.format(layer, forecast)

    ti = load_plugin('tileindex', TILEINDEX_PROVIDER_DEF)

    try:
        res = ti.get(id_)
        filepath = res['properties']['filepath']
        url = res['properties']['url']
        res_arr = [filepath, url]
    except TileNotFoundError as err:
        LOGGER.debug(err)
        raise TileNotFoundError(err)

    return res_arr
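# Usage sketch (illustrative layer name and timestamps, not from the source):
# get_data_path() strips non-digit characters from the model run and forecast
# times and joins them with the layer name to form the tileindex id it looks up.
import re

example_layer = 'GDPS.ETA_TT'
example_mr = '2021-01-01T00:00:00Z'
example_fh = '2021-01-01T12:00:00Z'
example_id = '{}-{}-{}'.format(example_layer,
                               re.sub("[^0-9]", "", example_mr),
                               re.sub("[^0-9]", "", example_fh))
# example_id == 'GDPS.ETA_TT-20210101000000-20210101120000'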
def set_key(ctx, key, mapfile, map_, raw):
    """populate store"""

    if all([key is None, mapfile is None]):
        raise click.ClickException('Missing --key/-k or --mapfile/-m option')

    provider_def = {'type': STORE_TYPE, 'url': STORE_URL}
    st = load_plugin('store', provider_def)

    mapfile_ = mappyfile.open(mapfile, expand_includes=False)

    try:
        value = (mappyfile.dumps(mapfile_) if map_
                 else mappyfile.dumps(mapfile_['layers']))
        if raw:
            click.echo(f'Setting {key} in store ({st.url})')
            st.set_key(key, value, raw=True)
        else:
            click.echo(f'Setting geomet-mapfile_{key} in store ({st.url})')
            st.set_key(key, value)
    except StoreError as err:
        raise click.ClickException(err)

    click.echo('Done')
def get_key(ctx, key, raw):
    """get key from store"""

    if key is None:
        raise click.ClickException('Missing --key/-k')

    provider_def = {'type': STORE_TYPE, 'url': STORE_URL}
    st = load_plugin('store', provider_def)

    try:
        if raw:
            click.echo('Getting {} key from store ({}).'.format(key, st.url))
            retrieved_key = st.get_key(key, raw=True)
        else:
            click.echo('Getting geomet-mapfile_{} key from store ({}).'.format(
                key, st.url))
            retrieved_key = st.get_key(key)
        if retrieved_key:
            click.echo(retrieved_key)
    except StoreError as err:
        raise click.ClickException(err)

    click.echo('Done')
def test_load_plugin(self):
    """test plugin loading"""

    provider_def = {
        'type': 'Redis',
        'url': 'redis://localhost:9200',
    }

    result = load_plugin('store', provider_def)
    self.assertIsInstance(result, RedisStore)
def teardown(ctx, group=None):
    """delete store"""

    provider_def = {'type': STORE_TYPE, 'url': STORE_URL, 'group': group}
    st = load_plugin('store', provider_def)

    try:
        click.echo('Deleting store {}'.format(st.url))
        st.teardown()
    except StoreError as err:
        raise click.ClickException(err)

    click.echo('Done')
def update_mapfile(layer=None):
    """
    Updates a mapfile.

    :param layer: `str` of layer ID to update

    :returns: `bool` of update result
    """

    if layer:
        mapfiles = [
            f'{BASEDIR}{os.sep}mapfile{os.sep}geomet-weather-{layer}_layer.map'
        ]
    else:
        mapfiles = glob(f'{BASEDIR}{os.sep}mapfile{os.sep}*_layer.map')

    for mapfile in mapfiles:
        try:
            LOGGER.debug(f'Updating {mapfile}.')
            with open(mapfile, 'r+') as fp:
                mapfile_ = fp.read()
                updated_mapfile = find_replace_wms_timedefault(
                    mapfile, mapfile_
                )
                # go to start of file and re-write mapfile
                fp.seek(0)
                fp.write(updated_mapfile)
        except FileNotFoundError as e:
            LOGGER.error(e)

    # update mapfiles in store if MAPFILE_STORAGE set to store
    if MAPFILE_STORAGE == 'store':
        st = load_plugin('store', PROVIDER_DEF)
        if layer:
            mapfiles = [
                (f'geomet-mapfile_{layer}_layer', st.get_key(f'{layer}_layer'))
            ]
        else:
            mapfiles = [
                (key, st.get_key(key, raw=True))
                for key in st.list_keys('geomet-mapfile*_layer')
            ]
        for name, mapfile in mapfiles:
            LOGGER.debug(f'Updating {name} in store.')
            updated_mapfile = find_replace_wms_timedefault(name, mapfile)
            st.set_key(name, updated_mapfile, raw=True)

    return True
def list_keys(ctx, raw, pattern=None):
    """list all keys in store"""

    provider_def = {'type': STORE_TYPE, 'url': STORE_URL}
    st = load_plugin('store', provider_def)

    try:
        pattern = 'geomet-mapfile*{}'.format(pattern if pattern else '')
        if raw:
            keys = st.list_keys(pattern)
        else:
            keys = [
                remove_prefix(key, 'geomet-mapfile_')
                for key in st.list_keys(pattern)
            ]
        click.echo(json_pretty_print(keys))
    except StoreError as err:
        raise click.ClickException(err)

    click.echo('Done')
def application(env, start_response):
    """WSGI application for WMS/WCS"""

    for key in MAPSERV_ENV:
        if key in env:
            os.environ[key] = env[key]
        else:
            os.unsetenv(key)

    layer = None
    mapfile_ = None

    request = mapscript.OWSRequest()
    request.loadParams()

    lang_ = request.getValueByName('LANG')
    service_ = request.getValueByName('SERVICE')
    request_ = request.getValueByName('REQUEST')
    layers_ = request.getValueByName('LAYERS')
    layer_ = request.getValueByName('LAYER')
    coverageid_ = request.getValueByName('COVERAGEID')

    if lang_ is not None and lang_ in ['f', 'fr', 'fra']:
        lang = 'fr'
    else:
        lang = 'en'

    if layers_ is not None:
        layer = layers_
    elif layer_ is not None:
        layer = layer_
    elif coverageid_ is not None:
        layer = coverageid_
    else:
        layer = None

    if service_ is None:
        service_ = 'WMS'

    if layer is not None and len(layer) == 0:
        layer = None

    time_error = None

    LOGGER.debug('service: {}'.format(service_))
    LOGGER.debug('language: {}'.format(lang))

    if layer == 'GODS':
        banner = os.path.join(BASEDIR, 'geomet_mapfile/resources',
                              'other/banner.txt')
        with open(banner) as fh:
            start_response('200 OK', [('Content-Type', 'text/plain')])
            msg = fh.read()
            return ['{}'.format(msg).encode()]

    # fetch mapfile from store or from disk
    if MAPFILE_STORAGE == 'file':
        # if a single layer is specified in the LAYER param, fetch its mapfile
        # from disk
        if layer is not None and ',' not in layer:
            mapfile_ = '{}/mapfile/geomet-weather-{}.map'.format(
                BASEDIR, layer)
        # if mapfile_ is None or its path does not exist
        if mapfile_ is None or not os.path.exists(mapfile_):
            mapfile_ = '{}/mapfile/geomet-weather.map'.format(BASEDIR)
            # if mapfile_ path does not exist set mapfile_ to None
            if not os.path.exists(mapfile_):
                mapfile_ = None
    elif MAPFILE_STORAGE == 'store':
        st = load_plugin('store', {'type': STORE_TYPE, 'url': STORE_URL})
        if layer is not None and ',' not in layer:
            mapfile_ = st.get_key('{}_mapfile'.format(layer))
        if mapfile_ is None:
            mapfile_ = st.get_key('geomet-weather_mapfile')

    # if no mapfile at all is found, return an 'Unsupported service' exception
    if not mapfile_:
        start_response('400 Bad Request',
                       [('Content-Type', 'application/xml')])
        msg = 'Unsupported service'
        return [SERVICE_EXCEPTION.format(msg).encode()]

    # if requesting GetCapabilities for entire service, return cache
    if request_ == 'GetCapabilities' and layer is None:
        LOGGER.debug('Requesting global mapfile')
        if service_ == 'WMS':
            filename = 'geomet-weather-ogc-wms-1.3.0-capabilities-{}.xml'.format(lang)  # noqa
            cached_caps = os.path.join(BASEDIR, 'mapfile', filename)
            if os.path.isfile(cached_caps):
                start_response('200 OK',
                               [('Content-Type', 'application/xml')])
                with io.open(cached_caps, 'rb') as fh:
                    return [fh.read()]
    else:
        LOGGER.debug('Requesting layer mapfile')

    if os.path.exists(mapfile_):
        # read mapfile from filepath
        LOGGER.debug('Loading mapfile {} from disk'.format(mapfile_))
        mapfile = mapscript.mapObj(mapfile_)
    else:
        # read mapfile from string returned from store
        LOGGER.debug('Loading {}_mapfile from store'.format(
            layer if layer else 'geomet-mapfile'))
        mapfile = mapscript.fromstring(mapfile_)

    layerobj = mapfile.getLayerByName(layer)

    time = request.getValueByName('TIME')
    ref_time = request.getValueByName('DIM_REFERENCE_TIME')

    if any(time_param == '' for time_param in [time, ref_time]):
        time_error = "Valeur manquante pour la date ou l'heure / Missing value for date or time"  # noqa
        start_response('200 OK', [('Content-type', 'text/xml')])
        return [SERVICE_EXCEPTION.format(time_error).encode()]

    if time is None:
        time = layerobj.getMetaData('wms_timedefault')

    if ref_time is None:
        ref_time = layerobj.getMetaData('wms_reference_time_default')

    try:
        filepath, url = get_data_path(layer, time, ref_time)
    except TileNotFoundError as err:
        LOGGER.error(err)
        time_error = (
            'NoMatch: Date et heure invalides / Invalid date and time')
        start_response('200 OK', [('Content-type', 'text/xml')])
        return [SERVICE_EXCEPTION.format(time_error).encode()]

    try:
        if request_ in ['GetMap', 'GetFeatureInfo']:
            if all([
                filepath.startswith(os.sep),
                not os.path.isfile(filepath)
            ]):
                LOGGER.debug('File is not on disk: {}'.format(filepath))
                if not ALLOW_LAYER_DATA_DOWNLOAD:
                    LOGGER.error('layer data downloading not allowed')
                    _error = 'data not found'
                    start_response('500 Internal Server Error',
                                   [('Content-type', 'text/xml')])
                    return [SERVICE_EXCEPTION.format(_error).encode()]
                if not os.path.exists(os.path.dirname(filepath)):
                    LOGGER.debug('Creating the filepath')
                    os.makedirs(os.path.dirname(filepath))
                LOGGER.debug('Downloading url: {}'.format(url))
                with urlopen(url) as r:
                    with open(filepath, 'wb') as fh:
                        fh.write(r.read())
            layerobj.data = filepath
    except ValueError as err:
        LOGGER.error(err)
        _error = (
            'NoApplicableCode: Donnée non disponible / Data not available')
        start_response('500 Internal Server Error',
                       [('Content-type', 'text/xml')])
        return [SERVICE_EXCEPTION.format(_error).encode()]

    if request_ == 'GetCapabilities' and lang == 'fr':
        metadata_lang(mapfile, layer.split(','), lang)

    mapscript.msIO_installStdoutToBuffer()

    # given we don't properly use tileindex due to performance issues,
    # we need to remove the time parameter from the request for the
    # uvraster layer
    if 'time' in env['QUERY_STRING'].lower():
        query_string = env['QUERY_STRING'].split('&')
        query_string = [x for x in query_string if 'time' not in x.lower()]
        request.loadParamsFromURL('&'.join(query_string))
    else:
        request.loadParamsFromURL(env['QUERY_STRING'])

    try:
        LOGGER.debug('Dispatching OWS request')
        mapfile.OWSDispatch(request)
    except (mapscript.MapServerError, IOError) as err:
        # let error propagate to service exception
        LOGGER.error(err)
        pass

    headers = mapscript.msIO_getAndStripStdoutBufferMimeHeaders()
    headers_ = [
        ('Content-Type', headers['Content-Type']),
    ]

    content = mapscript.msIO_getStdoutBufferBytes()

    start_response('200 OK', headers_)

    return [content]
def generate(ctx, layer, map_, output):
    """generate mapfile"""

    st = load_plugin('store', PROVIDER_DEF)

    output_dir = f'{BASEDIR}{os.sep}mapfile'
    all_layers = []

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(MAPFILE_BASE) as fh:
        mapfile = json.load(fh, object_pairs_hook=OrderedDict)

    symbols_file = os.path.join(THISDIR, 'resources/mapserv/symbols.json')
    with open(symbols_file) as fh2:
        mapfile['symbols'] = json.load(fh2)

    with open(CONFIG) as fh:
        cfg = load(fh, Loader=CLoader)

    if layer is not None:
        mapfiles = {layer: cfg['layers'][layer]}
    else:
        mapfiles = cfg['layers']

    # set PROJ_LIB path
    mapfile['config']['proj_lib'] = os.path.join(BASEDIR, 'geomet_mapfile',
                                                 'resources', 'mapserv')

    mapfile['web']['metadata'] = gen_web_metadata(mapfile, cfg['metadata'],
                                                  URL)

    for key, value in mapfiles.items():
        mapfile_copy = deepcopy(mapfile)
        mapfile_copy['layers'] = []

        layers = gen_layer(key, value)

        if layers:
            for lyr in layers:
                mapfile_copy['layers'].append(lyr)
                all_layers.append(lyr)

        # TODO: simplify
        if 'outputformats' in value['forecast_model']:
            mapfile_copy['outputformats'] = [
                format_ for format_ in mapfile_copy['outputformats']
                if format_['name'] in value['forecast_model']['outputformats']
            ]

        # TODO: simplify
        if 'symbols' in value:
            mapfile_copy['symbols'] = [
                symbol for symbol in mapfile_copy['symbols']
                if symbol['name'] in value['symbols'] or
                any(symbol_ in symbol['name']
                    for symbol_ in value['symbols'])
            ]
        else:
            mapfile_copy['symbols'] = []

        if map_:
            filename = f'geomet-weather-{key}.map'
        else:
            filename = f'geomet-weather-{key}_layer.map'

        filepath = f'{output_dir}{os.sep}{filename}'

        if output == 'file':
            with open(filepath, 'w', encoding='utf-8') as fh:
                if map_:
                    mappyfile.dump(mapfile_copy, fh)
                else:
                    mappyfile.dump(mapfile_copy['layers'], fh)
        elif output == 'store':
            if map_:
                st.set_key(f'{key}_mapfile', mappyfile.dumps(mapfile_copy))
            else:
                st.set_key(f'{key}_layer',
                           mappyfile.dumps(mapfile_copy['layers']))

    if layer is None:  # generate entire mapfile
        mapfile['layers'] = all_layers
        if output == 'file':
            filename = 'geomet-weather.map'
            filepath = f'{output_dir}{os.sep}{filename}'
            with open(filepath, 'w', encoding='utf-8') as fh:
                mappyfile.dump(mapfile, fh)
        elif output == 'store':
            st.set_key('geomet-weather_mapfile', mappyfile.dumps(mapfile))

    epsg_file = os.path.join(THISDIR, 'resources', 'mapserv', 'epsg')
    shutil.copy2(epsg_file, os.path.join(BASEDIR, output_dir))
def layer_time_config(layer_name):
    """
    Derive the time configuration for a layer from the store.

    :param layer_name: name of layer

    :returns: `dict` of time values for layer (default time, time extent,
              default model run, model run extent)
    """

    st = load_plugin('store', PROVIDER_DEF)

    time_extent = st.get_key(f'geomet-data-registry_{layer_name}_time_extent',
                             raw=True)
    default_time = st.get_key(
        f'geomet-data-registry_{layer_name}_default_time', raw=True)
    model_run_extent = st.get_key(
        f'geomet-data-registry_{layer_name}_model_run_extent', raw=True)
    default_model_run = st.get_key(
        f'geomet-data-registry_{layer_name}_default_model_run', raw=True)

    if not time_extent:
        LOGGER.error(f'Could not retrieve {layer_name} time extent'
                     f' information from store. Skipping mapfile generation'
                     f' for this layer.')
        return False

    intervals = []

    if ((time_extent and default_time) and
            not (model_run_extent and default_model_run)):
        nearest_interval = default_time
    else:
        start, end, interval = time_extent.split('/')
        start = datetime.strptime(start, DATEFORMAT)
        end = datetime.strptime(end, DATEFORMAT)

        regex_result = re.search('^P(T?)(\\d+)(.)', interval)
        time_ = regex_result.group(1)
        duration = regex_result.group(2)
        unit = regex_result.group(3)

        if not time_:
            # no time designator ('T') means the duration is a date duration
            if unit == 'M':
                relative_delta = relativedelta(months=int(duration))
        else:
            # a time designator ('T') means the duration is a time duration
            if unit == 'H':
                relative_delta = timedelta(hours=int(duration))
            elif unit == 'M':
                relative_delta = timedelta(minutes=int(duration))

        if start != end and relative_delta != timedelta(minutes=0):
            while start <= end:
                intervals.append(start)
                start += relative_delta
            nearest_interval = min(
                intervals,
                key=lambda interval: abs(interval - NOW)).strftime(DATEFORMAT)  # noqa
        else:
            nearest_interval = end.strftime(DATEFORMAT)

    time_config_dict = {
        'default_time': nearest_interval,
        'available_intervals': intervals,
        'time_extent': time_extent,
        'model_run_extent': model_run_extent,
        'default_model_run': default_model_run
    }

    return time_config_dict
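# Worked example (hypothetical extent; assumes DATEFORMAT = '%Y-%m-%dT%H:%M:%SZ'
# and a fixed "now") of how layer_time_config() expands an ISO 8601 time extent
# and picks the interval nearest to the current time.
import re
from datetime import datetime, timedelta

EXAMPLE_DATEFORMAT = '%Y-%m-%dT%H:%M:%SZ'
EXAMPLE_NOW = datetime(2021, 1, 1, 7, 20)

extent = '2021-01-01T00:00:00Z/2021-01-01T12:00:00Z/PT3H'
start, end, interval = extent.split('/')
start = datetime.strptime(start, EXAMPLE_DATEFORMAT)
end = datetime.strptime(end, EXAMPLE_DATEFORMAT)

# 'PT3H' -> time designator present, 3-hour steps ('H' unit)
_, duration, unit = re.search('^P(T?)(\\d+)(.)', interval).groups()
step = timedelta(hours=int(duration))

intervals = []
while start <= end:
    intervals.append(start)
    start += step

nearest = min(intervals, key=lambda i: abs(i - EXAMPLE_NOW))
# nearest.strftime(EXAMPLE_DATEFORMAT) == '2021-01-01T06:00:00Z'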
def generate_mapfile(layer=None, output='file', use_includes=True):
    """
    Generate mapfile(s) to disk and/or the store.

    :param layer: optional layer name to generate a single layer mapfile
    :param output: output destination ('file' or 'store')
    :param use_includes: whether the global mapfile references layer-only
                         mapfiles via INCLUDE directives

    :returns: `bool` of generation result (`False` if any layer time keys
              could not be retrieved)
    """

    st = load_plugin('store', PROVIDER_DEF)

    time_errors = False

    output_dir = f'{BASEDIR}{os.sep}mapfile'
    all_layers = []

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(MAPFILE_BASE) as fh:
        mapfile = json.load(fh, object_pairs_hook=OrderedDict)

    symbols_file = os.path.join(THISDIR, 'resources/mapserv/symbols.json')
    with open(symbols_file) as fh2:
        mapfile['symbols'] = json.load(fh2)

    with open(CONFIG) as fh:
        cfg = load(fh, Loader=CLoader)

    if layer is not None:
        mapfiles = {layer: cfg['layers'][layer]}
    else:
        mapfiles = cfg['layers']

    # set PROJ_LIB path
    mapfile['config']['proj_lib'] = os.path.join(
        THISDIR, 'resources', 'mapserv'
    )

    mapfile['web']['metadata'] = gen_web_metadata(
        mapfile, cfg['metadata'], URL
    )

    for key, value in mapfiles.items():
        mapfile_copy = deepcopy(mapfile)
        mapfile_copy['layers'] = []

        try:
            lyr = gen_layer(key, value)
        except LayerTimeConfigError:
            lyr = None
            time_errors = True

        if lyr:
            mapfile_copy['layers'].append(lyr)

        # TODO: simplify
        if 'outputformats' in value['forecast_model']:
            mapfile_copy['outputformats'] = [
                format_ for format_ in mapfile_copy['outputformats']
                if format_['name'] in value['forecast_model']['outputformats']
            ]

        # TODO: simplify
        if 'symbols' in value:
            mapfile_copy['symbols'] = [
                symbol for symbol in mapfile_copy['symbols']
                if symbol['name'] in value['symbols'] or any(
                    symbol_ in symbol['name']
                    for symbol_ in value['symbols']
                )
            ]
        else:
            mapfile_copy['symbols'] = []

        layer_only_filepath = (
            f'{output_dir}{os.sep}geomet-weather-{key}_layer.map'
        )

        # collect and write LAYER-only mapfile to disk in order to use
        # in global mapfile with INCLUDE directive
        all_layers.append(layer_only_filepath)
        with open(layer_only_filepath, 'w', encoding='utf-8') as fh:
            mappyfile.dump(mapfile_copy['layers'], fh)

        if output == 'file' and mapfile_copy['layers']:
            mapfile_filepath = f'{output_dir}{os.sep}geomet-weather-{key}.map'
            with open(mapfile_filepath, 'w', encoding='utf-8') as fh:
                if use_includes:
                    mapfile['include'] = [layer_only_filepath]
                    mappyfile.dump(mapfile, fh)
                else:
                    mappyfile.dump(mapfile_copy, fh)
        elif output == 'store' and mapfile_copy['layers']:
            st.set_key(f'{key}_mapfile', mappyfile.dumps(mapfile_copy))
            st.set_key(f'{key}_layer',
                       mappyfile.dumps(mapfile_copy['layers']))

    if layer is None:  # generate entire mapfile
        # always write global mapfile to disk for caching purposes
        mapfile['include'] = all_layers
        filename = 'geomet-weather.map'
        filepath = f'{output_dir}{os.sep}{filename}'
        with open(filepath, 'w', encoding='utf-8') as fh:
            mappyfile.dump(mapfile, fh)

        # also write to store if required
        if output == 'store':
            st.set_key('geomet-weather_mapfile', mappyfile.dumps(mapfile))

    # return False if time keys could not be retrieved (meaning empty/no
    # layer mapfiles generated)
    if time_errors:
        return False

    return True
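# Hypothetical invocation sketch (the layer name is for illustration only;
# actual layer keys come from CONFIG): regenerate a single layer's mapfile to
# the store, then regenerate all layers to disk with INCLUDE-based layer files.
if __name__ == '__main__':
    generate_mapfile(layer='GDPS.ETA_TT', output='store')
    generate_mapfile(output='file', use_includes=True)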