def test_dump():
    """Round-trip a minimal mapfile through mappyfile.dump/load.

    Fixes a temp-file leak: NamedTemporaryFile(delete=False) leaves the
    file on disk, so it is now unlinked after the read-back.
    """
    import os

    s = """MAP NAME "TEST" END"""
    d = mappyfile.loads(s)
    # delete=False so the file can be reopened by name on platforms
    # (e.g. Windows) that disallow a second open while the handle is live
    with tempfile.NamedTemporaryFile(mode="w+", delete=False) as fp:
        mappyfile.dump(d, fp)
    try:
        with open(fp.name) as fh:
            d = mappyfile.load(fh)
    finally:
        os.unlink(fp.name)  # clean up — delete=False never auto-removes
    assert d["name"] == "TEST"
def generate(ctx, layer, map_, output):
    """generate mapfile

    Builds one mapfile (or layer-only mapfile) per configured layer, and —
    when no single layer was requested — a global mapfile with all layers.

    :param ctx: click context (unused here, passed by the CLI framework)
    :param layer: single layer name to generate, or None for all layers
    :param map_: truthy to emit full MAP documents, falsy for LAYER-only
    :param output: 'file' to write to disk, 'store' to push to the store
    """
    st = load_plugin('store', PROVIDER_DEF)
    output_dir = f'{BASEDIR}{os.sep}mapfile'

    all_layers = []

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(MAPFILE_BASE) as fh:
        mapfile = json.load(fh, object_pairs_hook=OrderedDict)

    symbols_file = os.path.join(THISDIR, 'resources/mapserv/symbols.json')
    with open(symbols_file) as fh2:
        mapfile['symbols'] = json.load(fh2)

    with open(CONFIG) as fh:
        cfg = load(fh, Loader=CLoader)

    if layer is not None:
        mapfiles = {layer: cfg['layers'][layer]}
    else:
        mapfiles = cfg['layers']

    # set PROJ_LIB path
    mapfile['config']['proj_lib'] = os.path.join(
        BASEDIR, 'geomet_mapfile', 'resources', 'mapserv')

    mapfile['web']['metadata'] = gen_web_metadata(mapfile, cfg['metadata'],
                                                  URL)

    for key, value in mapfiles.items():
        mapfile_copy = deepcopy(mapfile)
        mapfile_copy['layers'] = []

        layers = gen_layer(key, value)

        if layers:
            for lyr in layers:
                mapfile_copy['layers'].append(lyr)
                all_layers.append(lyr)

        # TODO: simplify
        if 'outputformats' in value['forecast_model']:
            mapfile_copy['outputformats'] = [
                format_ for format_ in mapfile_copy['outputformats']
                if format_['name'] in value['forecast_model']['outputformats']
            ]

        # TODO: simplify
        if 'symbols' in value:
            mapfile_copy['symbols'] = [
                symbol for symbol in mapfile_copy['symbols']
                if symbol['name'] in value['symbols'] or
                any(symbol_ in symbol['name']
                    for symbol_ in value['symbols'])
            ]
        else:
            mapfile_copy['symbols'] = []

        if map_:
            filename = f'geomet-weather-{key}.map'
        else:
            filename = f'geomet-weather-{key}_layer.map'

        # BUGFIX: interpolate the computed filename (was a literal
        # placeholder, so every layer overwrote the same path)
        filepath = f'{output_dir}{os.sep}{filename}'

        if output == 'file':
            with open(filepath, 'w', encoding='utf-8') as fh:
                if map_:
                    mappyfile.dump(mapfile_copy, fh)
                else:
                    mappyfile.dump(mapfile_copy['layers'], fh)
        elif output == 'store':
            if map_:
                st.set_key(f'{key}_mapfile', mappyfile.dumps(mapfile_copy))
            else:
                st.set_key(f'{key}_layer',
                           mappyfile.dumps(mapfile_copy['layers']))

    if layer is None:
        # generate entire mapfile
        mapfile['layers'] = all_layers
        if output == 'file':
            filename = 'geomet-weather.map'
            # BUGFIX: same placeholder defect as above
            filepath = f'{output_dir}{os.sep}{filename}'
            with open(filepath, 'w', encoding='utf-8') as fh:
                mappyfile.dump(mapfile, fh)
        elif output == 'store':
            st.set_key('geomet-weather_mapfile', mappyfile.dumps(mapfile))

    epsg_file = os.path.join(THISDIR, 'resources', 'mapserv', 'epsg')
    # NOTE(review): output_dir is already absolute, so os.path.join
    # returns output_dir unchanged; kept as-is to preserve behavior
    shutil.copy2(epsg_file, os.path.join(BASEDIR, output_dir))
def generate(ctx, service, layer):
    """generate mapfile

    Writes one mapfile per layer plus (when no single layer is requested)
    per-language global mapfiles for the given OGC service.

    :param ctx: click context (unused here, passed by the CLI framework)
    :param service: OGC service name used in output filenames/metadata
    :param layer: single layer name to generate, or None for all layers
    """
    output_dir = '{}{}mapfile'.format(BASEDIR, os.sep)
    template_dir = '{}{}mapfile{}template'.format(BASEDIR, os.sep, os.sep)

    all_layers = []

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    if not os.path.exists(template_dir):
        os.makedirs(template_dir)

    with io.open(MAPFILE_BASE) as fh:
        mapfile = json.load(fh, object_pairs_hook=OrderedDict)

    symbols_file = os.path.join(THISDIR, 'resources/mapserv/symbols.json')
    with io.open(symbols_file) as fh2:
        mapfile['symbols'] = json.load(fh2)

    if OWS_LOG is not None:
        mapfile['config'] = {'ms_errorfile': OWS_LOG}
    if OWS_DEBUG is not None:
        mapfile['debug'] = int(OWS_DEBUG)

    with io.open(CONFIG) as fh:
        cfg = yaml.load(fh, Loader=CLoader)

    if layer is not None:
        mapfiles = {layer: cfg['layers'][layer]}
    else:
        mapfiles = cfg['layers']

    mapfile['web']['metadata'] = gen_web_metadata(mapfile, cfg['metadata'],
                                                  service, URL)

    for key, value in mapfiles.items():
        mapfile['layers'] = []

        template_name = 'template-{}.js'.format(key)
        template_path = '{}{}{}'.format(template_dir, os.sep, template_name)

        with io.open(template_path, 'w', encoding='utf-8') as fh:
            # BUGFIX: this path was previously assigned to template_dir,
            # clobbering the output template directory so every iteration
            # after the first built template_path under the read-only
            # resources directory
            resources_template_dir = os.path.join(THISDIR, 'resources',
                                                  'mapserv', 'templates')

            stations_layers = [
                'CLIMATE.STATIONS', 'HYDROMETRIC.STATIONS', 'AHCCD.STATIONS'
            ]

            if key not in stations_layers:
                trf = os.path.join(resources_template_dir,
                                   'TEMPLATE_RASTER.json')
                with io.open(trf, encoding='utf-8') as template_raster:
                    template_raster = template_raster.read().replace('{}',
                                                                     key)
                    fh.write(template_raster)
            else:
                template_tmp_name = 'TEMPLATE_{}.json'.format(key)
                tvf = os.path.join(resources_template_dir, template_tmp_name)
                with io.open(tvf, encoding='utf-8') as template_vector:
                    template_vector = template_vector.read().replace('{}',
                                                                     key)
                    fh.write(template_vector)

        layers = gen_layer(key, value, template_path, service)

        for lyr in layers:
            mapfile['layers'].append(lyr)
            all_layers.append(lyr)

        filename = 'geomet-climate-{}-{}.map'.format(service, key)
        filepath = '{}{}{}'.format(output_dir, os.sep, filename)

        # point the GeoJSON output format at this layer's template
        for i in mapfile['outputformats']:
            if i['name'] == 'GeoJSON':
                i['formatoption'] = ['FILE={}'.format(template_path)]

        with io.open(filepath, 'w') as fh:
            mappyfile.dump(mapfile, fh)

    if layer is None:
        # generate entire mapfile
        metadata_dict = mapfile['web']['metadata'].copy()

        for lang_ in ['en', 'fr']:
            lang_map = copy.deepcopy(mapfile)
            lang_map['layers'] = all_layers

            filename = 'geomet-climate-{}-{}.map'.format(service, lang_)
            filepath = '{}{}{}'.format(output_dir, os.sep, filename)

            if lang_ == 'fr':
                # overwrite the default metadata keys with their
                # French-suffixed counterparts
                for metadata in metadata_dict:
                    if metadata.endswith('_{}'.format(lang_)):
                        key_ = metadata.replace('_{}'.format(lang_), '')
                        value_ = mapfile['web']['metadata'][metadata]
                        lang_map['web']['metadata'][key_] = value_
                for lyr_ in range(0, len(lang_map['layers'])):
                    lm = lang_map['layers'][lyr_]['metadata']
                    if 'ows_title' in lm:
                        lm['ows_layer_group'] = \
                            lm['ows_layer_group_{}'.format(lang_)]
                        lm['ows_title'] = \
                            lm['ows_title_{}'.format(lang_)]

            with io.open(filepath, 'w') as fh:
                mappyfile.dump(lang_map, fh)

    epsg_file = os.path.join(THISDIR, 'resources', 'mapserv', 'epsg')
    shutil.copy2(epsg_file, os.path.join(BASEDIR, 'mapfile'))
def generate(ctx, lang, service, layer):
    """generate mapfile

    Writes one mapfile per layer plus (when no single layer is requested)
    a global mapfile, for the given language and OGC service.

    :param ctx: click context (unused here, passed by the CLI framework)
    :param lang: language code used in filenames/metadata (required)
    :param service: OGC service name (required)
    :param layer: single layer name to generate, or None for all layers
    :raises click.UsageError: if lang or service is missing
    """
    output_dir = '{}{}mapfile'.format(BASEDIR, os.sep)
    template_dir = '{}{}mapfile{}template'.format(BASEDIR, os.sep, os.sep)

    all_layers = []

    if lang is None or service is None:
        raise click.UsageError('Missing arguments')

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    if not os.path.exists(template_dir):
        os.makedirs(template_dir)

    with io.open(MAPFILE_BASE) as fh:
        mapfile = json.load(fh, object_pairs_hook=OrderedDict)

    symbols_file = os.path.join(THISDIR, 'resources/mapserv/symbols.json')
    with io.open(symbols_file) as fh2:
        mapfile['symbols'] = json.load(fh2)

    with io.open(CONFIG) as fh:
        cfg = yaml.load(fh, Loader=CLoader)

    if layer is not None:
        mapfiles = {
            layer: cfg['layers'][layer]
        }
    else:
        mapfiles = cfg['layers']

    mapfile['web']['metadata'] = gen_web_metadata(mapfile, cfg['metadata'],
                                                  lang, service, URL)

    for key, value in mapfiles.items():
        mapfile['layers'] = []

        template_name = 'template-{}.js'.format(key)
        template_path = '{}{}{}'.format(template_dir, os.sep, template_name)

        with io.open(template_path, 'w', encoding='utf-8') as fh:
            # BUGFIX: this path was previously assigned to template_dir,
            # clobbering the output template directory so every iteration
            # after the first built template_path under the read-only
            # resources directory
            resources_template_dir = os.path.join(THISDIR, 'resources',
                                                  'mapserv', 'templates')

            stations_layers = ['CLIMATE.STATIONS', 'HYDROMETRIC.STATIONS',
                               'AHCCD.STATIONS']

            if key not in stations_layers:
                trf = os.path.join(resources_template_dir,
                                   'TEMPLATE_RASTER.json')
                with io.open(trf, encoding='utf-8') as template_raster:
                    template_raster = template_raster.read().replace('{}',
                                                                     key)
                    fh.write(template_raster)
            else:
                template_tmp_name = 'TEMPLATE_{}.json'.format(key)
                tvf = os.path.join(resources_template_dir, template_tmp_name)
                with io.open(tvf, encoding='utf-8') as template_vector:
                    template_vector = template_vector.read().replace('{}',
                                                                     key)
                    fh.write(template_vector)

        layers = gen_layer(key, value, lang, template_path, service)

        for lyr in layers:
            mapfile['layers'].append(lyr)
            all_layers.append(lyr)

        filename = 'geomet-climate-{}-{}-{}.map'.format(service, key, lang)
        filepath = '{}{}{}'.format(output_dir, os.sep, filename)

        # point the GeoJSON output format at this layer's template
        for i in mapfile['outputformats']:
            if i['name'] == 'GeoJSON':
                i['formatoption'] = ['FILE={}'.format(template_path)]

        with io.open(filepath, 'w', encoding='utf-8') as fh:
            mappyfile.dump(mapfile, fh)

    if layer is None:
        # generate entire mapfile
        filename = 'geomet-climate-{}-{}.map'.format(service, lang)
        filepath = '{}{}{}'.format(output_dir, os.sep, filename)

        mapfile['layers'] = all_layers

        with io.open(filepath, 'w', encoding='utf-8') as fh:
            mappyfile.dump(mapfile, fh)

    epsg_file = os.path.join(THISDIR, 'resources', 'mapserv', 'epsg')
    shutil.copy2(epsg_file, os.path.join(BASEDIR, 'mapfile'))
def generate_mapfile(layer=None, output='file', use_includes=True):
    """Generate mapfiles for one or all configured layers.

    Layer-only mapfiles are always written to disk so the global mapfile
    can pull them in with INCLUDE directives.

    :param layer: single layer name to generate, or None for all layers
    :param output: 'file' to write full mapfiles to disk, 'store' to push
                   them to the store
    :param use_includes: if True, per-layer mapfiles reference the
                         layer-only file via INCLUDE instead of embedding
    :returns: False if any layer's time keys could not be retrieved
              (meaning empty/no layer mapfiles generated), True otherwise
    """
    st = load_plugin('store', PROVIDER_DEF)
    time_errors = False
    output_dir = f'{BASEDIR}{os.sep}mapfile'

    all_layers = []

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(MAPFILE_BASE) as fh:
        mapfile = json.load(fh, object_pairs_hook=OrderedDict)

    symbols_file = os.path.join(THISDIR, 'resources/mapserv/symbols.json')
    with open(symbols_file) as fh2:
        mapfile['symbols'] = json.load(fh2)

    with open(CONFIG) as fh:
        cfg = load(fh, Loader=CLoader)

    if layer is not None:
        mapfiles = {layer: cfg['layers'][layer]}
    else:
        mapfiles = cfg['layers']

    # set PROJ_LIB path
    mapfile['config']['proj_lib'] = os.path.join(
        THISDIR, 'resources', 'mapserv'
    )

    mapfile['web']['metadata'] = gen_web_metadata(
        mapfile, cfg['metadata'], URL
    )

    for key, value in mapfiles.items():
        mapfile_copy = deepcopy(mapfile)
        mapfile_copy['layers'] = []

        try:
            lyr = gen_layer(key, value)
        except LayerTimeConfigError:
            # layer lacks usable time keys; record and continue so the
            # remaining layers are still generated
            lyr = None
            time_errors = True

        if lyr:
            mapfile_copy['layers'].append(lyr)

        # TODO: simplify
        if 'outputformats' in value['forecast_model']:
            mapfile_copy['outputformats'] = [
                format_ for format_ in mapfile_copy['outputformats']
                if format_['name'] in value['forecast_model']['outputformats']
            ]

        # TODO: simplify
        if 'symbols' in value:
            mapfile_copy['symbols'] = [
                symbol for symbol in mapfile_copy['symbols']
                if symbol['name'] in value['symbols'] or any(
                    symbol_ in symbol['name']
                    for symbol_ in value['symbols']
                )
            ]
        else:
            mapfile_copy['symbols'] = []

        layer_only_filepath = (
            f'{output_dir}{os.sep}geomet-weather-{key}_layer.map'
        )

        # collect and write LAYER-only mapfile to disk in order to use
        # in global mapfile with INCLUDE directive
        all_layers.append(layer_only_filepath)
        with open(layer_only_filepath, 'w', encoding='utf-8') as fh:
            mappyfile.dump(mapfile_copy['layers'], fh)

        if output == 'file' and mapfile_copy['layers']:
            mapfile_filepath = f'{output_dir}{os.sep}geomet-weather-{key}.map'
            with open(mapfile_filepath, 'w', encoding='utf-8') as fh:
                if use_includes:
                    mapfile['include'] = [layer_only_filepath]
                    mappyfile.dump(mapfile, fh)
                else:
                    mappyfile.dump(mapfile_copy, fh)
        elif output == 'store' and mapfile_copy['layers']:
            st.set_key(f'{key}_mapfile', mappyfile.dumps(mapfile_copy))
            st.set_key(f'{key}_layer',
                       mappyfile.dumps(mapfile_copy['layers']))

    if layer is None:
        # generate entire mapfile
        # always write global mapfile to disk for caching purposes
        mapfile['include'] = all_layers
        filename = 'geomet-weather.map'
        # BUGFIX: interpolate the computed filename (was a literal
        # placeholder, so the global mapfile landed at a bogus path)
        filepath = f'{output_dir}{os.sep}{filename}'

        with open(filepath, 'w', encoding='utf-8') as fh:
            mappyfile.dump(mapfile, fh)

        # also write to store if required
        if output == 'store':
            st.set_key('geomet-weather_mapfile', mappyfile.dumps(mapfile))

    # returns False if time keys could not be retrieved (meaning empty/no
    # layer mapfiles generated)
    if time_errors:
        return False
    return True