def test_geojson_generation(self):
    # Process config file
    test_artifact_path = os.path.join(self.main_artifact_path, 'geojson')
    config = self.parse_vector_config(self.geojson_test_config,
                                      test_artifact_path)

    # Open input GeoJSON and get stats
    try:
        with fiona.open(config['input_files'][0]) as geojson:
            origin_num_features = len(list(geojson))
    except fiona.errors.FionaValueError:
        self.fail(
            "Can't open input geojson {0}. Make sure it's valid.".format(
                config['input_files'][0]))

    # Run vectorgen
    os.chdir(test_artifact_path)
    cmd = 'oe_vectorgen -c ' + self.geojson_test_config
    run_command(cmd, ignore_warnings=True)

    # Check the output
    output_file = os.path.join(config['output_dir'],
                               config['prefix'] + '.json')
    try:
        with fiona.open(output_file) as geojson:
            self.assertEqual(
                origin_num_features, len(list(geojson)),
                "Feature count between input GeoJSON {0} and output GeoJSON {1} differs. There is a problem with the conversion process."
                .format(config['input_files'][0], output_file))
    except IOError:
        self.fail(
            "Expected output geojson file {0} doesn't appear to have been created."
            .format(output_file))
    except fiona.errors.FionaValueError:
        self.fail("Bad output geojson file {0}.".format(output_file))
def test_vector_data_type_inclusion(self):
    """
    Checks that the <VectorType> tag is being read and included in the output Mapfile.
    """
    if DEBUG:
        print '\nTESTING THAT VECTOR DATA TYPE IS INCLUDED IN GENERATED MAPFILE...'
    layer_config = os.path.join(
        self.testfiles_path, 'conf/test_vector_mapfile_type_inclusion.xml')
    config = get_layer_config(layer_config, self.archive_config)
    make_dir_tree(config['mapfile_staging_location'])
    cmd = 'oe_configure_layer -l {0} -z -e -a {1} -c {2} -p {3} -m {4} -n -w -x --create_mapfile'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd, ignore_warnings=True)
    output_mapfile = os.path.join(
        config['mapfile_location'],
        config['mapfile_location_basename'] + '.map')
    self.assertTrue(
        os.path.exists(output_mapfile),
        "Vector Mapfile Type addition test -- mapfile not created")
    with open(output_mapfile) as mapfile:
        mapfile_type_string = str('TYPE\t' + config['vector_type'].upper())
        type_string_exists = mapfile_type_string in mapfile.read()
    self.assertTrue(type_string_exists,
                    'Expected TYPE declaration not found in output mapfile')
def test_vector_mapfile_style_inclusion(self):
    """
    Checks that the style snippet file indicated by <VectorStyleFile> is included in the output Mapfile.
    """
    if DEBUG:
        print '\nTESTING THAT VECTOR STYLES ARE INCLUDED IN GENERATED MAPFILE...'
    layer_config = os.path.join(
        self.testfiles_path, 'conf/test_vector_mapfile_style_inclusion.xml')
    config = get_layer_config(layer_config, self.archive_config)
    make_dir_tree(config['mapfile_staging_location'])
    cmd = 'oe_configure_layer -l {0} -z -e -a {1} -c {2} -p {3} -m {4} -n -w -x --create_mapfile'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd, ignore_warnings=True)
    output_mapfile = os.path.join(
        config['mapfile_location'],
        config['mapfile_location_basename'] + '.map')
    self.assertTrue(
        os.path.exists(output_mapfile),
        "Vector Mapfile Style addition test -- mapfile not created")
    with open(output_mapfile) as mapfile:
        with open(
                os.path.join(self.testfiles_path,
                             config['vector_style_file'])) as style_file:
            styles_exist = style_file.read() in mapfile.read()
    self.assertTrue(styles_exist,
                    'Style file contents not found in output mapfile')
def test_layer_config_legends(self):
    # Set config files and reference hashes for checking the generated legends
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_legend_generation.xml')
    h_legend_ref_hash = '1f4bca87509a8fd82c416fdd5f3eff87'
    v_legend_ref_hash = '4fdb03600a8e5c8b15321dd2505d6838'
    config = get_layer_config(layer_config, self.archive_config)

    # Create legend, archive, and colormap dirs
    make_dir_tree(config['legend_location'])
    make_dir_tree(config['colormap_locations'][0].firstChild.nodeValue)
    make_dir_tree(config['archive_location'])
    make_dir_tree(config['wmts_gc_path'])
    make_dir_tree(config['twms_gc_path'])

    # Copy colormap to colormaps dir
    copy(
        os.path.join(self.testfiles_path,
                     'conf/' + config['colormaps'][0].firstChild.nodeValue),
        config['colormap_locations'][0].firstChild.nodeValue)

    # Run layer config tool
    cmd = 'oe_configure_layer -l{0} --skip_empty_tiles -g -a {1} -c {2} -p {3} -m {4}'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd)

    # Get hashes of generated legends
    try:
        with open(
                os.path.join(config['legend_location'],
                             config['prefix'] + '_H.svg'), 'r') as f:
            h_legend_hash = get_file_hash(f)
    except (IOError, OSError):
        raise ValueError('Horizontal legend not generated')
    try:
        with open(
                os.path.join(config['legend_location'],
                             config['prefix'] + '_V.svg'), 'r') as f:
            v_legend_hash = get_file_hash(f)
    except (IOError, OSError):
        raise ValueError('Vertical legend not generated')

    # Cleanup
    rmtree(config['wmts_gc_path'])
    rmtree(config['colormap_locations'][0].firstChild.nodeValue)
    rmtree(config['legend_location'])
    rmtree(config['wmts_staging_location'])
    rmtree(config['twms_staging_location'])

    # Check if hashes are kosher
    self.assertEqual(
        h_legend_ref_hash, h_legend_hash,
        'Horizontal legend generated does not match expected.')
    self.assertEqual(v_legend_ref_hash, v_legend_hash,
                     'Vertical legend generated does not match expected.')
def setUp(self):
    self.testdata_path = os.path.join(os.getcwd(), 'legends_test_data/')
    test_config = open(self.testdata_path + "colormaps.json", "r")
    self.colormaps = eval(test_config.read())
    self.colormap_files = []
    test_config.close()
    for key, value in self.colormaps.iteritems():
        colormap = self.testdata_path + key + ".xml"
        self.colormap_files.append(colormap)
        if not os.path.isfile(colormap):
            run_command("curl -o " + colormap + " " + value['colormap'])
def test_versioned_colormaps(self):
    # Set locations of the config files we're using for this test
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_versioned_colormaps.xml')
    test_metadata = (
        '<ows:Metadata xlink:type="simple" xlink:role="http://earthdata.nasa.gov/gibs/metadata-type/colormap" xlink:href="http://localhost/colormaps/2.0/MODIS_Aqua_Aerosol-GIBS_2.0.xml" xlink:title="GIBS Color Map: Data - RGB Mapping"/>',
        '<ows:Metadata xlink:type="simple" xlink:role="http://earthdata.nasa.gov/gibs/metadata-type/colormap/1.0" xlink:href="http://localhost/colormaps/1.0//MODIS_Aqua_Aerosol-GIBS_1.0.xml" xlink:title="GIBS Color Map: Data - RGB Mapping"/>',
        '<ows:Metadata xlink:type="simple" xlink:role="http://earthdata.nasa.gov/gibs/metadata-type/colormap/2.0" xlink:href="http://localhost/colormaps/2.0//MODIS_Aqua_Aerosol-GIBS_2.0.xml" xlink:title="GIBS Color Map: Data - RGB Mapping"/>'
    )
    config = get_layer_config(layer_config, self.archive_config)

    # Create colormap locations and copy colormap test file
    for location in config['colormap_locations']:
        make_dir_tree(location.firstChild.nodeValue)
        colormap = next(
            colormap.firstChild.nodeValue
            for colormap in config['colormaps']
            if colormap.attributes['version'].value ==
            location.attributes['version'].value)
        copy(os.path.join(self.testfiles_path, 'conf/' + colormap),
             location.firstChild.nodeValue)

    # Create paths for data and GC
    make_dir_tree(config['wmts_gc_path'])
    make_dir_tree(config['twms_gc_path'])
    make_dir_tree(config['archive_location'])

    # Run layer config tool
    cmd = 'oe_configure_layer -l {0} --skip_empty_tiles -a {1} -c {2} -p {3} -m {4}'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd)

    # Check to see if all required metadata lines are in the GetCapabilities
    gc_file = os.path.join(config['wmts_gc_path'], 'getCapabilities.xml')
    metadata_pass = all(find_string(gc_file, line) for line in test_metadata)
    self.assertTrue(
        metadata_pass,
        "Can't find all the proper versioned colormap metadata in the WMTS GetCapabilities file or GC file was not created."
    )

    # Cleanup
    for path in [
            location.firstChild.nodeValue
            for location in config['colormap_locations']
    ]:
        rmtree(path)
    rmtree(config['wmts_gc_path'])
    rmtree(config['wmts_staging_location'])
    rmtree(config['twms_staging_location'])
def setUp(self):
    self.colormaps_json = "colormaps.el7.json"
    if "el6" in platform.release():  # Result varies on release
        self.colormaps_json = self.colormaps_json.replace("el7", "el6")
    self.testdata_path = os.path.join(os.getcwd(), 'legends_test_data/')
    test_config = open(self.testdata_path + self.colormaps_json, "r")
    self.colormaps = eval(test_config.read())
    self.colormap_files = []
    test_config.close()
    for key, value in self.colormaps.iteritems():
        colormap = self.testdata_path + key + ".xml"
        self.colormap_files.append(colormap)
        if not os.path.isfile(colormap):
            run_command("curl -o " + colormap + " " + value['colormap'])
def test_MVT_MRF_generation(self):
    # Process config file
    test_artifact_path = os.path.join(self.main_artifact_path, 'mvt_mrf')
    config = self.parse_vector_config(self.mrf_test_config,
                                      test_artifact_path)

    # Run vectorgen
    os.chdir(test_artifact_path)
    cmd = 'oe_vectorgen -c ' + self.mrf_test_config
    run_command(cmd, ignore_warnings=True)

    # Get index entries (offset/size) of the first, second-to-last, and last tile in the MRF
    with open(
            os.path.join(config['output_dir'], config['prefix'] + '.idx'),
            'rb') as idx:
        first_byte = idx.read(16)
        idx.seek(-32, 2)
        penultimate_byte = idx.read(16)
        last_byte = idx.read(16)
    top_tile_feature_count = 0
    for byte in (first_byte, penultimate_byte, last_byte):
        tile_buffer = StringIO.StringIO()
        offset = struct.unpack('>q', byte[0:8])[0]
        size = struct.unpack('>q', byte[8:16])[0]
        with open(
                os.path.join(config['output_dir'],
                             config['prefix'] + '.pvt'), 'rb') as pvt:
            pvt.seek(offset)
            tile_buffer.write(pvt.read(size))
        tile_buffer.seek(0)
        # Check to see if extracted files are valid zip files and valid MVT tiles
        try:
            unzipped_tile = gzip.GzipFile(fileobj=tile_buffer)
            tile_data = unzipped_tile.read()
        except IOError:
            self.fail("Invalid tile found in MRF -- can't be unzipped.")
        try:
            tile = mapbox_vector_tile.decode(tile_data)
        except:
            self.fail(
                "Can't decode MVT tile -- bad protobuffer or wrong MVT structure"
            )
        # Check the top 2 tiles to see if they have any features (they should)
        if byte != first_byte:
            top_tile_feature_count += len(tile[tile.keys()[0]]['features'])
    self.assertTrue(
        top_tile_feature_count,
        "Top two tiles contain no features -- MRF likely was not created correctly."
    )
def setUpClass(self):
    # Get the path of the test data -- we assume that the script is in the parent dir of the data dir
    testdata_path = os.path.join(os.getcwd(), 'mod_onearth_test_data')
    wmts_configs = ('wmts_cache_configs', 'wmts_cache_staging',
                    'test_imagery/cache_all_wmts.config')
    twms_configs = ('twms_cache_configs', 'twms_cache_staging',
                    'test_imagery/cache_all_twms.config')
    self.image_files_path = os.path.join(testdata_path, 'test_imagery')
    self.test_apache_config = os.path.join(testdata_path, 'oe_test.conf')
    self.mapfile = os.path.join(testdata_path, 'epsg4326.map')

    # Create links for mapserv
    mapserver_location = '/usr/bin/mapserv'
    if not os.path.exists(testdata_path + '/wms_endpoint'):
        os.makedirs(testdata_path + '/wms_endpoint')
    if not os.path.exists(testdata_path + '/wfs_endpoint'):
        os.makedirs(testdata_path + '/wfs_endpoint')
    if os.path.isfile(mapserver_location):
        if not os.path.islink(testdata_path + '/wms_endpoint/mapserv'):
            os.symlink(mapserver_location,
                       testdata_path + '/wms_endpoint/mapserv')
        if not os.path.islink(testdata_path + '/wfs_endpoint/mapserv'):
            os.symlink(mapserver_location,
                       testdata_path + '/wfs_endpoint/mapserv')
    else:
        raise IOError(mapserver_location + ' does not exist')

    for template_dir, staging_dir, cache_config in (wmts_configs,
                                                    twms_configs):
        # Make staging cache files dir
        template_path = os.path.join(testdata_path, template_dir)
        staging_path = os.path.join(testdata_path, staging_dir)
        cache_path = os.path.join(testdata_path, cache_config)
        make_dir_tree(staging_path)

        # Copy XML/MRF files to staging cache files dir, swapping in the location of the imagery files
        for file in [
                f for f in os.listdir(template_path)
                if os.path.isfile(os.path.join(template_path, f))
        ]:
            file_text_replace(
                os.path.join(template_path, file),
                os.path.join(staging_path, file), '{cache_path}',
                self.image_files_path)

        # Run oe_create_cache_config to make the cache config files
        cmd = 'oe_create_cache_config -cbd {0} {1}'.format(
            staging_path, cache_path)
        run_command(cmd)
        rmtree(staging_path)

    # Put the correct path into the mapfile
    file_text_replace(self.mapfile + ".default", self.mapfile,
                      '{cache_path}', testdata_path)

    # Put the correct path into the Apache config (oe_test.conf)
    file_text_replace(
        self.test_apache_config,
        os.path.join('/etc/httpd/conf.d',
                     os.path.basename(self.test_apache_config)),
        '{cache_path}', testdata_path)
    restart_apache()
def setUpClass(self):
    # Get the path of the test data -- we assume that the script is in the parent dir of the data dir
    oedata_path = os.path.join(os.getcwd(), 'twms_onearth_test_data')
    self.testdata_path = os.path.join(oedata_path, 'mod_twms')
    wmts_configs = ('wmts_cache_configs', 'wmts_cache_staging',
                    'test_imagery/cache_all_wmts.config')
    self.image_files_path = os.path.join(oedata_path, 'test_imagery')
    self.test_oe_config = os.path.join(oedata_path, 'oe_test.conf')
    self.test_apache_config = os.path.join(self.testdata_path,
                                           'test_twms_err_apache.conf')
    template_dir, staging_dir, cache_config = wmts_configs

    # Make staging cache files dir
    template_path = os.path.join(oedata_path, template_dir)
    staging_path = os.path.join(oedata_path, staging_dir)
    cache_path = os.path.join(oedata_path, cache_config)
    make_dir_tree(staging_path)

    # Copy XML/MRF files to staging cache files dir, swapping in the location of the imagery files
    for file in [
            f for f in os.listdir(template_path)
            if os.path.isfile(os.path.join(template_path, f))
    ]:
        file_text_replace(
            os.path.join(template_path, file),
            os.path.join(staging_path, file), '{cache_path}',
            self.image_files_path)

    # Run oe_create_cache_config to make the cache config files
    cmd = 'oe_create_cache_config -cbd {0} {1}'.format(
        staging_path, cache_path)
    run_command(cmd)
    rmtree(staging_path)

    # Put the correct path into the Apache config (oe_test.conf)
    file_text_replace(
        self.test_oe_config,
        os.path.join('/etc/httpd/conf.d',
                     os.path.basename(self.test_oe_config)), '{cache_path}',
        oedata_path)

    # Put the correct path into the Apache config (test_twms_err_apache.conf)
    file_text_replace(
        self.test_apache_config,
        os.path.join('/etc/httpd/conf.d',
                     os.path.basename(self.test_apache_config)),
        '{cache_path}', self.testdata_path)
    restart_apache()
def test_generate_mrf(self):
    new_colormaps = self.colormaps.copy()
    hasher = hashlib.md5()
    for key, value in self.colormaps.iteritems():
        colormap = self.testdata_path + key + ".xml"
        filename = os.path.splitext(colormap)[0]

        # Generate PNG and SVG legends in both orientations
        png_v = oe_generate_legend + " -c " + colormap + " -f png -r vertical -o " + filename + "_v.png"
        png_h = oe_generate_legend + " -c " + colormap + " -f png -r horizontal -o " + filename + "_h.png"
        svg_v = oe_generate_legend + " -c " + colormap + " -f svg -r vertical -o " + filename + "_v.svg"
        svg_h = oe_generate_legend + " -c " + colormap + " -f svg -r horizontal -o " + filename + "_h.svg"
        run_command(png_v)
        run_command(png_h)
        run_command(svg_v)
        run_command(svg_h)

        # Reference hashes for this colormap
        png_v_hash = value["png_v"]
        png_h_hash = value["png_h"]
        svg_v_hash = value["svg_v"]
        svg_h_hash = value["svg_h"]

        png_v_file = open(filename + "_v.png", "r")
        png_h_file = open(filename + "_h.png", "r")

        # Strip the unique ID values matplotlib writes into the SVGs before hashing
        svg_v_file = open(filename + "_v.svg", "r")
        svg_v_file_str = ""
        svg_v_file_str = re.sub('(id="[#A-Za-z0-9]{11,15}")', '',
                                svg_v_file.read())
        svg_v_file_str = re.sub('(xlink:href="[#A-Za-z0-9]{12}")', '',
                                svg_v_file_str)
        svg_v_file_str = re.sub('(clip-path="url\([#A-Za-z0-9]{12}\)")', '',
                                svg_v_file_str)
        svg_h_file = open(filename + "_h.svg", "r")
        svg_h_file_str = ""
        svg_h_file_str = re.sub('(id="[#A-Za-z0-9]{11,15}")', '',
                                svg_h_file.read())
        svg_h_file_str = re.sub('(xlink:href="[#A-Za-z0-9]{12}")', '',
                                svg_h_file_str)
        svg_h_file_str = re.sub('(clip-path="url\([#A-Za-z0-9]{12}\)")', '',
                                svg_h_file_str)

        # Hash the generated legends (the hasher accumulates across files)
        hasher.update(png_v_file.read())
        new_colormaps[key]["png_v"] = hasher.hexdigest()
        hasher.update(png_h_file.read())
        new_colormaps[key]["png_h"] = hasher.hexdigest()
        hasher.update(svg_v_file_str)
        new_colormaps[key]["svg_v"] = hasher.hexdigest()
        hasher.update(svg_h_file_str)
        new_colormaps[key]["svg_h"] = hasher.hexdigest()
        png_v_file.close()
        png_h_file.close()
        svg_v_file.close()
        svg_h_file.close()

        if png_v_hash != new_colormaps[key]["png_v"]:
            print 'Vertical PNG legend for ' + key + ' does not match expected.'
        if png_h_hash != new_colormaps[key]["png_h"]:
            print 'Horizontal PNG legend for ' + key + ' does not match expected.'
        if new_colormaps[key]["svg_v"] != svg_v_hash:
            print 'Vertical SVG legend for ' + key + ' does not match expected.'
        if new_colormaps[key]["svg_h"] != svg_h_hash:
            print 'Horizontal SVG legend for ' + key + ' does not match expected.'

    new_config = open(self.testdata_path + 'new_colormaps.json', 'w')
    json.dump(new_colormaps, new_config, sort_keys=True, indent=4)
    new_config.close()
    self.assertTrue(
        filecmp.cmp(self.testdata_path + "colormaps.json",
                    self.testdata_path + 'new_colormaps.json'),
        'Inconsistent legends found')
def setUpClass(self):
    # Get paths for test files
    test_config_path = os.path.join(os.getcwd(),
                                    'mod_twms_test_data/test_twms_err')
    twms_configs = ('twms_cache_configs', 'twms_cache_staging',
                    'test_imagery/cache_all_twms.config')
    self.image_files_path = os.path.join(test_config_path, 'test_imagery')
    base_apache_config = os.path.join(test_config_path,
                                      'test_twms_err_apache.conf')
    self.output_apache_config = os.path.join(apache_conf_dir,
                                             'test_twms_err_apache.conf')
    (template_dir, staging_dir, cache_config) = twms_configs

    # Make staging cache files dir
    template_path = os.path.join(test_config_path, template_dir)
    staging_path = os.path.join(test_config_path, staging_dir)
    cache_path = os.path.join(test_config_path, cache_config)
    make_dir_tree(staging_path)

    # Copy XML/MRF files to staging cache files dir, swapping in the location of the imagery files
    for file in [
            f for f in os.listdir(template_path)
            if os.path.isfile(os.path.join(template_path, f))
    ]:
        file_text_replace(
            os.path.join(template_path, file),
            os.path.join(staging_path, file), '{cache_path}',
            self.image_files_path)

    # Run oe_create_cache_config to make the cache config files
    cmd = 'oe_create_cache_config -cbd {0} {1}'.format(
        staging_path, cache_path)
    run_command(cmd)
    rmtree(staging_path)

    try:
        #file_text_replace(base_apache_config, self.output_apache_config, '{testpath}', test_config_path)
        file_text_replace(base_apache_config, self.output_apache_config,
                          '{cache_path}', test_config_path)
    except IOError as e:
        print "Can't write file: {0}. Error: {1}".format(
            self.output_apache_config, e)
    restart_apache()
def test_empty_tile_generation(self):
    # Set config files and reference hash for checking empty tile
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_empty_tile_generation.xml')
    ref_hash = "e6dc90abcc221cb2f473a0a489b604f6"
    config = get_layer_config(layer_config, self.archive_config)

    # Create paths for data and GC
    make_dir_tree(config['wmts_gc_path'])
    make_dir_tree(config['twms_gc_path'])
    make_dir_tree(config['archive_location'])

    # Copy the demo colormap
    make_dir_tree(config['colormap_locations'][0].firstChild.nodeValue)
    copy(
        os.path.join(self.testfiles_path,
                     'conf/' + config['colormaps'][0].firstChild.nodeValue),
        config['colormap_locations'][0].firstChild.nodeValue)

    # Run layer config tool
    cmd = 'oe_configure_layer -l {0} -a {1} -c {2} -p {3} -m {4}'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd)

    # Verify hash
    with open(config['empty_tile'], 'r') as f:
        tile_hash = testutils.get_file_hash(f)

    # Cleanup -- make sure to get rid of staging files
    rmtree(config['wmts_gc_path'])
    rmtree(config['wmts_staging_location'])
    rmtree(config['twms_staging_location'])
    rmtree(config['colormap_locations'][0].firstChild.nodeValue)
    rmtree(config['archive_location'])
    os.remove(config['empty_tile'])

    # Check result
    self.assertEqual(
        ref_hash, tile_hash,
        "Generated empty tile does not match what's expected.")
def setUpClass(self):
    # Get the path of the test data -- we assume that the script is in the parent dir of the data dir
    testdata_path = os.path.join(os.getcwd(), 'mod_onearth_test_data')
    wmts_configs = ('wmts_cache_configs', 'wmts_cache_staging',
                    'test_imagery/cache_all_wmts.config')
    twms_configs = ('twms_cache_configs', 'twms_cache_staging',
                    'test_imagery/cache_all_twms.config')
    self.image_files_path = os.path.join(testdata_path, 'test_imagery')
    self.test_apache_config = os.path.join(testdata_path, 'oe_test.conf')

    for template_dir, staging_dir, cache_config in (wmts_configs,
                                                    twms_configs):
        # Make staging cache files dir
        template_path = os.path.join(testdata_path, template_dir)
        staging_path = os.path.join(testdata_path, staging_dir)
        cache_path = os.path.join(testdata_path, cache_config)
        make_dir_tree(staging_path)

        # Copy XML/MRF files to staging cache files dir, swapping in the location of the imagery files
        for file in [
                f for f in os.listdir(template_path)
                if os.path.isfile(os.path.join(template_path, f))
        ]:
            file_text_replace(
                os.path.join(template_path, file),
                os.path.join(staging_path, file), '{cache_path}',
                self.image_files_path)

        # Run oe_create_cache_config to make the cache config files
        cmd = 'oe_create_cache_config -cbd {0} {1}'.format(
            staging_path, cache_path)
        run_command(cmd)
        rmtree(staging_path)

    # Put the correct path into the Apache config (oe_test.conf)
    file_text_replace(
        self.test_apache_config,
        os.path.join('/etc/httpd/conf.d',
                     os.path.basename(self.test_apache_config)),
        '{cache_path}', testdata_path)
    restart_apache()

    # Set some handy constant values
    self.tile_hashes = {
        '3d5280b13cbabc41676973d26844f310': '1948-03-01',
        '210964547845bbeb357f62c214128215': '1990-01-01',
        '403705d851af424b3bf9cafbbf869d0c': '2000-01-01',
        '4832d6edeed31fad0bd59bbc26d92275': '2000-06-01',
        '7ea2038a74af2988dc432a614ec94187': '2000-07-03',
        '03b3cc7adc929dd605d617b7066b30ae': '2000-08-01',
        '4f24774e71560e15b5ed43fcace2cb29': '2000-09-03',
        'fd9e3aa7c12fbf823bd339b92920784e': '2000-12-01',
        '24f90bd216f6b7ee25501155fcc8ece4': '2001-01-01',
        '3d12e06c60b379efc41f4b8021ce1e29': '2001-05-09',
        'e16d97b41cbb408d2a285788dfc9e3b8': '2002-01-01',
        'b64066bafe897f0d2c0fc4a41ae7e052': '2002-12-27',
        '21634316da8d6e0af3ee4f24643bd72c': '2002-12-01',
        'b3639da9334ca5c13072012f9422a03c': '2003-12-01',
        '172ba954906b3d4f5d6583b3ad88460f': '2004-12-01',
        'f4426ab405ce748b57b34859b3811bf6': '2005-01-01',
        '65e2446b2f779b963d0127f374a36fba': '2005-12-01',
        'faf5788ab8e006bbcfe18be80d472840': '2006-12-01',
        'd834056e48a95e39f55401eb61f710cd': '2007-12-01',
        '9a3cf29a5df271c41eefc5c989fd690d': '2008-01-01',
        'd03e3e3cdfef2b6e3d1870f26a88fe53': '2008-12-01',
        '59692a541429c929117c854fe9e423c9': '2009-12-01',
        '84eba8cdbb26444dbc97e119c0b76728': '2010-01-01',
        '91206f8c5a4f6fcdcab366ea00a1f53c': '2010-01-09',
        '9aa3115cde41a8b9c68433741d98a8b4': '2010-12-01',
        'dae12a917a5d672c4cce4fdaf4788bf3': '2011-12-01',
        '5346e958989b57c45919604ecf909f43': '2012-03-11',
        '92e5d5eef4dc6866b636a49bfae3e463': '2015-01-01',
        '5d91fa0c5273b2b58c486a15c91b2e78': '2015-01-02',
        '81b8d855e38e6783f14899ff89a2c878': '2015-10-01',
        '7f2992ac0986784c28d93840b1e984c4': '2016-02-29',
        '1571c4d601dfd871e7680e279e6fd39c': '2015-01-12',
        'b69307895d6cb654b98d247d710dd806': '2015-12-01',
        'ba366ccd45a8f1ae0ed2b65cf67b9787': '2016-01-01',
        '5e11f1220da2bb6f92d3e1c998f20bcf': 'black'
    }

    # URL that will be used to create the snap test requests
    self.snap_test_url_template = 'http://localhost/onearth/test/wmts/wmts.cgi?layer={0}&tilematrixset=EPSG4326_16km&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fjpeg&TileMatrix=0&TileCol=0&TileRow=0&TIME={1}'
def test_legacy_subdaily_intermittent(self):
    """
    Checks that layer config tool is correctly detecting the period and interval
    of subdaily layers that have the datetime in their filenames as opposed to the z-index.
    """
    if DEBUG:
        print '\nTESTING LEGACY SUBDAILY INTERMITTENT PERIOD DETECTION...'
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_subdaily_detect.xml')
    start_datetime = datetime.datetime(2014, 6, 1, 12)
    config = get_layer_config(layer_config, self.archive_config)

    # Set the various period units and lengths we'll be testing
    test_periods = (('hours', 1), ('hours', 5), ('minutes', 1),
                    ('minutes', 5), ('seconds', 1), ('seconds', 5))

    # Test intermittent periods
    for period_unit, period_length in test_periods:
        for year_dir in (True, False):
            # Make the GC dirs
            make_dir_tree(config['wmts_gc_path'])
            make_dir_tree(config['twms_gc_path'])
            make_dir_tree(config['wmts_staging_location'])
            make_dir_tree(config['twms_staging_location'])
            test_intervals = testutils.create_intermittent_period_test_files(
                config['archive_location'],
                period_unit,
                period_length,
                5,
                start_datetime,
                prefix=config['prefix'],
                make_year_dirs=year_dir)

            # Create debug output message (flatten array w/ itertools)
            if DEBUG:
                print '\nTesting with {0} {1} periods'.format(
                    period_length, period_unit)
                print 'Creating legacy subdaily files with dates: '
                for date in itertools.chain.from_iterable(test_intervals):
                    print date.isoformat()

            # Run layer config command
            cmd = 'oe_configure_layer -l {0} -z -e -a {1} -c {2} -p {3} -m {4}'.format(
                self.testfiles_path, self.archive_config, layer_config,
                self.projection_config, self.tilematrixset_config)
            run_command(cmd, ignore_warnings=True)

            # Check to see if proper period in GetCapabilities
            wmts_gc_file = os.path.join(config['wmts_gc_path'],
                                        'getCapabilities.xml')
            twms_gc_file = os.path.join(config['twms_gc_path'],
                                        'getCapabilities.xml')
            search_strings = []
            for interval in test_intervals:
                time_string = testutils.get_time_string(
                    interval[0], interval[-1], config)
                search_string = "<Value>" + time_string + "/PT{0}{1}</Value>".format(
                    period_length, period_unit[0].upper())
                search_strings.append(search_string)
            if DEBUG:
                print '\n' + 'Searching for strings in GetCapabilities: '
                for string in search_strings:
                    print string

            # Check to see if each string exists in the GC file
            error = "{0} {1} intermittent subdaily legacy period detection failed -- not found in WMTS GetCapabilities".format(
                period_length, period_unit)
            check_result = all(
                find_string(wmts_gc_file, string)
                for string in search_strings)

            # Cleanup
            rmtree(config['wmts_gc_path'])
            rmtree(config['wmts_staging_location'])
            rmtree(config['twms_staging_location'])
            rmtree(config['archive_location'])

            # Check result
            self.assertTrue(check_result, error)
def test_layer_config_default(self):
    # Set config files for testing the default layer configuration behavior
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_default_behavior.xml')
    config = get_layer_config(layer_config, self.archive_config)

    # Make test dirs
    make_dir_tree(config['archive_location'])
    make_dir_tree(config['wmts_gc_path'])
    make_dir_tree(config['twms_gc_path'])
    make_dir_tree(config['legend_location'])

    # Copy colormaps
    colormap_location = config['colormap_locations'][0].firstChild.nodeValue
    colormap = config['colormaps'][0].firstChild.nodeValue
    make_dir_tree(colormap_location)
    copy(os.path.join(self.testfiles_path, 'conf/' + colormap),
         colormap_location)

    # Run layer config tool
    cmd = 'oe_configure_layer -l {0} -a {1} -c {2} -p {3} -m {4}'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd)

    # Get all the test results before running the assertions. We do this because
    # a failure ends the test and makes it impossible to clean up
    wmts_cache_xml = os.path.join(config['archive_basepath'],
                                  'cache_all_wmts.xml')
    wmts_cache_file = os.path.join(config['archive_basepath'],
                                   'cache_all_wmts.config')
    wmts_cache = os.path.isfile(wmts_cache_file)
    wmts_gc_file = os.path.join(config['wmts_gc_path'],
                                'getCapabilities.xml')
    wmts_gc = os.path.isfile(wmts_gc_file)
    wmts_staging_file = os.path.join(config['wmts_staging_location'],
                                     config['prefix'] + '.xml')
    wmts_staging = os.path.isfile(wmts_staging_file)
    twms_cache_xml = os.path.join(config['archive_basepath'],
                                  'cache_all_twms.xml')
    twms_cache_file = os.path.join(config['archive_basepath'],
                                   'cache_all_twms.config')
    twms_cache = os.path.isfile(twms_cache_file)
    twms_gc_file = os.path.join(config['twms_gc_path'],
                                'getCapabilities.xml')
    twms_gc = os.path.isfile(twms_gc_file)
    twms_ts_file = os.path.join(config['twms_gc_path'],
                                'getTileService.xml')
    twms_ts = os.path.isfile(twms_ts_file)
    twms_staging_file = os.path.join(config['twms_staging_location'],
                                     config['prefix'] + '_gc.xml')
    twms_staging = os.path.isfile(twms_staging_file)
    twms_staging_gts_file = os.path.join(config['twms_staging_location'],
                                         config['prefix'] + '_gts.xml')
    twms_staging_gts = os.path.exists(twms_staging_gts_file)
    twms_staging_mrf_file = os.path.join(config['twms_staging_location'],
                                         config['prefix'] + '.mrf')
    twms_staging_mrf = os.path.exists(twms_staging_mrf_file)

    self.assertTrue(
        wmts_cache,
        "Default layer config test -- cache_all_wmts.config not created")
    self.assertTrue(
        wmts_gc,
        'Default layer config test -- WMTS getCapabilities.xml does not exist'
    )
    self.assertTrue(
        wmts_staging, 'Default layer config test -- staging file ' +
        wmts_staging_file + ' does not exist in WMTS staging area')
    self.assertTrue(
        twms_cache,
        'Default layer config test -- cache_all_twms.config does not exist'
    )
    self.assertTrue(
        twms_gc,
        'Default layer config test -- TWMS getCapabilities.xml does not exist'
    )
    self.assertTrue(
        twms_ts,
        'Default layer config test -- TWMS getTileService.xml does not exist'
    )
    self.assertTrue(
        twms_staging, 'Default layer config test -- staging file ' +
        twms_staging_file + ' does not exist in TWMS staging area')
    self.assertTrue(
        twms_staging_gts, 'Default layer config test -- staging file ' +
        twms_staging_gts_file + ' does not exist in TWMS staging area')
    self.assertTrue(
        twms_staging_mrf, 'Default layer config test -- staging file ' +
        twms_staging_mrf_file + ' does not exist in TWMS staging area')
    rmtree(config['legend_location'])
    rmtree(colormap_location)

    # String searches in the GC and config files
    search_string = '<ows:Identifier>' + config['identifier'] + '</ows:Identifier>'
    contains_layer = find_string(wmts_gc_file, search_string)
    os.remove(wmts_gc_file)
    os.remove(wmts_cache_xml)
    self.assertTrue(
        contains_layer,
        'Default layer config test -- WMTS GetCapabilities does not contain layer'
    )

    # Unicode weirdness in the binary configs necessitates running str() on the search strings
    search_string = str(
        'SERVICE=WMTS&REQUEST=GetTile&VERSION=1.0.0&LAYER=' +
        config['identifier'] +
        '&STYLE=(default)?&TILEMATRIXSET=EPSG3413_500m&TILEMATRIX=[0-9]*&TILEROW=[0-9]*&TILECOL=[0-9]*&FORMAT=image%2Fjpeg'
    )
    contains_layer = find_string(wmts_cache_file, search_string)
    os.remove(wmts_cache_file)
    os.remove(twms_cache_xml)
    self.assertTrue(
        contains_layer,
        'Default layer config test -- WMTS cache configuration does not contain layer'
    )

    search_string = '<Name>' + config['identifier'] + '</Name>'
    contains_layer = find_string(twms_gc_file, search_string)
    os.remove(twms_gc_file)
    self.assertTrue(
        contains_layer,
        'Default layer config test -- TWMS GetCapabilities does not contain layer'
    )

    search_string = '<Name>' + config['tiled_group_name'] + '</Name>'
    contains_layer = find_string(twms_ts_file, search_string)
    os.remove(twms_ts_file)
    self.assertTrue(
        contains_layer,
        'Default layer config test -- GetTileService does not contain layer'
    )

    search_string = str(
        'request=GetMap&layers=' + config['prefix'] +
        '&srs=EPSG:3413&format=image%2Fjpeg&styles=&width=512&height=512&bbox=[-,\.0-9+Ee]'
    )
    contains_layer = find_string(twms_cache_file, search_string)
    os.remove(twms_cache_file)
    self.assertTrue(
        contains_layer,
        'Default layer config test -- TWMS cache configuration does not contain layer'
    )
    rmtree(config['archive_location'])
    rmtree(config['wmts_gc_path'])
    rmtree(config['wmts_staging_location'])
    rmtree(config['twms_staging_location'])
def test_intermittent_zlevel_period_detection(self):
    """
    Checks that the start and end periods of a z-level file are being correctly detected.
    """
    if DEBUG:
        print '\nTESTING INTERMITTENT Z-LEVEL PERIOD DETECTION...'
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_zindex_detect.xml')
    test_periods = (('minutes', 1), ('minutes', 5), ('hours', 1))
    start_datetime = datetime.datetime(2014, 6, 1, 12, 0, 0)
    start_datestring = str(start_datetime.year) + str(
        start_datetime.timetuple().tm_yday).zfill(3)
    config = get_layer_config(layer_config, self.archive_config)

    for period_unit, period_length in test_periods:
        # Make archive location dir
        archive_location = os.path.join(config['archive_location'],
                                        str(start_datetime.year))
        make_dir_tree(archive_location)

        # Make temp GC and archive directories and dummy MRF
        make_dir_tree(config['wmts_gc_path'])
        make_dir_tree(config['twms_gc_path'])
        make_dir_tree(config['wmts_staging_location'])
        make_dir_tree(config['twms_staging_location'])
        dummy_mrf = os.path.join(
            archive_location, config['prefix'] + start_datestring + '_.mrf')
        open(dummy_mrf, 'a').close()

        # Create a ZDB file for seeding with the dates we're looking for
        zdb_path = os.path.join(
            archive_location, config['prefix'] + start_datestring + '_.zdb')
        conn = sqlite3.connect(zdb_path)

        # Create ZINDEX table, generate test dates, and populate ZDB file
        conn.execute(
            'CREATE TABLE ZINDEX(z INTEGER PRIMARY KEY AUTOINCREMENT, key_str TEXT);'
        )
        test_intervals = testutils.create_intermittent_period_test_files(
            config['archive_location'],
            period_unit,
            period_length,
            5,
            start_datetime,
            prefix=config['prefix'],
            no_files=True)

        # Create debug output message (flatten array w/ itertools)
        if DEBUG:
            print 'Testing with {0} {1} periods'.format(
                period_length, period_unit)
            print 'Creating ZDB with dates: '
            for date in itertools.chain.from_iterable(test_intervals):
                print date.isoformat()

        # Populate the dates in the ZDB (flatten array w/ itertools)
        for i, date in enumerate(
                itertools.chain.from_iterable(test_intervals)):
            z_key = date.strftime('%Y%m%d%H%M%S')
            sql = 'INSERT INTO ZINDEX(z, key_str) VALUES ({0}, {1})'.format(
                i, z_key)
            conn.execute(sql)
            conn.commit()

        # Close ZDB and run layer config
        conn.close()

        # Run layer config command
        cmd = 'oe_configure_layer -l {0} -z -e -a {1} -c {2} -p {3} -m {4}'.format(
            self.testfiles_path, self.archive_config, layer_config,
            self.projection_config, self.tilematrixset_config)
        run_command(cmd, ignore_warnings=True)

        # Check to see if proper period in GetCapabilities
        wmts_gc_file = os.path.join(config['wmts_gc_path'],
                                    'getCapabilities.xml')
        twms_gc_file = os.path.join(config['twms_gc_path'],
                                    'getCapabilities.xml')

        # Build GC search strings
        search_strings = []
        for interval in test_intervals:
            search_string = "<Value>" + interval[0].isoformat(
            ) + "Z/" + interval[-1].isoformat() + "Z</Value>"
            search_strings.append(search_string)
        if DEBUG:
            print '\n' + 'Searching for strings in GetCapabilities: '
            for string in search_strings:
                print string

        # Check to see if each string exists in the GC file
        error = "{0} {1} intermittent z-level period detection failed -- not found in WMTS GetCapabilities".format(
            period_length, period_unit)
        check_result = all(
            find_string(wmts_gc_file, string) for string in search_strings)

        # Cleanup
        conn.close()
        rmtree(config['wmts_gc_path'])
        rmtree(config['wmts_staging_location'])
        rmtree(config['twms_staging_location'])
        rmtree(config['archive_location'])

        # Check result
        self.assertTrue(check_result, error)
def test_intermittent_period_detection(self):
    if DEBUG:
        print '\nTESTING INTERMITTENT PERIOD DETECTION...'
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_period_detection.xml')

    # Pick the start time for the dates that will be generated
    start_datetime = datetime.datetime(2014, 6, 1)

    # Set the various period units and lengths we'll be testing
    test_periods = (('days', 1), ('days', 5), ('months', 1), ('years', 1))
    config = get_layer_config(layer_config, self.archive_config)

    for period_unit, period_length in test_periods:
        # We test detection with both year and non-year directory setups
        for year_dir in (True, False):
            make_dir_tree(config['wmts_gc_path'])
            make_dir_tree(config['twms_gc_path'])
            make_dir_tree(config['wmts_staging_location'])
            make_dir_tree(config['twms_staging_location'])

            # Generate the empty test files
            test_intervals = testutils.create_intermittent_period_test_files(
                config['archive_location'],
                period_unit,
                period_length,
                5,
                start_datetime,
                prefix=config['prefix'],
                make_year_dirs=year_dir)

            # Run layer config command for daily test days
            cmd = 'oe_configure_layer -l {0} -z -e -a {1} -c {2} -p {3} -m {4}'.format(
                self.testfiles_path, self.archive_config, layer_config,
                self.projection_config, self.tilematrixset_config)
            run_command(cmd, ignore_warnings=True)

            # Check to see if proper period in GetCapabilities
            wmts_gc_file = os.path.join(config['wmts_gc_path'],
                                        'getCapabilities.xml')
            twms_gc_file = os.path.join(config['twms_gc_path'],
                                        'getCapabilities.xml')

            # Build a list of GC search strings
            search_strings = []
            for interval in test_intervals:
                search_string = "<Value>" + interval[0].isoformat() + "/" + \
                    interval[-1].isoformat() + \
                    "/P{0}{1}</Value>".format(period_length,
                                              period_unit[0].upper())
                search_strings.append(search_string)

            # Create debug output message
            if DEBUG:
                print '\n' + 'Creating dates: '
                dates = [
                    date for interval in test_intervals for date in interval
                ]
                for date in dates:
                    print date.isoformat()
                print '\n' + 'Searching for string(s) in GetCapabilities: '
                for string in search_strings:
                    print string

            # Check to see if each string exists in the GC files
            wmts_error = "{0} {1} intermittent period detection failed -- not found in WMTS GetCapabilities".format(
                period_length, period_unit)
            # twms_error = "{0} {1} period detection failed -- not found in TMWS GetCapabilities".format(period_length, period_unit)
            # self.assertTrue(find_string(twms_gc_file, search_string), twms_error)
            search_result = all(
                find_string(wmts_gc_file, string)
                for string in search_strings)

            # Cleanup -- make sure to get rid of staging files
            rmtree(config['wmts_gc_path'])
            rmtree(config['wmts_staging_location'])
            rmtree(config['twms_staging_location'])
            rmtree(config['archive_location'])

            # Check result
            self.assertTrue(search_result, wmts_error)
def test_layer_config_legends(self):
    # Set config files and reference hashes for checking the generated legends
    layer_config = os.path.join(self.testfiles_path,
                                'conf/test_legend_generation.xml')
    h_legend_ref_hash = '45223e22a673700d52f17c6658eac7e0'
    v_legend_ref_hash = 'cf9b632f30fbdbea466a489ecf363d76'
    config = get_layer_config(layer_config, self.archive_config)

    # Create legend, archive, and colormap dirs
    make_dir_tree(config['legend_location'])
    make_dir_tree(config['colormap_locations'][0].firstChild.nodeValue)
    make_dir_tree(config['archive_location'])
    make_dir_tree(config['wmts_gc_path'])
    make_dir_tree(config['twms_gc_path'])

    # Copy colormap to colormaps dir
    copy(
        os.path.join(self.testfiles_path,
                     'conf/' + config['colormaps'][0].firstChild.nodeValue),
        config['colormap_locations'][0].firstChild.nodeValue)

    # Run layer config tool
    cmd = 'oe_configure_layer -l{0} --skip_empty_tiles -g -a {1} -c {2} -p {3} -m {4}'.format(
        self.testfiles_path, self.archive_config, layer_config,
        self.projection_config, self.tilematrixset_config)
    run_command(cmd)
    """Get hashes of generated legends

    Note that matplotlib 1.5.1 generates unique ID values for style references,
    making each new file different. We strip these unique references before
    hashing the file so we can have a baseline for testing.
    """
    hasher = hashlib.md5()
    stripped_file = ''
    try:
        with open(
                os.path.join(config['legend_location'],
                             config['prefix'] + '_H.svg'), 'r') as f:
            file_str = f.read()
            stripped_file = re.sub('(id="[#A-Za-z0-9]{11}")', '', file_str)
            stripped_file = re.sub('(xlink:href="[#A-Za-z0-9]{12}")', '',
                                   stripped_file)
            stripped_file = re.sub('(clip-path="url\([#A-Za-z0-9]{12}\)")',
                                   '', stripped_file)
            hasher.update(stripped_file)
            h_legend_hash = hasher.hexdigest()
    except (IOError, OSError):
        raise ValueError('Horizontal legend not generated')
    try:
        with open(
                os.path.join(config['legend_location'],
                             config['prefix'] + '_V.svg'), 'r') as f:
            file_str = f.read()
            stripped_file = re.sub('(id="[#A-Za-z0-9]{11}")', '', file_str)
            stripped_file = re.sub('(xlink:href="[#A-Za-z0-9]{12}")', '',
                                   stripped_file)
            stripped_file = re.sub('(clip-path="url\([#A-Za-z0-9]{12}\)")',
                                   '', stripped_file)
            hasher.update(stripped_file)
            v_legend_hash = hasher.hexdigest()
    except (IOError, OSError):
        raise ValueError('Vertical legend not generated')

    # Cleanup
    rmtree(config['wmts_gc_path'])
    rmtree(config['colormap_locations'][0].firstChild.nodeValue)
    rmtree(config['legend_location'])
    rmtree(config['wmts_staging_location'])
    rmtree(config['twms_staging_location'])

    # Check if hashes are kosher
    self.assertEqual(
        h_legend_ref_hash, h_legend_hash,
        'Horizontal legend generated does not match expected.')
    self.assertEqual(v_legend_ref_hash, v_legend_hash,
                     'Vertical legend generated does not match expected.')