def test_insert_tile_record(self):
    """Test the Landsat tiling process method by comparing output to a
    file on disk."""
    # pylint: disable=too-many-locals
    # Exercise tile_record creation for a single PQA dataset.
    processing_level = 'PQA'
    dataset_path = TestIngest.DATASETS_TO_INGEST[processing_level][0]
    LOGGER.info('Testing Dataset %s', dataset_path)
    dset = LandsatDataset(dataset_path)
    # A DatasetRecord is needed so we can access its list_tile_types()
    # method; building one requires a collection object plus acquisition
    # and dataset rows in the database.
    self.collection.begin_transaction()
    acquisition = self.collection.create_acquisition_record(dset)
    dset_record = acquisition.create_dataset_record(dset)
    # List the benchmark footprints associated with this dataset.
    bench_footprints = TestTileContents.get_benchmark_footprints(
        dset_record.mdd, TestIngest.BENCHMARK_DIR)
    LOGGER.info('bench_footprints=%s', str(bench_footprints))
    # Tile types are listed but not used further; the dataset is assumed
    # to have tile_type = 1 only.
    _ = dset_record.list_tile_types()
    tile_type_id = 1
    band_dict = dset_record.get_tile_bands(tile_type_id)
    band_stack = dset.stack_bands(band_dict)
    scratch_dir = os.path.join(self.ingester.datacube.tile_root,
                               'ingest_temp')
    # Build the scene VRT, then reproject scene data onto each tile in
    # the computed coverage.
    band_stack.buildvrt(scratch_dir)
    footprints = dset_record.get_coverage(tile_type_id)
    LOGGER.info('coverage=%s', str(footprints))
    for footprint in footprints:
        tile_contents = self.collection.create_tile_contents(
            tile_type_id, footprint, band_stack)
        LOGGER.info('reprojecting for %s tile %s',
                    processing_level, str(footprint))
        # reproject() must be called first: it sets
        # tile_contents.tile_extents.
        tile_contents.reproject()
        if tile_contents.has_data():
            _ = dset_record.create_tile_record(tile_contents)
    self.collection.commit_transaction()
def open_dataset(self, dataset_path):
    """Create and return a dataset object.

    dataset_path: points to the dataset to be opened and have its
        metadata read.
    """
    dataset = LandsatDataset(dataset_path)
    return dataset
def test_get_bbox_dataset(self, tile_type_id=1):
    """Test the DatasetRecord class get_bbox() method on six landsat
    datasets.

    tile_type_id: key into the datacube's tile_type_dict (default 1).
    """
    # pylint: disable=too-many-locals
    # Hoist the repeated tile_type_dict lookups out of the loop.
    tile_type_info = self.ingester.datacube.tile_type_dict[tile_type_id]
    cube_tile_size = (tile_type_info['x_size'], tile_type_info['y_size'])
    cube_pixels = (tile_type_info['x_pixels'], tile_type_info['y_pixels'])
    tile_crs = tile_type_info['crs']
    # enumerate() replaces the range(len(...)) index loop.
    for idataset, dataset_path in enumerate(DATASETS_TO_INGEST):
        # Get information required for calculating the bounding box.
        dset = LandsatDataset(dataset_path)
        dataset_crs = dset.get_projection()
        geotrans = dset.get_geo_transform()
        pixels = dset.get_x_pixels()
        lines = dset.get_y_pixels()
        # Create a DatasetRecord instance so that we can test its
        # get_bbox() method. In doing this we need to create a
        # collection object and entries on the acquisition and dataset
        # tables of the database.
        self.collection.begin_transaction()
        acquisition = self.collection.create_acquisition_record(dset)
        dset_record = acquisition.create_dataset_record(dset)
        self.collection.commit_transaction()
        # Determine the bounding quadrilateral of the dataset extent.
        transformation = dset_record.define_transformation(dataset_crs,
                                                           tile_crs)
        bbox = dset_record.get_bbox(transformation, geotrans,
                                    pixels, lines)
        reference_dataset_bbox = DATASET_BBOX[idataset]
        # Fixed: Python 2 'print' statement replaced with LOGGER.info
        # for consistency with the rest of this module.
        LOGGER.info('Checking bbox for Dataset %d', idataset)
        # Express each corner's residual in units of tile pixels so the
        # TOLERANCE check is resolution-independent.
        residual_in_pixels = \
            [((x2 - x1) * cube_pixels[0] / cube_tile_size[0],
              (y2 - y1) * cube_pixels[1] / cube_tile_size[1])
             for ((x1, y1), (x2, y2)) in zip(reference_dataset_bbox,
                                             bbox)]
        assert all(abs(dx) < TOLERANCE and abs(dy) < TOLERANCE
                   for (dx, dy) in residual_in_pixels), \
            "bounding box calculation incorrect"
def test_make_mosaics(self):
    """Make mosaic tiles from two adjoining scenes."""
    # pylint: disable=too-many-locals
    # Ingest six scenes per processing level plus the mosaic source
    # scenes, in random order, and check resulting mosaics against the
    # benchmarks in the 'expected' directory.
    dataset_list = \
        [TestIngest.DATASETS_TO_INGEST[level][i] for i in range(6)
         for level in ['PQA', 'NBAR', 'ORTHO']]
    dataset_list.extend(TestIngest.MOSAIC_SOURCE_NBAR)
    dataset_list.extend(TestIngest.MOSAIC_SOURCE_PQA)
    dataset_list.extend(TestIngest.MOSAIC_SOURCE_ORTHO)
    random.shuffle(dataset_list)
    LOGGER.info("Ingesting following datasets:")
    # Fixed: use enumerate() instead of dataset_list.index(...).
    # list.index() is O(n) per call and returns the FIRST occurrence,
    # so duplicate paths in the list were logged with the wrong number.
    for dataset_index, dataset_path in enumerate(dataset_list):
        LOGGER.info('%d) %s', dataset_index, dataset_path)
    for dataset_index, dataset_path in enumerate(dataset_list):
        LOGGER.info('Ingesting Dataset %d:\n%s',
                    dataset_index, dataset_path)
        dset = LandsatDataset(dataset_path)
        self.collection.begin_transaction()
        acquisition = self.collection.create_acquisition_record(dset)
        dset_record = acquisition.create_dataset_record(dset)
        # Get tile types (unused beyond the call); assume the dataset
        # has tile_type = 1 only.
        dummy_tile_type_list = dset_record.list_tile_types()
        tile_type_id = 1
        dataset_bands_dict = dset_record.get_tile_bands(tile_type_id)
        ls_bandstack = dset.stack_bands(dataset_bands_dict)
        temp_dir = os.path.join(self.ingester.datacube.tile_root,
                                'ingest_temp')
        # Form scene vrt.
        ls_bandstack.buildvrt(temp_dir)
        # Reproject scene data onto selected tile coverage.
        tile_footprint_list = dset_record.get_coverage(tile_type_id)
        LOGGER.info('coverage=%s', str(tile_footprint_list))
        for tile_ftprint in tile_footprint_list:
            # Only do that footprint for which we have benchmark mosaics.
            if tile_ftprint not in [(141, -38)]:
                continue
            tile_contents = \
                self.collection.create_tile_contents(tile_type_id,
                                                     tile_ftprint,
                                                     ls_bandstack)
            LOGGER.info('Calling reproject for %s tile %s...',
                        dset_record.mdd['processing_level'],
                        tile_ftprint)
            tile_contents.reproject()
            LOGGER.info('...finished')
            if tile_contents.has_data():
                LOGGER.info('tile %s has data',
                            tile_contents.temp_tile_output_path)
                tile_record = dset_record.create_tile_record(tile_contents)
                mosaic_required = tile_record.make_mosaics()
                if not mosaic_required:
                    continue
                # Test mosaic tiles against benchmark.
                # At this stage, the transaction for this dataset is not
                # yet committed, so the tiles from this dataset,
                # including any mosaics, are still in the temporary
                # location.
                if self.POPULATE_EXPECTED:
                    continue
                mosaic_benchmark = \
                    TestTileContents.swap_dir_in_path(
                        tile_contents.mosaic_final_pathname,
                        'output', 'expected')
                mosaic_new = tile_contents.mosaic_temp_pathname
                LOGGER.info("Comparing test output with benchmark:\n"
                            "benchmark: %s\ntest output: %s",
                            mosaic_benchmark, mosaic_new)
                if dset_record.mdd['processing_level'] == 'PQA':
                    LOGGER.info("For PQA mosaic, calling "
                                "load_and_check...")
                    ([data1, data2], dummy_nlayers) = \
                        TestLandsatTiler.load_and_check(
                            mosaic_benchmark,
                            mosaic_new,
                            tile_contents.band_stack.band_dict,
                            tile_contents.band_stack.band_dict)
                    LOGGER.info('Checking arrays ...')
                    # NOTE(review): ~ on the numpy bool scalar returned
                    # by .all() acts as logical not here.
                    if ~(data1 == data2).all():
                        self.fail("Difference in PQA mosaic "
                                  "from expected result: %s and %s"
                                  % (mosaic_benchmark, mosaic_new))
                else:
                    # Non-PQA mosaics are VRTs; diff the files, ignoring
                    # lines containing a filename.
                    diff_cmd = ["diff", "-I", "[Ff]ilename",
                                "%s" % mosaic_benchmark,
                                "%s" % mosaic_new]
                    result = execute(diff_cmd, shell=False)
                    assert result['stdout'] == '', \
                        "Differences between vrt files"
                    assert result['stderr'] == '', \
                        "Error in system diff command"
            else:
                LOGGER.info('... tile has no data')
                tile_contents.remove()
        self.collection.commit_transaction()
def test_make_mosaics(self):
    """Make mosaic tiles from two adjoining scenes."""
    # pylint: disable=too-many-locals
    nbar1, nbar2 = TestIngest.MOSAIC_SOURCE_NBAR
    ortho1, ortho2 = TestIngest.MOSAIC_SOURCE_ORTHO
    pqa1, pqa2 = TestIngest.MOSAIC_SOURCE_PQA
    # Set the list of dataset paths which should result in mosaic tiles.
    dataset_list = [nbar1, nbar2, ortho1, ortho2, pqa1, pqa2]
    # NOTE(review): the full list above is immediately replaced so that
    # only the PQA pair is ingested — confirm whether this restriction
    # is deliberate before removing either assignment.
    dataset_list = [pqa1, pqa2]
    for dataset_path in dataset_list:
        dset = LandsatDataset(dataset_path)
        self.collection.begin_transaction()
        acquisition = self.collection.create_acquisition_record(dset)
        dset_record = acquisition.create_dataset_record(dset)
        # Tile types are listed but unused beyond this call; the
        # dataset is assumed to have tile_type = 1 only.
        _ = dset_record.list_tile_types()
        tile_type_id = 1
        band_dict = dset_record.get_tile_bands(tile_type_id)
        scene_stack = dset.stack_bands(band_dict)
        scratch_dir = os.path.join(self.ingester.datacube.tile_root,
                                   'ingest_temp')
        # Build the scene VRT, then reproject onto the covered tiles.
        scene_stack.buildvrt(scratch_dir)
        footprints = dset_record.get_coverage(tile_type_id)
        LOGGER.info('coverage=%s', str(footprints))
        for footprint in footprints:
            # Only process the footprint with benchmark mosaics.
            if footprint not in [(150, -26)]:
                continue
            tile_contents = self.collection.create_tile_contents(
                tile_type_id, footprint, scene_stack)
            LOGGER.info('Calling reproject for %s tile %s...',
                        dset_record.mdd['processing_level'], footprint)
            tile_contents.reproject()
            LOGGER.info('...finished')
            if not tile_contents.has_data():
                LOGGER.info('... tile has no data')
                tile_contents.remove()
                continue
            LOGGER.info('tile %s has data',
                        tile_contents.temp_tile_output_path)
            tile_record = dset_record.create_tile_record(tile_contents)
            if not tile_record.make_mosaics():
                continue
            # Compare the mosaic tile against its benchmark copy in the
            # benchmark mosaic_cache directory.
            mosaic_benchmark = TestTileContents.get_benchmark_tile(
                dset_record.mdd,
                os.path.join(TestIngest.BENCHMARK_DIR, 'mosaic_cache'),
                footprint)
            mosaic_new = TestTileContents.get_benchmark_tile(
                dset_record.mdd,
                os.path.join(
                    os.path.dirname(tile_contents.temp_tile_output_path),
                    'mosaic_cache'),
                footprint)
            LOGGER.info("Calling load_and_check...")
            ([data1, data2], dummy_nlayers) = \
                TestLandsatTiler.load_and_check(
                    mosaic_benchmark, mosaic_new,
                    tile_contents.band_stack.band_dict,
                    tile_contents.band_stack.band_dict)
            LOGGER.info('Checking arrays ...')
            if dset_record.mdd['processing_level'] == 'PQA':
                # Differences are acceptable only where they stem from
                # differing treatment of the contiguity bit (bit 8).
                mismatch = data1 != data2
                benchmark_vals = data1[mismatch]
                new_vals = data2[mismatch]
                contiguity_diff = np.logical_or(
                    (benchmark_vals & (1 << 8)) == 0,
                    (new_vals & (1 << 8)) == 0)
                assert contiguity_diff.all(), \
                    "mosaiced tile %s differs from benchmark %s" \
                    % (mosaic_new, mosaic_benchmark)
            else:
                # Non-PQA mosaics are VRT files: diff them, ignoring
                # lines that contain a filename.
                diff_cmd = ["diff", "-I", "[Ff]ilename",
                            "%s" % mosaic_benchmark,
                            "%s" % mosaic_new]
                result = execute(diff_cmd, shell=False)
                assert result['stdout'] == '', \
                    "Differences between vrt files"
                assert result['stderr'] == '', \
                    "Error in system diff command"
        self.collection.commit_transaction()
def test_make_mosaics(self):
    """Make mosaic tiles from two adjoining scenes."""
    # pylint: disable=too-many-locals
    # NOTE(review): this appears to be a duplicate definition of
    # test_make_mosaics within the same class; later definitions shadow
    # earlier ones — verify which version is intended to run.
    nbar1, nbar2 = TestIngest.MOSAIC_SOURCE_NBAR
    ortho1, ortho2 = TestIngest.MOSAIC_SOURCE_ORTHO
    pqa1, pqa2 = TestIngest.MOSAIC_SOURCE_PQA
    # Set the list of dataset paths which should result in mosaic tiles.
    dataset_list = [nbar1, nbar2, ortho1, ortho2, pqa1, pqa2]
    # NOTE(review): the line below overrides the full list so that only
    # the PQA pair is ingested; confirm this restriction is deliberate.
    dataset_list = [pqa1, pqa2]
    benchmark_footprints = [(150, -26)]
    for scene_path in dataset_list:
        scene = LandsatDataset(scene_path)
        self.collection.begin_transaction()
        acquisition_record = \
            self.collection.create_acquisition_record(scene)
        dataset_record = acquisition_record.create_dataset_record(scene)
        # Get tile types (result unused); this test assumes the dataset
        # has tile_type = 1 only.
        dummy_tile_type_list = dataset_record.list_tile_types()
        tile_type_id = 1
        bands = dataset_record.get_tile_bands(tile_type_id)
        band_stack = scene.stack_bands(bands)
        vrt_dir = os.path.join(self.ingester.datacube.tile_root,
                               'ingest_temp')
        # Form the scene VRT.
        band_stack.buildvrt(vrt_dir)
        # Reproject scene data onto the selected tile coverage.
        coverage = dataset_record.get_coverage(tile_type_id)
        LOGGER.info('coverage=%s', str(coverage))
        for tile_ftprint in coverage:
            # Only do that footprint for which we have benchmark mosaics.
            if tile_ftprint not in benchmark_footprints:
                continue
            tile_contents = self.collection.create_tile_contents(
                tile_type_id, tile_ftprint, band_stack)
            LOGGER.info('Calling reproject for %s tile %s...',
                        dataset_record.mdd['processing_level'],
                        tile_ftprint)
            tile_contents.reproject()
            LOGGER.info('...finished')
            if tile_contents.has_data():
                LOGGER.info('tile %s has data',
                            tile_contents.temp_tile_output_path)
                tile_record = \
                    dataset_record.create_tile_record(tile_contents)
                mosaic_required = tile_record.make_mosaics()
                if not mosaic_required:
                    continue
                # Test the mosaic tile against the benchmark copy held
                # in the benchmark mosaic_cache directory.
                benchmark_dir = os.path.join(TestIngest.BENCHMARK_DIR,
                                             'mosaic_cache')
                new_dir = os.path.join(
                    os.path.dirname(tile_contents.temp_tile_output_path),
                    'mosaic_cache')
                mosaic_benchmark = TestTileContents.get_benchmark_tile(
                    dataset_record.mdd, benchmark_dir, tile_ftprint)
                mosaic_new = TestTileContents.get_benchmark_tile(
                    dataset_record.mdd, new_dir, tile_ftprint)
                LOGGER.info("Calling load_and_check...")
                ([data1, data2], dummy_nlayers) = \
                    TestLandsatTiler.load_and_check(
                        mosaic_benchmark, mosaic_new,
                        tile_contents.band_stack.band_dict,
                        tile_contents.band_stack.band_dict)
                LOGGER.info('Checking arrays ...')
                if dataset_record.mdd['processing_level'] == 'PQA':
                    # Check that any differences are due to differing
                    # treatment of the contiguity bit (bit 8).
                    matching = (data1 == data2)
                    bench_diff = data1[~matching]
                    new_diff = data2[~matching]
                    contiguity_diff = np.logical_or(
                        np.bitwise_and(bench_diff, 1 << 8) == 0,
                        np.bitwise_and(new_diff, 1 << 8) == 0)
                    assert contiguity_diff.all(), \
                        "mosaiced tile %s differs from benchmark %s" \
                        % (mosaic_new, mosaic_benchmark)
                else:
                    # Non-PQA mosaics are VRTs; diff the files, ignoring
                    # filename lines.
                    diff_cmd = ["diff", "-I", "[Ff]ilename",
                                "%s" % mosaic_benchmark,
                                "%s" % mosaic_new]
                    result = execute(diff_cmd, shell=False)
                    assert result['stdout'] == '', \
                        "Differences between vrt files"
                    assert result['stderr'] == '', \
                        "Error in system diff command"
            else:
                LOGGER.info('... tile has no data')
                tile_contents.remove()
        self.collection.commit_transaction()
def test_get_coverage(self, tile_type_id=1):
    # pylint: disable=too-many-locals
    """Test the methods called by the dataset_record.get_coverage()
    method.

    The constants at the top of this file provide test data expected to
    be returned by the tested get_coverage methods:

    1. TILE_XLL, TILE_YLL,... : dataset bounding box in tile projection
       coordinates TILE_CRS
    2. DEFINITE_TILES: tiles in inner rectangle
    3. POSSIBLE_TILES: tiles in outer rectangle
    4. INTERSECTED_TILES: those tiles from the outer rectangle that
       intersect the dataset bounding box
    5. CONTAINED_TILES: those tiles from outer rectangle wholly
       contained in the dataset bounding box
    6. COVERAGE: the tiles to be returned from
       DatasetRecord.get_coverage()
    """
    # Running unions of each tile category over all datasets; the
    # expected-set assertions are made after the loop.
    total_definite_tiles = set()
    total_possible_tiles = set()
    total_intersected_tiles = set()
    total_contained_tiles = set()
    total_touched_tiles = set()
    total_coverage = set()
    # Hoist the repeated tile_type_dict lookups out of the loop.
    tile_type_info = self.ingester.datacube.tile_type_dict[tile_type_id]
    cube_origin = (tile_type_info['x_origin'],
                   tile_type_info['y_origin'])
    cube_tile_size = (tile_type_info['x_size'],
                      tile_type_info['y_size'])
    tile_crs = tile_type_info['crs']
    # enumerate() replaces the range(len(...)) index loop.
    for idataset, dataset_path in enumerate(DATASETS_TO_INGEST):
        # Fixed: Python 2 'print' statement replaced with LOGGER.info
        # for consistency with the rest of this module.
        LOGGER.info('Getting the coverage from Dataset %d', idataset)
        dset = LandsatDataset(dataset_path)
        dataset_crs = dset.get_projection()
        geotrans = dset.get_geo_transform()
        pixels = dset.get_x_pixels()
        lines = dset.get_y_pixels()
        # Create a DatasetRecord instance so that we can test its
        # get_coverage() method. In doing this we need to create a
        # collection object and entries on the acquisition and dataset
        # tables of the database.
        self.collection.begin_transaction()
        acquisition = self.collection.create_acquisition_record(dset)
        dset_record = acquisition.create_dataset_record(dset)
        self.collection.commit_transaction()
        # Determine the bounding quadrilateral of the dataset extent.
        transformation = dset_record.define_transformation(dataset_crs,
                                                           tile_crs)
        bbox = dset_record.get_bbox(transformation, geotrans,
                                    pixels, lines)
        # Get the definite and possible tiles from this dataset and
        # accumulate in the running totals.
        definite_tiles, possible_tiles = \
            dset_record.get_definite_and_possible_tiles(bbox,
                                                        cube_origin,
                                                        cube_tile_size)
        total_definite_tiles = \
            total_definite_tiles.union(definite_tiles)
        total_possible_tiles = \
            total_possible_tiles.union(possible_tiles)
        # Get intersected tiles and accumulate in the running total.
        intersected_tiles = \
            dset_record.get_intersected_tiles(possible_tiles, bbox,
                                              cube_origin,
                                              cube_tile_size)
        total_intersected_tiles = \
            total_intersected_tiles.union(intersected_tiles)
        # Take intersected tiles out of the possible tiles, then get
        # those wholly contained in the dataset bbox.
        possible_tiles = possible_tiles.difference(intersected_tiles)
        contained_tiles = \
            dset_record.get_contained_tiles(possible_tiles, bbox,
                                            cube_origin, cube_tile_size)
        total_contained_tiles = \
            total_contained_tiles.union(contained_tiles)
        # Use the parent method to get the touched tiles.
        touched_tiles = \
            dset_record.get_touched_tiles(bbox, cube_origin,
                                          cube_tile_size)
        total_touched_tiles = total_touched_tiles.union(touched_tiles)
        # Use the parent method get_coverage to get the coverage.
        coverage = dset_record.get_coverage(tile_type_id)
        total_coverage = total_coverage.union(coverage)
    # Check definite and possible tiles are as expected.
    assert total_definite_tiles == DEFINITE_TILES, \
        "Set of definite tiles disagrees with test data"
    assert total_possible_tiles == POSSIBLE_TILES, \
        "Set of possible tiles disagrees with test data"
    # Check intersected tiles are as expected.
    assert total_intersected_tiles == INTERSECTED_TILES, \
        "Set of intersected tiles disagrees with test data"
    # Check contained tiles are as expected.
    assert total_contained_tiles == CONTAINED_TILES, \
        "Set of tiles not in the definite set but wholly contained " \
        "within the dataset bbox does not agree with test data"
    # Check results of get_touched_tiles against expectations.
    assert total_touched_tiles == COVERAGE, \
        "Set of tiles returned by get_touched_tiles does not agree " \
        "with test data"
    assert total_coverage == COVERAGE, \
        "Set of tiles returned by get_coverage does not agree " \
        "with test data"
def test_make_mosaics(self):
    """Make mosaic tiles from two adjoining scenes."""
    # pylint: disable=too-many-locals
    # NOTE(review): this duplicates an earlier test_make_mosaics
    # definition in this class; later definitions shadow earlier ones.
    # Ingest six scenes per processing level plus the mosaic source
    # scenes, in random order, and check resulting mosaics against the
    # benchmarks in the 'expected' directory.
    dataset_list = \
        [TestIngest.DATASETS_TO_INGEST[level][i] for i in range(6)
         for level in ['PQA', 'NBAR', 'ORTHO']]
    dataset_list.extend(TestIngest.MOSAIC_SOURCE_NBAR)
    dataset_list.extend(TestIngest.MOSAIC_SOURCE_PQA)
    dataset_list.extend(TestIngest.MOSAIC_SOURCE_ORTHO)
    random.shuffle(dataset_list)
    LOGGER.info("Ingesting following datasets:")
    # Fixed: use enumerate() instead of dataset_list.index(...).
    # list.index() is O(n) per call and returns the FIRST occurrence,
    # so duplicate paths in the list were logged with the wrong number.
    for dataset_index, dataset_path in enumerate(dataset_list):
        LOGGER.info('%d) %s', dataset_index, dataset_path)
    for dataset_index, dataset_path in enumerate(dataset_list):
        LOGGER.info('Ingesting Dataset %d:\n%s',
                    dataset_index, dataset_path)
        dset = LandsatDataset(dataset_path)
        self.collection.begin_transaction()
        acquisition = self.collection.create_acquisition_record(dset)
        dset_record = acquisition.create_dataset_record(dset)
        # Get tile types (unused beyond the call); assume the dataset
        # has tile_type = 1 only.
        dummy_tile_type_list = dset_record.list_tile_types()
        tile_type_id = 1
        dataset_bands_dict = dset_record.get_tile_bands(tile_type_id)
        ls_bandstack = dset.stack_bands(dataset_bands_dict)
        temp_dir = os.path.join(self.ingester.datacube.tile_root,
                                'ingest_temp')
        # Form scene vrt.
        ls_bandstack.buildvrt(temp_dir)
        # Reproject scene data onto selected tile coverage.
        tile_footprint_list = dset_record.get_coverage(tile_type_id)
        LOGGER.info('coverage=%s', str(tile_footprint_list))
        for tile_ftprint in tile_footprint_list:
            # Only do that footprint for which we have benchmark mosaics.
            if tile_ftprint not in [(141, -38)]:
                continue
            tile_contents = \
                self.collection.create_tile_contents(tile_type_id,
                                                     tile_ftprint,
                                                     ls_bandstack)
            LOGGER.info('Calling reproject for %s tile %s...',
                        dset_record.mdd['processing_level'],
                        tile_ftprint)
            tile_contents.reproject()
            LOGGER.info('...finished')
            if tile_contents.has_data():
                LOGGER.info('tile %s has data',
                            tile_contents.temp_tile_output_path)
                tile_record = dset_record.create_tile_record(tile_contents)
                mosaic_required = tile_record.make_mosaics()
                if not mosaic_required:
                    continue
                # Test mosaic tiles against benchmark.
                # At this stage, the transaction for this dataset is not
                # yet committed, so the tiles from this dataset,
                # including any mosaics, are still in the temporary
                # location.
                if self.POPULATE_EXPECTED:
                    continue
                mosaic_benchmark = \
                    TestTileContents.swap_dir_in_path(
                        tile_contents.mosaic_final_pathname,
                        'output', 'expected')
                mosaic_new = tile_contents.mosaic_temp_pathname
                LOGGER.info("Comparing test output with benchmark:\n"
                            "benchmark: %s\ntest output: %s",
                            mosaic_benchmark, mosaic_new)
                if dset_record.mdd['processing_level'] == 'PQA':
                    LOGGER.info(
                        "For PQA mosaic, calling load_and_check...")
                    ([data1, data2], dummy_nlayers) = \
                        TestLandsatTiler.load_and_check(
                            mosaic_benchmark,
                            mosaic_new,
                            tile_contents.band_stack.band_dict,
                            tile_contents.band_stack.band_dict)
                    LOGGER.info('Checking arrays ...')
                    # NOTE(review): ~ on the numpy bool scalar returned
                    # by .all() acts as logical not here.
                    if ~(data1 == data2).all():
                        self.fail("Difference in PQA mosaic "
                                  "from expected result: %s and %s"
                                  % (mosaic_benchmark, mosaic_new))
                else:
                    # Non-PQA mosaics are VRTs; diff the files, ignoring
                    # lines containing a filename.
                    diff_cmd = [
                        "diff",
                        "-I",
                        "[Ff]ilename",
                        "%s" % mosaic_benchmark,
                        "%s" % mosaic_new
                        ]
                    result = execute(diff_cmd, shell=False)
                    assert result['stdout'] == '', \
                        "Differences between vrt files"
                    assert result['stderr'] == '', \
                        "Error in system diff command"
            else:
                LOGGER.info('... tile has no data')
                tile_contents.remove()
        self.collection.commit_transaction()