def process_file(file, config):
    """Read and process one file and write the results to disc
    in the same format as the input file.

    :param file: the file to process
    :type file: str
    :param config: the config namespace from the command line parser
    :type config: argparse.Namespace
    """
    # read data
    data, profile = floodfill.read_data(file)

    # process data
    data = floodfill.isolate_burned_pixels(data, config.upper_value, config.lower_value)
    fire_ids, burn_dates = floodfill.floodfill(data, config.cut_off)

    # write data
    fname, ext = os.path.splitext(os.path.basename(file))

    # save burndates if required
    if config.save_bd:
        out_path_bds = os.path.join(config.output_folder, f"{fname}-floodfill_burndates{ext}")
        floodfill.write_data(out_path_bds, burn_dates, profile)

    # save patch ids
    out_path_ids = os.path.join(config.output_folder, f"{fname}-floodfill_ids{ext}")
    floodfill.write_data(out_path_ids, fire_ids, profile)
def process_file(file, config):
    """Read and process one file and write the results to disc
    in the same format as the input file.

    :param file: the file to process
    :type file: str
    :param config: the config namespace from the command line parser
    :type config: argparse.Namespace
    """
    # read data
    data, profile = floodfill.read_data(file)

    # process data
    data = floodfill.isolate_burned_pixels(data, config.upper_value, config.lower_value)
    fire_ids, burn_dates = floodfill.run_algo(
        name=config.algorithm, raster=data, cut_off=config.cut_off)

    # save burndates if required
    if config.save_bd:
        out_path_bds = _get_filename(config.output_folder, file, 'floodfill_burndates')
        floodfill.write_data(out_path_bds, burn_dates, profile)

    # save patch ids
    out_path_ids = _get_filename(config.output_folder, file, 'floodfill_ids')
    floodfill.write_data(out_path_ids, fire_ids, profile)
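The `_get_filename` helper called above is not shown in this listing. A minimal sketch, assuming it simply factors out the path construction that the earlier version did inline (input base name plus a suffix, keeping the original extension); the exact signature is inferred from the call sites and is not confirmed by the original code:

import os


def _get_filename(output_folder, file, suffix):
    """Build an output path from the input file name and a suffix.

    Hypothetical reconstruction: inferred from the call sites above and
    from the inline path construction in the earlier version.
    """
    fname, ext = os.path.splitext(os.path.basename(file))
    return os.path.join(output_folder, f"{fname}-{suffix}{ext}")

Factoring this out removes the duplicated string formatting and lets the tests build the expected output paths with the same helper that `process_file` uses.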
def test_writing(self):
    with tempfile.TemporaryDirectory() as tmp:
        file_name = os.path.join(tmp, 'test.tif')
        floodfill.write_data(file_name, self.data, self.profile)
        self.assertTrue(os.path.isfile(file_name))
        # read data again and check if it is the same
        data_tmp, profile_tmp = floodfill.read_data(file_name)
        self.assertTrue(numpy.all(data_tmp == self.data))
def test_floodfill(self):
    """Test the floodfill algorithm itself."""
    data, _ = floodfill.read_data(TEST_FILE)
    data = floodfill.isolate_burned_pixels(data, 366, 1)
    ids, burn_dates = nogueira_etal.run(data, 3)
    self.assertEqual(ids.max(), 929)  # correct number of fires found?
    # are the burn dates still the same as in the input?
    self.assertTrue(numpy.all(burn_dates == data))
def test_process_file(self):
    data, _ = floodfill.read_data(TEST_FILE)
    with tempfile.TemporaryDirectory() as tmp:
        config = __main__.parse_command_line(
            [f'--input={TEST_FILE}', f'--output-folder={tmp}', '-b'])
        __main__.process_file(config.input, config=config)
        # construct file names
        bd_file = __main__._get_filename(
            config.output_folder, config.input, 'floodfill_burndates')
        id_file = __main__._get_filename(
            config.output_folder, config.input, 'floodfill_ids')
        self.assertTrue(os.path.isfile(bd_file))
        self.assertTrue(os.path.isfile(id_file))
        # read data again and check if it is the same
        burn_dates, _ = floodfill.read_data(bd_file)
        self.assertTrue(numpy.all(burn_dates == data))
def test_cleaning(self):
    """Check if data cleaning works as intended."""
    data, _ = floodfill.read_data(TEST_FILE)
    lower = 1    # lower fire value bound
    upper = 366  # upper fire value bound
    data[0, 0] = -1     # create lower outlier
    data[0, 1] = 10000  # create upper outlier
    data = floodfill.isolate_burned_pixels(data, upper, lower)
    self.assertLessEqual(data.max(), upper)
    min_burned = data[data != 0].min()
    self.assertGreaterEqual(min_burned, lower)
def setUp(self):
    self.data, self.profile = floodfill.read_data(TEST_FILE)
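These test methods assume a standard unittest module around them. A minimal sketch of that scaffolding, with assumed module names and an assumed TEST_FILE location (neither is taken from the original project):

# Hypothetical scaffolding for the tests above; the import layout and the
# TEST_FILE path are assumptions, not taken from the original project.
import os
import tempfile
import unittest

import numpy

import floodfill                      # assumed: module providing read_data, write_data, ...
from floodfill import __main__       # assumed: the package's command-line entry point
from floodfill import nogueira_etal  # assumed: module implementing the floodfill algorithm

TEST_FILE = os.path.join(os.path.dirname(__file__), 'data', 'test.tif')  # assumed path


class TestFloodfill(unittest.TestCase):
    # setUp and the test_* methods shown above go here
    pass


if __name__ == '__main__':
    unittest.main()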