def test_reg_vs_natif(self):
    """Venus natif products must be recognised with the correct metadata."""
    expected = [
        ("ISRAW906", "20180317T120000", "l2a"),
        ("UNH", "20180329T120000", "l1c"),
        ("SUDOUE_6", "20191110T120000", "l1c"),
        ("CHILE", "20200311T120000", "l1c"),
    ]
    for prod, (tile, date, level) in zip(self.prod_vs_nat, expected):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, VenusNatif)
        self.assertEqual(p.level, level)
        self.assertEqual(p.platform, "venus")
        self.assertEqual(p.type, "natif")
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        self.assertEqual(os.path.basename(p.metadata_file), prod.split(".")[0] + ".HDR")
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, True)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict, [{'name': 'XS', 'val': '5 -5'}])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # None of the remaining products may be classified as VenusNatif:
    for prod in self.prod_vs_mus + self.prods_other:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, VenusNatif)
def generate(self, **kwargs):
    """Create a dummy L1 product tree on disk and return it as a MajaProduct."""
    import random
    from Chain import Product
    from Common import TestFunctions, FileSystem
    specifier = random.choice(self.platform_options["L1C"][self.platform])
    orbit = kwargs.get("orbit", random.randint(0, 999))
    version_orbit = kwargs.get("version", random.randint(0, 9))
    if "sentinel2" in self.platform:
        # Native .SAFE naming scheme:
        stamp = self.date.strftime("%Y%m%dT%H%M%S")
        fields = [specifier, "MSIL1C", stamp,
                  "N" + str(orbit).zfill(4),
                  "R" + str(version_orbit).zfill(3),
                  self.tile, stamp + ".SAFE"]
        product_path = os.path.join(self.root, "_".join(fields))
        metadata_path = os.path.join(product_path, "MTD_MSIL1C.xml")
    else:
        # Muscate naming scheme:
        stamp = self.date.strftime("%Y%m%d-%H%M%S")
        fields = [specifier, stamp + "-000", "L2A", self.tile,
                  random.choice("DC"),
                  "V" + str(version_orbit) + "-" + str(version_orbit)]
        product_name = "_".join(fields)
        product_path = os.path.join(self.root, product_name)
        metadata_path = os.path.join(product_path, product_name + "_MTD_ALL.xml")
    self.prod = product_path
    self.mtd = metadata_path
    FileSystem.create_directory(product_path)
    TestFunctions.touch(metadata_path)
    return Product.MajaProduct.factory(self.prod)
def test_gdal_tile_untile(self):
    """Retile a 10x10 raster into 2x2 tiles (overlap 1), then rebuild it via a vrt."""
    raster = np.arange(0., 100.).reshape((10, 10))
    src_path = os.path.join(os.getcwd(), "test_gdal_retile.tif")
    tile_folder = os.path.join(os.getcwd(), "tiled")
    ImageIO.write_geotiff(raster, src_path, self.projection, self.coordinates)
    # Add parasitic file - It should not cause problems
    path_parasite = os.path.join(tile_folder, "tile_01_01.tif")
    FileSystem.create_directory(tile_folder)
    ImageIO.write_geotiff(raster, path_parasite, self.projection, self.coordinates)
    ds_in = GDalDatasetWrapper.from_file(src_path)
    self.assertTrue(os.path.exists(src_path))
    tiles = ImageTools.gdal_retile(ds_in, tile_folder, TileWidth=2, TileHeight=2, Overlap=1)
    self.assertTrue(os.path.isdir(tile_folder))
    self.assertEqual(len(tiles), 81)
    img_read = np.array(ImageIO.tiff_to_array(tiles[-1]))
    expected = np.array([[88, 89], [98, 99]])
    # Some gdal_retile versions are producing the following image:
    # [[87, 89], [97, 99]].
    np.testing.assert_allclose(expected, img_read, atol=1)
    # Untile
    ds_untiled = ImageTools.gdal_buildvrt(*tiles)
    np.testing.assert_allclose(raster, ds_untiled.array, atol=1)
    FileSystem.remove_file(src_path)
    FileSystem.remove_directory(tile_folder)
def test_reg_l8_muscate(self):
    """Landsat-8 Muscate products must be recognised with the correct metadata."""
    expected = [
        ("31TCH", "20170501T103532", "l1c", True),
        ("31TCH", "20170501T103532", "l2a", False),
    ]
    for prod, (tile, date, level, valid) in zip(self.prod_l8_mus, expected):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, Landsat8Muscate)
        self.assertEqual(p.level, level)
        self.assertEqual(p.nodata, 0)
        self.assertEqual(p.platform, "landsat8")
        self.assertEqual(p.type, "muscate")
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        self.assertTrue(os.path.basename(p.metadata_file).endswith("_MTD_ALL.xml"))
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, valid)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict, [{'name': 'XS', 'val': '30 -30'}])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # Every other product type must not be classified as Landsat8Muscate:
    for prod in self.prod_l8_lc1 + self.prod_l8_lc2 + self.prod_l8_nat + self.prods_other:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, Landsat8Muscate)
def test_reg_vs_muscate(self):
    """Venus Muscate products must be recognised with the correct metadata."""
    expected = [
        ("KHUMBU", "20180201T051359", "l1c", True),
        ("KHUMBU", "20180201T051359", "l2a", False),
        ("KHUMBU", "20180201T000000", "l3a", False),
    ]
    for prod, (tile, date, level, valid) in zip(self.prod_vs_mus, expected):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, VenusMuscate)
        self.assertEqual(p.level, level)
        self.assertEqual(p.platform, "venus")
        self.assertEqual(p.type, "muscate")
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        self.assertTrue(os.path.basename(p.metadata_file).endswith("_MTD_ALL.xml"))
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, valid)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict, [{'name': 'XS', 'val': '5 -5'}])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # Every other product type must not be classified as VenusMuscate:
    for prod in self.prod_vs_nat + self.prods_other:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, VenusMuscate)
def test_reg_l8_lc2(self):
    """Landsat-8 Collection-2 products must be recognised with the correct metadata."""
    expected = [("199029", "20170527T120000", "l1c")]
    for prod, (tile, date, level) in zip(self.prod_l8_lc2, expected):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, Landsat8LC2)
        self.assertEqual(p.level, level)
        self.assertEqual(p.platform, "landsat8")
        self.assertEqual(p.type, "natif")
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.nodata, 0)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        self.assertEqual(os.path.basename(p.metadata_file), prod.split(".")[0] + "_MTL.txt")
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, True)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict, [{'name': 'XS', 'val': '30 -30'}])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # Every other product type must not be classified as Landsat8LC2:
    for prod in self.prod_l8_lc1 + self.prod_l8_nat + self.prod_l8_mus + self.prods_other:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, Landsat8LC2)
def test_create_remove_dir(self):
    """Creating then removing a directory must leave nothing behind."""
    dirpath = os.path.join(os.getcwd(), "test_create_remove_dir")
    # This throws a log message
    FileSystem.create_directory(dirpath)
    self.assertTrue(os.path.isdir(dirpath))
    FileSystem.remove_directory(dirpath)
    self.assertFalse(os.path.isdir(dirpath))
    self.assertFalse(os.path.exists(dirpath))
def generate(self, **kwargs):
    """
    Generate a dummy GIPP folder for the current platform and return it as a GippSet.

    :keyword mission: The gipp mission type ("muscate", "natif" or "tm"). Random choice
                      between muscate/natif if not given. "tm" forces platform sentinel2.
    :keyword cams: Whether the CAMS aerosol model files shall be created. Default: True.
    :return: A GippSet pointing to the newly created folder.
    """
    from datetime import datetime
    import random
    from Common import FileSystem
    from Chain.GippFile import GippSet
    mission_param = kwargs.get("mission", random.choice(["muscate", "natif"]))
    if mission_param == "tm":
        # TM gipps only exist for sentinel2:
        self.platform = "sentinel2"
    mission = self.mission_choices[mission_param][self.platform]
    satellites = [self.mission_short[self.platform]
                  ] if self.platform != "sentinel2" else ["S2A", "S2B"]
    with_cams = kwargs.get("cams", True)
    cams_suffix = "_CAMS" if with_cams else ""
    out_path = os.path.join(
        self.root,
        "_".join([self.platform.upper(), mission_param.upper()]) + cams_suffix)
    FileSystem.create_directory(out_path)
    # Aerosol models; CONTINEN is always present, the others only with CAMS:
    models = ["CONTINEN"]
    if with_cams:
        models += ["ORGANICM", "BLACKCAR", "DUST", "SEASALT", "SULPHATE"]
    allsites = "ALLSITES"
    hdr_types = ["ALBD", "DIFT", "DIRT", "TOCR", "WATV"]
    eef_types = ["COMM", "SITE", "SMAC", "EXTL", "QLTL"]
    tm_types = ["COMM", "EXTL", "QLTL"]
    version = random.randint(0, 9999)
    version_str = str(version).zfill(5)
    start_date = datetime(2014, 12, 30)
    for sat in satellites:
        for name in eef_types:
            self._create_hdr(out_path, sat, name, start_date, version_str,
                             allsites, mission, ".EEF")
        for name in hdr_types:
            for model in models:
                basename = self._create_hdr(out_path, sat, name, start_date,
                                            version_str, model, mission, ".HDR")
                dbl_name = os.path.join(out_path, basename + ".DBL.DIR")
                FileSystem.create_directory(dbl_name)
        # For TM: Add an additional set of COMM, EXTL and QLTL files with muscate mission.
        # Cleanup: the mission check was previously re-evaluated inside the loop
        # (`for name in tm_types: if mission_param != "tm": continue`); hoisted here.
        if mission_param == "tm":
            tm_mission = "SENTINEL2"
            tm_version_str = str(version + 10000).zfill(5)
            for name in tm_types:
                self._create_hdr(out_path, sat, name, start_date, tm_version_str,
                                 allsites, tm_mission, ".EEF")
    return GippSet(self.root, self.platform, mission_param, cams=with_cams)
def __init__(self, site, **kwargs):
    """
    Set up the directories needed for MNT (DEM) creation.

    :param site: The site/tile object the DEM is generated for.
    :keyword dem_dir: Output directory for the finished DEM. Default: <tmp>/maja_dem_files
    :keyword wdir: Working directory. A fresh temp dir is created if not given.
    :keyword raw_dem: Directory holding raw DEM archives (needs to be provided for EuDEM).
                      A fresh temp dir is created if not given.
    :keyword raw_gsw: Directory holding raw GSW (surface water) archives.
                      A fresh temp dir is created if not given.
    :keyword dem_version: Optional DEM version identifier. Default: None
    :keyword gsw_threshold: Threshold applied to the GSW water data. Default: 30.
    :keyword gsw_dst: Destination path of the surface water mask.
                      Default: <wdir>/surface_water_mask.tif
    :keyword verbose: If False (the default), run in quiet mode.
    :raises ImportError: If the installed GDAL is older than 2.0.
    """
    # MNT creation relies on gdal >= 2.0:
    if not int(gdal.VersionInfo()) >= 2000000:
        raise ImportError("MNT creation needs Gdal >2.0!")
    self.site = site
    self.dem_dir = kwargs.get(
        "dem_dir", os.path.join(tempfile.gettempdir(), "maja_dem_files"))
    if not os.path.isdir(self.dem_dir):
        FileSystem.create_directory(self.dem_dir)
    # For each of wdir/raw_dem/raw_gsw: use the given folder (creating it if
    # needed) or fall back to a fresh temporary directory.
    self.wdir = kwargs.get("wdir", None)
    if not self.wdir:
        self.wdir = tempfile.mkdtemp(prefix="raw_dem_")
    else:
        FileSystem.create_directory(self.wdir)
    # This needs to be provided for EuDEM:
    self.raw_dem = kwargs.get("raw_dem", None)
    if not self.raw_dem:
        self.raw_dem = tempfile.mkdtemp(prefix="raw_dem_")
    else:
        FileSystem.create_directory(self.raw_dem)
    self.raw_gsw = kwargs.get("raw_gsw", None)
    if not self.raw_gsw:
        self.raw_gsw = tempfile.mkdtemp(prefix="raw_gsw_")
    else:
        FileSystem.create_directory(self.raw_gsw)
    self.gsw_codes = self.get_gsw_codes(self.site)
    self.dem_version = kwargs.get("dem_version", None)
    self.gsw_threshold = kwargs.get("gsw_threshold", 30.)
    self.gsw_dst = kwargs.get(
        "gsw_dst", os.path.join(self.wdir, "surface_water_mask.tif"))
    self.quiet = not kwargs.get("verbose", False)
def reproject(self, **kwargs):
    """
    Reproject the product's raster files into a common EPSG code.

    :keyword out_dir: Existing directory to write into; a subfolder named after the
                      product base is created inside it. Default: self.fpath
    :keyword patterns: List of regex patterns selecting the files to reproject.
                      Default: [r".(tif|jp2)$"]
    :keyword epsg: Target EPSG code. If not given, the EPSG of the first image found
                   is used for all subsequent images.
    :return: The path to the folder containing the reprojected images.
    """
    out_dir = kwargs.get("out_dir", self.fpath)
    assert os.path.isdir(out_dir)
    out_dir = os.path.join(out_dir, self.base)
    FileSystem.create_directory(out_dir)
    patterns = kwargs.get("patterns", [r".(tif|jp2)$"])
    imgs = [self.find_file(pattern=p) for p in patterns]
    epsg = kwargs.get("epsg", None)
    # Flatten the per-pattern lists of matches:
    imgs = [i for img in imgs for i in img]
    for img in imgs:
        if not epsg:
            # NOTE(review): epsg is derived from the first image only and then
            # reused for all remaining ones — presumably to force a single common
            # projection; confirm this is the intended behavior.
            drv = GDalDatasetWrapper.from_file(img)
            epsg = drv.epsg
        outpath = os.path.join(out_dir, os.path.basename(img))
        self._reproject_to_epsg(img, outpath, epsg)
    return out_dir
def generate(self, **kwargs):
    """Create a dummy DTM (.DBL.DIR folder + .HDR file) and return it as a DTMFile."""
    import random
    from Common import FileSystem
    from Chain.AuxFile import DTMFile
    mission_param = kwargs.get("mission", self.get_mission())
    specifier = "_" if self.platform == "sentinel2" else ""
    counter = str(random.randint(0, 1000)).zfill(4)
    basename = "_".join([self.mission_short[self.platform] + specifier,
                         "TEST", "AUX", "REFDE2", self.tile, counter])
    self.dbl = os.path.join(self.root, basename + ".DBL.DIR")
    self.hdr = os.path.join(self.root, basename + ".HDR")
    FileSystem.create_directory(self.dbl)
    self.create_dummy_hdr(self.hdr, mission=mission_param + specifier)
    return DTMFile(self.dbl)
def test_symlink(self):
    """
    Download a sentinel2-tm GippSet and check that symlinking it into a separate
    folder creates the expected number of EEF links, then clean everything up.
    """
    from Common import FileSystem
    g = GippSet(self.root, "sentinel2", "tm")
    self.assertFalse(g.check_completeness())
    g.download()
    self.assertTrue(g.check_completeness())
    symlink_dir = os.path.join(self.root, "symlinks")
    FileSystem.create_directory(symlink_dir)
    # Before linking, no EEF file exists in symlink_dir, so find() is expected to raise:
    with self.assertRaises(ValueError):
        # NOTE(review): assertTrue is called with two arguments here, making the
        # second one the failure message (assertEqual was probably intended).
        # The statement still works because FileSystem.find raises first.
        self.assertTrue(len(FileSystem.find("*EEF", symlink_dir)), 0)
    g.link(symlink_dir)
    self.assertEqual(len(FileSystem.find("*EEF", symlink_dir)), 15)
    FileSystem.remove_directory(symlink_dir)
    self.assertFalse(os.path.isdir(symlink_dir))
    FileSystem.remove_file(os.path.join(self.root, "wget-log"))
    # Only remove the gipp output folder if it is not the current working directory:
    if not os.getcwd() == g.out_path:
        FileSystem.remove_directory(g.out_path)
    self.assertFalse(os.path.exists(g.out_path))
def generate(self, **kwargs):
    """Create a dummy CAMS aux file (.DBL.DIR folder + .HDR file) and return it as a CAMSFile."""
    from datetime import datetime
    from Common import FileSystem
    from Chain.AuxFile import CAMSFile
    end_date = datetime(2099, 1, 1, 23, 59, 59)
    mission_param = kwargs.get("mission", self.get_mission())
    specifier = "_" if self.platform == "sentinel2" else ""
    basename = "_".join([self.mission_short[self.platform],
                         "TEST", "EXO", "CAMS",
                         self.date.strftime("%Y%m%dT%H%M%S"),
                         end_date.strftime("%Y%m%dT%H%M%S")])
    self.dbl = os.path.join(self.root, basename + ".DBL.DIR")
    self.hdr = os.path.join(self.root, basename + ".HDR")
    FileSystem.create_directory(self.dbl)
    self.create_dummy_hdr(self.hdr, mission=mission_param + specifier)
    return CAMSFile(self.dbl)
def setUpClass(cls):
    """
    Create two dummy EuDEM zip archives of reduced resolution in cls.raw_eudem.

    Note those directories are not destroyed after executing tests.
    This is in order to avoid multiple downloads amongst different test classes.
    """
    FileSystem.create_directory(cls.raw_gsw)
    FileSystem.create_directory(cls.raw_eudem)

    def _make_eudem_zip(arr, tile_id, geotransform):
        # Write a dummy EuDEM tile (TIF + sidecar files), zip everything and
        # remove the raw TIF afterwards. De-duplicates the former copy-pasted stanzas.
        stem = os.path.join(cls.raw_eudem, "eu_dem_v11_" + tile_id)
        to_zip = [stem + ext for ext in (".TIF", ".TIF.ovr", ".TIF.aux.xml", ".TFw")]
        write_geotiff(arr, to_zip[0], cls.projection, geotransform)
        for f in to_zip[1:]:
            TestFunctions.touch(f)
        with zipfile.ZipFile(stem + ".zip", 'w') as zip_archive:
            for file in to_zip:
                zip_archive.write(file, compress_type=zipfile.ZIP_DEFLATED)
        FileSystem.remove_file(to_zip[0])

    # Tile E30N20: all zeros
    _make_eudem_zip(np.zeros((400, 400), dtype=np.float32),
                    "E30N20", cls.geotransform_e30n20)
    # Tile E30N30: ones, with some other value (10) in roughly a third of the image
    arr = np.ones((400, 400), dtype=np.float32)
    arr[:, 260:] = 10
    _make_eudem_zip(arr, "E30N30", cls.geotransform_e30n30)
def create_archive(output_dir, netcdf, output_file_basename):
    """
    Create the DBL.DIR folder
    :param output_dir: The output directory
    :param netcdf: The list of netcdf files
    :param output_file_basename: The basename of the cams file of format .._EXO_CAMS_...
    :return: The DBL folder path as well as the relative path to the cams files.
    """
    import shutil
    from Common import FileSystem
    dbl_filename = "{}.DBL".format(output_file_basename)
    dbl_filepath = os.path.join(output_dir, dbl_filename)
    dbl_dir = os.path.join(output_dir, output_file_basename + ".DBL.DIR")
    FileSystem.create_directory(dbl_dir)
    relative_cams_files = []
    for ncf in netcdf:
        fname = os.path.basename(ncf)
        # Copy each netcdf into the DBL.DIR and record its path relative to the DBL:
        shutil.copy(ncf, os.path.join(dbl_dir, fname))
        relative_cams_files.append(os.path.join(dbl_filename + ".DIR", fname))
    return dbl_filepath, relative_cams_files
def generate(self, **kwargs):
    """Create a dummy L2A (Muscate) product tree on disk and return it as a MajaProduct."""
    import random
    from Chain import Product
    from Common import TestFunctions, FileSystem
    specifier = random.choice(self.platform_options["L2A"][self.platform])
    ms = kwargs.get("ms", random.randint(0, 999))
    version = kwargs.get("version", random.randint(0, 9))
    stamp = self.date.strftime("%Y%m%d-%H%M%S-") + str(ms).zfill(3)
    product_name = "_".join([specifier, stamp, "L2A", self.tile,
                             random.choice("DC"),
                             "V" + str(version) + "-" + str(version)])
    product_path = os.path.join(self.root, product_name)
    self.prod = product_path
    metadata_path = os.path.join(product_path, product_name + "_MTD_ALL.xml")
    self.mtd = metadata_path
    FileSystem.create_directory(product_path)
    TestFunctions.touch(metadata_path)
    return Product.MajaProduct.factory(self.prod)
def setUpClass(cls):
    """
    Build the full dummy processing environment for the StartMaja tests:
    L1/L2 products, a folders file, an MNT and a set of CAMS files.
    """
    from Chain import DummyFiles
    from Common import FileSystem
    cls.product_root = os.path.join(cls.root, cls.tile)
    FileSystem.create_directory(cls.product_root)
    # Two anchor L1 products delimiting the processing period:
    DummyFiles.L1Generator(cls.product_root,
                           tile=cls.tile,
                           date=cls.start_product,
                           platform="sentinel2").generate()
    DummyFiles.L1Generator(cls.product_root,
                           tile=cls.tile,
                           date=cls.end_product,
                           platform="sentinel2").generate()
    # L1+L2 pairs on the requested tile at random dates (set() drops duplicate dates):
    for d in set([DummyFiles.random_date() for _ in range(cls.n_dummies)]):
        DummyFiles.L1Generator(cls.product_root,
                               tile=cls.tile,
                               date=d,
                               platform="sentinel2").generate()
        DummyFiles.L2Generator(cls.product_root,
                               tile=cls.tile,
                               date=d,
                               platform="sentinel2").generate()
    # Products without the fixed tile — presumably these should NOT be picked up
    # by the run (n_not_used); confirm against the tests using them:
    for d in set([DummyFiles.random_date() for _ in range(cls.n_not_used)]):
        DummyFiles.L1Generator(cls.product_root,
                               date=d,
                               platform="sentinel2").generate()
        DummyFiles.L2Generator(cls.product_root,
                               date=d,
                               platform="sentinel2").generate()
    # Write a folders file pointing every repository to the current working dir:
    cls.folders_file = os.path.join(cls.root, "test_working_folders_file.txt")
    modify_folders_file(cls.template_folders_file, new_file=cls.folders_file,
                        exeMaja=os.getcwd(),
                        repWork=os.getcwd(),
                        repGIPP=os.getcwd(),
                        repL1=os.getcwd(),
                        repL2=os.getcwd(),
                        repMNT=os.getcwd())
    cls.mnt = DummyFiles.MNTGenerator(root=cls.root,
                                      tile=cls.tile,
                                      platform="sentinel2")
    cls.mnt.generate()
    # CAMS files for both anchor dates plus a few undated ones:
    cls.cams = os.path.join(cls.root, "CAMS")
    os.makedirs(cls.cams)
    DummyFiles.CAMSGenerator(cls.cams, cls.start_product).generate()
    DummyFiles.CAMSGenerator(cls.cams, cls.end_product).generate()
    for _ in range(cls.n_not_used):
        DummyFiles.CAMSGenerator(cls.cams).generate()
    assert os.path.isfile(cls.folders_file)
def test_reg_s2_natif(self):
    """Sentinel-2 natif (.SAFE) products must be recognised with the correct metadata."""
    tiles = ["29RPQ", "32TMR", "55HBC", "55HBD"]
    dates = [
        "20170412T110621", "20180316T103021", "20160814T002112",
        "20160926T003552"
    ]
    levels = ["l1c", "l1c", "l2a", "l2a"]
    for prod, tile, date, level in zip(self.prod_s2_nat, tiles, dates, levels):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, Sentinel2Natif)
        self.assertEqual(p.level, level)
        self.assertEqual(p.platform, "sentinel2")
        self.assertEqual(p.type, "natif")
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        # Bugfix: this was `assertTrue(basename, "MTD_MSIL1C.xml")`, which always
        # passes because the 2nd argument of assertTrue is only the failure message.
        # Check against the SAFE metadata names, which depend on the level:
        self.assertIn(os.path.basename(p.metadata_file),
                      ["MTD_MSIL1C.xml", "MTD_MSIL2A.xml"])
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, True)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict, [{
            "name": "R1",
            "val": "10 -10"
        }, {
            "name": "R2",
            "val": "20 -20"
        }])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # Other prods:
    for prod in self.prod_s2_prd + self.prod_s2_ssc + self.prod_s2_mus + self.prods_other:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, Sentinel2Natif)
def test_reg_s2_muscate(self):
    """Sentinel-2 Muscate products must be recognised with the correct metadata."""
    expected = [
        ("31TCH", "20171008T105012", "l1c", True),
        ("31TCH", "20161206T105012", "l2a", False),
        ("31UFR", "20190415T000000", "l3a", False),
    ]
    for prod, (tile, date, level, valid) in zip(self.prod_s2_mus, expected):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, Sentinel2Muscate)
        self.assertEqual(p.level, level)
        self.assertEqual(p.platform, "sentinel2")
        self.assertEqual(p.type, "muscate")
        self.assertEqual(p.nodata, -10000)
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        self.assertTrue(os.path.basename(p.metadata_file).endswith("_MTD_ALL.xml"))
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, valid)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict,
                         [{"name": "R1", "val": "10 -10"},
                          {"name": "R2", "val": "20 -20"}])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # Every other product type must not be classified as Sentinel2Muscate:
    for prod in self.prod_s2_prd + self.prod_s2_ssc + self.prod_s2_nat + self.prods_other:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, Sentinel2Muscate)
def __init__(self, root, platform, gtype, cams=False, **kwargs):
    """
    Set the path to the root gipp folder
    :param root: The full path to the root gipp folder
    :param platform: The platform name. Has to be in ["sentinel2", "landsat8", "venus"]
    :param gtype: The gipp type. Has to be in ["muscate", "natif", "tm"]
    :param cams: Build GIPP with CAMS models
    :param log_level: The log level for the messages displayed.
    :raises ValueError: If the platform is unknown or no plugin of the given type
                        exists for the platform.
    """
    from Common import FileSystem
    self.fpath = os.path.realpath(root)
    if platform not in self.platforms:
        raise ValueError("Unknown platform found: %s" % platform)
    if gtype not in self.plugins[platform]:
        raise ValueError("No Plugin of type %s existing for platform %s" % (gtype, platform))
    # TM gipps do not exist for landsat8/venus — fall back to natif there:
    if gtype == "tm" and platform in ["landsat8", "venus"]:
        self.gtype = "natif"
    else:
        self.gtype = gtype
    self.platform = platform
    self.cams_suffix = "_CAMS" if cams else ""
    self.log_level = kwargs.get("log_level", logging.INFO)
    self.lut_date = kwargs.get("date", datetime.now())
    # Reference date for the CAMS 46r1 model — presumably the date the model
    # became active; confirm against the download logic using it:
    self.cams_46r1 = datetime(2019, 7, 10)
    # Sentinel2 has two satellites (S2A/S2B); all other platforms have one:
    self.n_sat = 2 if platform == "sentinel2" else 1
    # Create root if not existing:
    FileSystem.create_directory(self.fpath)
    # Create folder names:
    self.gipp_archive = os.path.join(self.fpath, "archive.zip")
    self.lut_archive = os.path.join(self.fpath, "lut_archive.zip")
    self.temp_folder = os.path.join(self.fpath, "tempdir")
    self.gipp_folder_name = "%s_%s" % (
        self.platform.upper(), self.gtype.upper()) + self.cams_suffix
    self.out_path = os.path.join(self.fpath, self.gipp_folder_name)
def test_reg_s2_ssc(self):
    """Sentinel-2 SSC (natif EarthExplorer) products must be recognised correctly."""
    expected = [
        ("36JTT", "20160914T120000", "l2a", True),
        ("21MXT", "20180925T120000", "l1c", True),
    ]
    for prod, (tile, date, level, valid) in zip(self.prod_s2_ssc, expected):
        p = MajaProduct.factory(prod)
        self.assertIsInstance(p, Sentinel2SSC)
        self.assertEqual(p.level, level)
        self.assertEqual(p.platform, "sentinel2")
        self.assertEqual(p.type, "natif")
        self.assertEqual(p.tile, tile)
        self.assertEqual(p.date.strftime("%Y%m%dT%H%M%S"), date)
        self.assertEqual(os.path.basename(p.metadata_file), prod.split(".")[0] + ".HDR")
        self.assertTrue(os.path.exists(p.metadata_file))
        self.assertEqual(p.validity, valid)
        link_dir = "linkdir"
        FileSystem.create_directory(link_dir)
        p.link(link_dir)
        self.assertTrue(os.path.islink(os.path.join(link_dir, p.base)))
        self.assertEqual(p.mnt_resolutions_dict,
                         [{"name": "R1", "val": "10 -10"},
                          {"name": "R2", "val": "20 -20"}])
        self.assertEqual(p, p)
        FileSystem.remove_directory(link_dir)
    # NOTE(review): unlike the sibling tests, self.prods_other is not part of this
    # list — confirm whether that omission is intentional.
    for prod in self.prod_s2_prd + self.prod_s2_nat + self.prod_s2_mus:
        p = MajaProduct.factory(prod)
        self.assertNotIsInstance(p, Sentinel2SSC)
def setUp(self):
    """
    Create the dummy inputs used by each test: L1 products (on the fixed tile
    T11ABC plus some on random tiles), L2 products, a gipp set and a CAMS file.
    """
    self.wdir = os.path.join(os.getcwd(), "wp_wdir")
    self.outdir = os.path.join(os.getcwd(), "wp_outdir")
    # One reference L1 product plus three more on the same tile:
    self.l1 = DummyFiles.L1Generator(root=os.getcwd(),
                                     tile="T11ABC",
                                     platform="sentinel2").generate()
    self.l1_list = [DummyFiles.L1Generator(root=os.getcwd(),
                                           tile="T11ABC",
                                           platform="sentinel2").generate()
                    for _ in range(3)]
    # L1 products on random tiles (no tile given):
    self.other_l1 = [DummyFiles.L1Generator(root=os.getcwd()).generate()
                     for _ in range(3)]
    self.l2 = DummyFiles.L2Generator(root=self.outdir,
                                     tile="T11ABC",
                                     platform="sentinel2").generate()
    self.other_l2 = [DummyFiles.L2Generator(root=self.outdir).generate()
                     for _ in range(3)]
    self.gipp_dir = os.path.join(os.getcwd(), "gipp_dir")
    FileSystem.create_directory(self.gipp_dir)
    self.gipp = DummyFiles.GippGenerator(root=self.gipp_dir,
                                         platform="sentinel2").generate()
    # CAMS file dated to the reference L1 product:
    self.cams = DummyFiles.CAMSGenerator(root=os.getcwd(),
                                         date=self.l1.date,
                                         platform="sentinel2").generate()
    FileSystem.create_directory(self.wdir)
    FileSystem.create_directory(self.outdir)
def __set_input_paths(self):
    """
    Set the available L1 and L2 input paths.
    :return: The L1-, L2- folders (creating the latter if needed).
             An info-string whether site and/or tile is used.
    """
    if self.site:
        # With a site, products live under <rep>/<site>/<tile>:
        site_l1 = FileSystem.find_single(r"^%s$" % self.site, self.rep_l1)
        self.logger.warning(self.tile)
        # Tile folders may or may not carry the leading "T":
        path_input_l1 = FileSystem.find_single(r"^T?%s$" % self.tile, site_l1)
        # The L2 side is created on demand if not found:
        try:
            site_l2 = FileSystem.find_single(r"^%s$" % self.site, self.rep_l2)
        except ValueError:
            site_l2 = os.path.join(self.rep_l2, self.site)
            FileSystem.create_directory(site_l2)
        try:
            path_input_l2 = FileSystem.find_single(r"^T?%s$" % self.tile, site_l2)
        except ValueError:
            path_input_l2 = os.path.join(self.rep_l2, self.site, self.tile)
            FileSystem.create_directory(path_input_l2)
        site_info = "site %s and tile %s" % (self.site, self.tile)
    else:
        # Without a site, products live directly under <rep>/<tile>:
        path_input_l1 = FileSystem.find_single(r"^T?%s$" % self.tile, self.rep_l1)
        try:
            path_input_l2 = FileSystem.find_single(r"^T?%s$" % self.tile, self.rep_l2)
        except ValueError:
            path_input_l2 = os.path.join(self.rep_l2, self.tile)
            FileSystem.create_directory(path_input_l2)
        site_info = "tile %s" % self.tile
    return path_input_l1, path_input_l2, site_info
def get_synthetic_band(self, synthetic_band, **kwargs):
    """
    Create a synthetic band for the product, re-using an existing file if present.

    :param synthetic_band: The band name: "ndvi", "ndsi" or "mca_sim" (case-insensitive).
    :keyword wdir: The directory in which the output folder is created. Default: self.fpath
    :keyword output_filename: Override for the full output path.
    :keyword max_value: Upper scaling value for ndvi/ndsi. Default: 10000.
    :return: The path to the created (or already existing) GeoTIFF.
    :raises ValueError: If an unknown synthetic band is requested.
    """
    band = synthetic_band.lower()
    wdir = kwargs.get("wdir", self.fpath)
    output_folder = os.path.join(wdir, self.base)
    output_bname = "_".join([self.base.split(".")[0], synthetic_band.upper() + ".tif"])
    output_filename = kwargs.get("output_filename",
                                 os.path.join(output_folder, output_bname))
    max_value = kwargs.get("max_value", 10000.)
    # Skip existing:
    if os.path.exists(output_filename):
        return output_filename
    # Validate before touching the filesystem; previously each branch duplicated
    # the create_directory call:
    if band not in ("ndvi", "ndsi", "mca_sim"):
        raise ValueError("Unknown synthetic band %s" % synthetic_band)
    FileSystem.create_directory(output_folder)
    if band == "ndvi":
        b4 = self.find_file(pattern=r"*B0?4(_10m)?.jp2$")[0]
        b8 = self.find_file(pattern=r"*B0?8(_10m)?.jp2$")[0]
        ds_red = GDalDatasetWrapper.from_file(b4)
        ds_nir = GDalDatasetWrapper.from_file(b8)
        ds_ndvi = ImageApps.get_ndvi(ds_red, ds_nir, vrange=(0, max_value), dtype=np.int16)
        ds_ndvi.write(output_filename, options=["COMPRESS=DEFLATE"])
    elif band == "ndsi":
        # NDSI needs the green band resampled to the 20m swir resolution:
        b3 = self.find_file(pattern=r"*B0?3(_10m)?.jp2$")[0]
        b11 = self.find_file(pattern=r"*B11(_20m)?.jp2$")[0]
        ds_green = ImageTools.gdal_translate(b3, tr="20 20", r="cubic")
        ds_swir = GDalDatasetWrapper.from_file(b11)
        ds_ndsi = ImageApps.get_ndsi(ds_green, ds_swir, vrange=(0, max_value), dtype=np.int16)
        ds_ndsi.write(output_filename, options=["COMPRESS=DEFLATE"])
    else:
        # mca_sim: simple mean of red and green
        b4 = self.find_file(pattern=r"*B0?4(_10m)?.jp2$")[0]
        b3 = self.find_file(pattern=r"*B0?3(_10m)?.jp2$")[0]
        img_red, drv = ImageIO.tiff_to_array(b4, array_only=False)
        img_green = ImageIO.tiff_to_array(b3)
        img_mcasim = (img_red + img_green) / 2
        ImageIO.write_geotiff_existing(img_mcasim, output_filename, drv,
                                       options=["COMPRESS=DEFLATE"])
    return output_filename
def to_maja_format(self, platform_id, mission_field, mnt_resolutions, coarse_res, full_res_only=False):
    """
    Writes an MNT in Maja (=EarthExplorer) format: A folder .DBL.DIR containing the rasters
    and an accompanying .HDR xml-file.
    The two files follow the maja syntax::
        *AUX_REFDE2*.(HDR|DBL.DIR)
    :param platform_id: The platform ID of two digits (e.g. S2_ for Sentinel2A/B; VS for Venus)
    :param mission_field: Similar to the platform ID, this is used in the <Mission>-field
                          for the HDR file. e.g. SENTINEL-2 for S2
    :param mnt_resolutions: A dict containing the resolutions for the given sensor. E.g.::
        {"XS": (10, -10)}
    :param coarse_res: A tuple of int describing the coarse resolution. E.g.::
        (240, -240).
    :param full_res_only: If True, no coarse_res rasters will be created.
    :return: Writes the .DBL.DIR and .HDR into the specified self.dem_dir
    """
    assert len(mnt_resolutions) >= 1
    basename = str("%s_TEST_AUX_REFDE2_%s_%s" % (platform_id, self.site.nom,
                                                 str(self.dem_version).zfill(4)))
    # Get mnt data
    mnt_max_res = self.prepare_mnt()
    # Water mask not needed with optional coarse_res writing:
    if coarse_res and not full_res_only:
        # Get water data
        self.prepare_water_data()
    mnt_res = (self.site.res_x, self.site.res_y)
    dbl_base = basename + ".DBL.DIR"
    dbl_dir = os.path.join(self.dem_dir, dbl_base)
    FileSystem.create_directory(dbl_dir)
    hdr = os.path.join(self.dem_dir, basename + ".HDR")
    # Calculate gradient mask at MNT resolution:
    mnt_in, drv = ImageIO.tiff_to_array(mnt_max_res, array_only=False)
    grad_y_mnt, grad_x_mnt = self.calc_gradient(mnt_in, self.site.res_x, self.site.res_y)
    # The first resolution entry defines the full output resolution:
    full_res = (int(mnt_resolutions[0]["val"].split(" ")[0]),
                int(mnt_resolutions[0]["val"].split(" ")[1]))
    grad_x = self.resample_to_full_resolution(grad_x_mnt,
                                              mnt_resolution=mnt_res,
                                              full_resolution=full_res,
                                              order=3)
    grad_y = self.resample_to_full_resolution(grad_y_mnt,
                                              mnt_resolution=mnt_res,
                                              full_resolution=full_res,
                                              order=3)
    slope, aspect = self.calc_slope_aspect(grad_y, grad_x)
    # Write full res slope and aspect to temporary files:
    geotransform = list(drv.GetGeoTransform())
    geotransform[1] = float(full_res[0])
    geotransform[-1] = float(full_res[1])
    projection = drv.GetProjection()
    tmp_asp = tempfile.mktemp(dir=self.wdir, suffix="_asp.tif")
    ImageIO.write_geotiff(aspect, tmp_asp, projection, tuple(geotransform))
    tmp_slp = tempfile.mktemp(dir=self.wdir, suffix="_slp.tif")
    ImageIO.write_geotiff(slope, tmp_slp, projection, tuple(geotransform))
    # Full resolution:
    write_resolution_name = True if len(mnt_resolutions) > 1 else False  # Names for R1, R2 etc.
    rasters_written = []
    path_alt, path_asp, path_slp = "", "", ""
    all_paths_alt = []
    for res in mnt_resolutions:
        # ALT:
        bname_alt = basename + "_ALT"
        bname_alt += "_" + str(res["name"]) if write_resolution_name else ""
        bname_alt += ".TIF"
        rel_alt = os.path.join(dbl_base, bname_alt)
        path_alt = os.path.join(self.dem_dir, rel_alt)
        all_paths_alt.append(path_alt)
        ImageTools.gdal_warp(mnt_max_res, dst=path_alt, tr=res["val"], r="cubic", multi=True)
        rasters_written.append(rel_alt)
        # ASP:
        bname_asp = basename + "_ASP"
        bname_asp += "_" + res["name"] if write_resolution_name else ""
        bname_asp += ".TIF"
        rel_asp = os.path.join(dbl_base, bname_asp)
        path_asp = os.path.join(self.dem_dir, rel_asp)
        ImageTools.gdal_warp(tmp_asp, dst=path_asp, tr=res["val"], r="cubic", multi=True)
        rasters_written.append(rel_asp)
        # SLP:
        bname_slp = basename + "_SLP"
        bname_slp += "_" + res["name"] if write_resolution_name else ""
        bname_slp += ".TIF"
        rel_slp = os.path.join(dbl_base, bname_slp)
        path_slp = os.path.join(self.dem_dir, rel_slp)
        ImageTools.gdal_warp(tmp_slp, dst=path_slp, tr=res["val"], r="cubic", multi=True)
        rasters_written.append(rel_slp)
    # Optional coarse_res writing:
    if coarse_res and not full_res_only:
        # Resize all rasters for coarse res.
        coarse_res_str = str(coarse_res[0]) + " " + str(coarse_res[1])
        # ALC (coarse altitude, warped from the last full-res ALT):
        bname_alc = basename + "_ALC.TIF"
        rel_alc = os.path.join(dbl_base, bname_alc)
        path_alc = os.path.join(self.dem_dir, rel_alc)
        ImageTools.gdal_warp(path_alt, dst=path_alc, tr=coarse_res_str, multi=True)
        rasters_written.append(rel_alc)
        # ASC (coarse aspect):
        bname_asc = basename + "_ASC.TIF"
        rel_asc = os.path.join(dbl_base, bname_asc)
        path_asc = os.path.join(self.dem_dir, rel_asc)
        ImageTools.gdal_warp(path_asp, dst=path_asc, tr=coarse_res_str, multi=True)
        rasters_written.append(rel_asc)
        # SLC (coarse slope):
        bname_slc = basename + "_SLC.TIF"
        rel_slc = os.path.join(dbl_base, bname_slc)
        path_slc = os.path.join(self.dem_dir, rel_slc)
        ImageTools.gdal_warp(path_slp, dst=path_slc, tr=coarse_res_str, multi=True)
        rasters_written.append(rel_slc)
        # Water mask:
        bname_msk = basename + "_MSK.TIF"
        rel_msk = os.path.join(dbl_base, bname_msk)
        path_msk = os.path.join(self.dem_dir, rel_msk)
        ImageTools.gdal_warp(self.gsw_dst, dst=path_msk, tr=coarse_res_str, multi=True)
        rasters_written.append(rel_msk)
    # Write HDR Metadata:
    date_start = datetime(1970, 1, 1)
    date_end = datetime(2100, 1, 1)
    dem_info = DEMInfo(self.site, all_paths_alt[0])
    root = self._get_root()
    self._create_hdr(root, mission_field, basename, rasters_written,
                     dem_info, date_start, date_end, self.dem_version)
    XMLTools.write_xml(root, hdr)
    # Remove temp files:
    FileSystem.remove_file(tmp_asp)
    FileSystem.remove_file(tmp_slp)
    FileSystem.remove_file(mnt_max_res)
    return hdr, dbl_dir
def setUpClass(cls):
    """Create the shared root directory used by all tests in this class."""
    from Common import FileSystem
    FileSystem.create_directory(cls.root)
def setUpClass(cls):
    """
    Create the raw GSW and SRTM download directories.

    Note those directories are not destroyed after executing tests.
    This is in order to avoid multiple downloads amongst different test classes.
    """
    FileSystem.create_directory(cls.raw_gsw)
    FileSystem.create_directory(cls.raw_srtm)
time = ["00", "12"] # Step du forecast voulu # step = 3 indique qu'on telecharge les previsions a 3h apres l'heure de l'analyse. # Exemples : time = 00 et step = 3 => 03:00:00 UTC # time = 12 et step = 3 => 15:00:00 UTC step_choice = "3" # Choix des fichiers a telecharger # Surface : AOT (aerosol optical thickness) # Pressure : RH (relative humidity) # Model : MR (mixing ratios) ftype = {'surface': True, 'pressure': True, 'model': True} # Create directories from Common import FileSystem FileSystem.create_directory(args.archive_dir) FileSystem.create_directory(args.write_dir) # Boucle sur les jours a telecharger for i in range(nb_days): date = dt1 + datetime.timedelta(days=i) print("==================================") print("Downloading files for date %s" % date) print("==================================") for t in range(len(time)): aot, mr, rh = download_files(date, ftype, time[t], step_choice, args.write_dir) # Conversion to MAJA DBL/HDR format RawCAMSArchive.process_one_file(args.archive_dir, aot, rh, mr, args.platform) if not args.keep:
def parse_config(self, cfg_file):
    """
    Read contents of the config/folders.txt file containing:
    Required params: repWork, repL1, repL2, repMNT, exeMaja
    Optional params: repCAMS
    :param cfg_file: The path to the file
    :return: The parsed paths for each of the directories. None for the optional ones if not given.
    """
    import configparser as cfg

    # Parsing configuration file
    config = cfg.ConfigParser()
    config.read(cfg_file)

    def _expand_and_create(section, option):
        # Resolve a configured path to an absolute one, creating the
        # directory if it does not exist yet. De-duplicates the previously
        # copy-pasted realpath/expanduser/isdir/create pattern.
        path = os.path.realpath(os.path.expanduser(config.get(section, option)))
        if not p.isdir(path):
            FileSystem.create_directory(path)
        return path

    # Maja_Inputs
    rep_work = _expand_and_create("Maja_Inputs", "repWork")
    rep_gipp = _expand_and_create("Maja_Inputs", "repGipp")
    rep_l1 = StartMaja.__read_config_param(config, "Maja_Inputs", "repL1")
    rep_l2 = _expand_and_create("Maja_Inputs", "repL2")
    exe_maja = StartMaja.__read_config_param(config, "Maja_Inputs", "exeMaja")
    rep_mnt = _expand_and_create("Maja_Inputs", "repMNT")
    # CAMS is optional:
    try:
        rep_cams = os.path.realpath(
            os.path.expanduser(config.get("Maja_Inputs", "repCAMS")))
    except cfg.NoOptionError:
        self.logger.warning("repCAMS is missing. Processing without CAMS")
        rep_cams = None
    # DTM_Creation
    rep_raw = _expand_and_create("DTM_Creation", "repRAW")
    rep_gsw = _expand_and_create("DTM_Creation", "repGSW")
    return rep_work, rep_gipp, rep_l1, rep_l2, exe_maja, rep_cams, rep_mnt, rep_raw, rep_gsw