def generate_region(
    workdir: click.Path,
    satellite_data_provider: str = DEF_SAT_PROVIDER,
    brdf_shapefile: click.Path = BRDFSHAPEFILE,
    one_deg_dsm_v1_shapefile: click.Path = ONEDEGDSMV1SHAPEFILE,
    one_sec_dsm_v1_shapefile: click.Path = ONESECDSMV1SHAPEFILE,
    one_deg_dsm_v2_shapefile: click.Path = ONEDEGDSMV2SHAPEFILE,
    world_wrs_shapefile: click.Path = WRSSHAPEFILE,
    world_mgrs_shapefile: click.Path = MGRSSHAPEFILE,
):
    """
    Build the area-of-interest (AOI) file for the given satellite data
    provider by intersecting the global tile grid with the GA ancillary
    extents, and write one allowed region code per line.

    :param workdir: directory in which a unique job dir is created.
    :param satellite_data_provider: "ESA" selects the MGRS grid;
        anything else selects the WRS grid.
    :return: tuple of (path to the written AOI file, list of allowed
        region codes).
    """
    workdir = Path(workdir).resolve()

    # Set up a uniquely-named scene-select job dir inside the work dir.
    jobid = uuid.uuid4().hex[:6]
    jobdir = workdir.joinpath(FMT2.format(jobid=jobid))
    jobdir.mkdir(exist_ok=True)

    logging.basicConfig(filename=jobdir.joinpath(LOG_FILE), level=logging.INFO)

    # Shapefiles whose combined extent defines the GA processing area.
    _extent_list = [
        brdf_shapefile,
        one_deg_dsm_v1_shapefile,
        one_sec_dsm_v1_shapefile,
        one_deg_dsm_v2_shapefile,
    ]
    # ESA products are tiled on the MGRS grid; everything else uses WRS.
    global_tiles_data = Path(world_wrs_shapefile)
    if satellite_data_provider == "ESA":
        global_tiles_data = Path(world_mgrs_shapefile)
    allowed_codes = subset_global_tiles_to_ga_extent(
        global_tiles_data, _extent_list, satellite_data_provider
    )

    # Write the AOI file: one region code per line.
    aoi_filepath = jobdir.joinpath(AOI_FILE)
    with open(aoi_filepath, "w") as f:
        f.writelines(f"{item}\n" for item in allowed_codes)

    return aoi_filepath, allowed_codes  # This is used for testing
def scene_select(
    usgs_level1_files: click.Path,
    allowed_codes: click.Path,
    config: click.Path,
    days_delta: int,
    products: list,
    logdir: click.Path,
    brdfdir: click.Path,
    wvdir: click.Path,
    stop_logging: bool,
    log_config: click.Path,
    scene_limit: int,
    run_ard: bool,
    **ard_click_params: dict,
):
    """
    Select the level-1 scenes to ARD process, write the list and a PBS
    submission script into a unique job dir, and optionally submit it.

    The keys for ard_click_params;
        test: bool, workdir: click.Path, pkgdir: click.Path, env: click.Path,
        workers: int, nodes: int, memory: int, jobfs: int, project: str,
        walltime: str, email: str

    :return: None.  The scene list and ``run_ard_pbs.sh`` are written to
        the job dir, whose path is printed on completion.
    """
    # pylint: disable=R0913, R0914
    # R0913: Too many arguments
    # R0914: Too many local variables

    logdir = Path(logdir).resolve()
    # Any file this function writes goes into a unique scene-select job
    # dir created under the log dir.
    jobdir = logdir.joinpath(FMT2.format(jobid=uuid.uuid4().hex[0:6]))
    jobdir.mkdir(exist_ok=True)

    # FIXME test this
    if not stop_logging:
        gen_log_file = jobdir.joinpath(GEN_LOG_FILE).resolve()
        fileConfig(
            log_config,
            disable_existing_loggers=False,
            defaults={"genlogfilename": str(gen_log_file)},
        )
    # NOTE: the local variable names above are captured into the log
    # record via **locals() — do not rename them.
    LOGGER.info("scene_select", **locals())

    # logdir is used both by scene select and ard,
    # so put it in the ard parameter dictionary.
    ard_click_params["logdir"] = logdir

    # No explicit scene list supplied: filter the ODC and write the
    # result into the job dir under the default name.
    if not usgs_level1_files:
        usgs_level1_files = jobdir.joinpath(ODC_FILTERED_FILE)

    l1_count = l1_scenes_to_process(
        usgs_level1_files,
        products=products,
        brdfdir=Path(brdfdir).resolve(),
        wvdir=Path(wvdir).resolve(),
        region_codes=allowed_codes_to_region_codes(allowed_codes),
        config=config,
        scene_limit=scene_limit,
        days_delta=days_delta,
    )
    # Fill in node/worker defaults sized to the number of scenes.
    _calc_node_with_defaults(ard_click_params, l1_count)

    # Write the PBS submission script.
    run_ard_pathfile = jobdir.joinpath("run_ard_pbs.sh")
    with open(run_ard_pathfile, "w") as src:
        src.write(make_ard_pbs(usgs_level1_files, **ard_click_params))

    # Make the script executable.
    os.chmod(run_ard_pathfile, os.stat(run_ard_pathfile).st_mode | stat.S_IEXEC)

    # Run the script if requested (was `run_ard is True`; plain truth
    # test is the idiomatic check for a bool flag).
    if run_ard:
        subprocess.run([run_ard_pathfile], check=True)

    LOGGER.info("info", jobdir=str(jobdir))
    print("Job directory: " + str(jobdir))