def from_zones( dir_name, inv_name, n_inv, n_bins, params, out_name, x, lop, angles, wav, spct, viirs, refl, bool_array, ): print("Building inputs from zones inventory.") # lamps distribution zonData = pt.parse_inventory(inv_name, n_inv) sources = np.unique([lamp[2] for zd in zonData for lamp in zd]) for n in range(n_bins): for s in sources: np.savetxt( dir_name + "fctem_wl_%g_lamp_%s.dat" % (x[n], s), np.concatenate([lop[s], angles]).reshape((2, -1)).T, ) with open(dir_name + "lamps.lst", "w") as zfile: zfile.write("\n".join(sources) + "\n") print("Making zone properties files.") circles = MSD.from_domain("domain.ini") # Same geolocalisation zonfile = np.loadtxt(params["zones_inventory"], usecols=list(range(7)), ndmin=2) # zone number for i, dat in enumerate(zonfile, 1): circles.set_circle((dat[0], dat[1]), dat[2] * 1000, i) circles.save(dir_name + out_name + "_zone") weights = [sum(z[0] for z in zone) for zone in zonData] for w, dat in zip(weights, zonfile): circles.set_circle((dat[0], dat[1]), dat[2] * 1000, bool(w)) circles.save(dir_name + "origin") for n, name in zip(range(3, 7), ["obsth", "obstd", "obstf", "altlp"]): for i, dat in enumerate(zonfile, 1): circles.set_circle((dat[0], dat[1]), dat[2] * 1000, dat[n]) circles.save(dir_name + out_name + "_" + name) print("Inverting lamp intensity.") viirs_dat = MSD.Open("stable_lights.hdf5") for i in range(len(viirs_dat)): viirs_dat[i] *= 1e-5 # nW/cm^2/sr -> W/m^2/sr viirs_dat[i][viirs_dat[i] < 0] = 0.0 water_mask = MSD.Open("water_mask.hdf5") for i, wm in enumerate(water_mask): viirs_dat[i][wm == 0] = 0.0 circles = MSD.Open(dir_name + out_name + "_zone.hdf5") zon_mask = np.empty(len(circles), dtype=object) for i in range(len(zon_mask)): zon_mask[i] = (np.arange(1, len(zonfile) + 1)[:, None, None] == circles[i]) a = np.deg2rad(angles) mids = np.concatenate([[a[0]], np.mean([a[1:], a[:-1]], 0), [a[-1]]]) sinx = 2 * np.pi * (np.cos(mids[:-1]) - np.cos(mids[1:])) # Pixel size in m^2 S = 
np.array([viirs_dat.pixel_size(i)**2 for i in range(len(viirs_dat))]) # Calculate zones lamps zones = pt.make_zones(angles, lop, wav, spct, zonData, sources) # phie = DNB * S / int( R ( rho/pi Gdown + Gup ) ) dlambda Gdown = np.tensordot(zones[:, :, angles > 90], sinx[angles > 90], axes=([2], [0])) Gup = (np.tensordot( zones[:, :, angles < 70], sinx[angles < 70], axes=([2], [0])) / sinx[angles < 70].sum()) integral = np.sum(viirs * (Gdown * refl / np.pi + Gup), (1, 2)) * (wav[1] - wav[0]) phie = [ pt.safe_divide( viirs_dat[i] * S[i], np.sum(zon_mask[i] * integral[:, None, None], 0), ) for i in range(len(S)) ] ratio = [ np.tensordot(zones[..., ind], sinx, axes=([2], [0])).mean(-1) for ind in bool_array ] for n in range(n_bins): r = [ np.sum(zon_mask[layer][:, None] * ratio[n][:, :, None, None], 0) for layer in range(len(phie)) ] for i, s in enumerate(sources): new = MSD.from_domain("domain.ini") for layer in range(len(new)): new[layer] = phie[layer] * r[layer][i] new.save(dir_name + "%s_%g_lumlp_%s" % (out_name, x[n], s))
def batches(input_path, compact, batch_size, batch_name=None):
    """Makes the execution batches.

    INPUT_PATH is the path to the folder containing the inputs.

    BATCH_NAME is an optional name for the produced batch files.
    It overwrites the one defined in 'inputs_params.in' if given.

    Side effects: changes the working directory to INPUT_PATH, rebuilds
    the ``obs_data`` and ``exec`` trees, and writes one batch list file
    per BATCH_SIZE execution folders.
    """
    os.chdir(input_path)

    with open("inputs_params.in") as f:
        params = yaml.safe_load(f)

    if batch_name is not None:
        params["batch_file_name"] = batch_name

    # Remove stale batch lists from a previous run.
    for fname in glob("%s*" % params["batch_file_name"]):
        os.remove(fname)

    exp_name = params["exp_name"]
    ds = MSD.Open(glob("*.hdf5")[0])

    # Pre process the obs extract
    print("Preprocessing...")
    shutil.rmtree("obs_data", True)
    lats, lons = ds.get_obs_pos()
    # NOTE(review): xs/ys are unused; the call is kept in case get_obs_pos
    # has side effects on the dataset — confirm and drop if not.
    xs, ys = ds.get_obs_pos(proj=True)
    for lat, lon in zip(lats, lons):
        for i in range(len(ds)):
            os.makedirs("obs_data/%6f_%6f/%d" % (lat, lon, i))

    # Clip every dataset around each observer and dump per-layer binaries.
    for i, fname in enumerate(progress(glob("*.hdf5")), 1):
        dataset = MSD.Open(fname)
        for clipped in dataset.split_observers():
            lat, lon = clipped.get_obs_pos()
            lat, lon = lat[0], lon[0]
            if "lumlp" in fname:
                clipped.set_buffer(0)
                clipped.set_overlap(0)
            for i, dat in enumerate(clipped):
                # Pad each layer up to the fixed 512x512 model grid.
                padded_dat = np.pad(dat, (512 - dat.shape[0]) // 2, "constant")
                save_bin(
                    "obs_data/%6f_%6f/%i/%s"
                    % (lat, lon, i, fname.rsplit(".", 1)[0] + ".bin"),
                    padded_dat,
                )
            if "srtm" in fname:
                # Zeroed copy of the topography used as an all-blank template.
                for j in range(len(clipped)):
                    clipped[j][:] = 0
                clipped.save("obs_data/%6f_%6f/blank" % (lat, lon))

    # Add wavelength and multiscale
    params["wavelength"] = np.loadtxt("wav.lst", ndmin=1).tolist()
    params["layer"] = list(range(len(ds)))
    params["observer_coordinates"] = list(zip(*ds.get_obs_pos()))

    bandwidth = (params["lambda_max"] - params["lambda_min"]) / params["nb_bins"]

    wls = params["wavelength"]
    refls = np.loadtxt("refl.lst", ndmin=1).tolist()

    # Collapse single-valued lists so they are not treated as a parameter axis.
    for pname in ["layer", "observer_coordinates"]:
        if len(params[pname]) == 1:
            params[pname] = params[pname][0]

    with open("lamps.lst") as f:
        lamps = f.read().split()

    if os.path.isfile("brng.lst"):
        brng = np.loadtxt("brng.lst", ndmin=1)

    # Clear and create execution folder
    dir_name = "exec" + os.sep
    shutil.rmtree(dir_name, True)
    os.makedirs(dir_name)

    count = 0
    multival = [k for k in params if isinstance(params[k], list)]
    multival = sorted(multival, key=len, reverse=True)  # Semi-arbitrary sort
    param_space = [params[k] for k in multival]

    # FIX: np.product was deprecated and removed in NumPy 2.0; np.prod is
    # the supported equivalent.
    N = np.prod([len(p) for p in param_space])

    # One execution per point of the cartesian parameter space.
    for param_vals in progress(product(*param_space), max_value=N):
        local_params = OrderedDict(zip(multival, param_vals))
        P = ChainMap(local_params, params)

        # At zenith (90 deg elevation) all azimuths are equivalent:
        # keep only the first one.
        if ("azimuth_angle" in multival
                and P["elevation_angle"] == 90
                and params["azimuth_angle"].index(P["azimuth_angle"]) != 0):
            continue

        if os.path.isfile("brng.lst"):
            obs_index = (0 if "observer_coordinates" not in multival else
                         params["observer_coordinates"].index(
                             P["observer_coordinates"]))
            bearing = brng[obs_index]
        else:
            bearing = 0

        coords = "%6f_%6f" % P["observer_coordinates"]
        if "observer_coordinates" in multival:
            P["observer_coordinates"] = coords

        # Folder layout: compact mode groups runs that only differ in
        # non-spatial/spectral parameters into a shared folder.
        if compact:
            fold_name = (dir_name + os.sep.join(
                "%s_%s" % (k, v)
                for k, v in local_params.items()
                if k in ["observer_coordinates", "wavelength", "layer"]) +
                os.sep)
        else:
            fold_name = (dir_name +
                         os.sep.join("%s_%s" % (k, v)
                                     for k, v in local_params.items()) +
                         os.sep)

        unique_ID = "-".join("%s_%s" % item for item in local_params.items())

        wavelength = "%g" % P["wavelength"]
        layer = P["layer"]
        reflectance = refls[wls.index(P["wavelength"])]

        if not os.path.isdir(fold_name):
            os.makedirs(fold_name)

        # Linking files
        mie_file = "%s_%s.txt" % (params["aerosol_profile"], wavelength)
        os.symlink(os.path.relpath(mie_file, fold_name),
                   fold_name + "aerosol.txt")
        layer_file = "%s_%s.txt" % (params["layer_type"], wavelength)
        os.symlink(os.path.relpath(layer_file, fold_name),
                   fold_name + "layer.txt")
        os.symlink(
            os.path.relpath("MolecularAbs.txt", fold_name),
            fold_name + "MolecularAbs.txt",
        )
        for i, lamp in enumerate(lamps, 1):
            os.symlink(
                os.path.relpath(
                    "fctem_wl_%s_lamp_%s.dat" % (wavelength, lamp),
                    fold_name,
                ),
                fold_name + exp_name + "_fctem_%03d.dat" % i,
            )

        illumpath = os.path.dirname(illum.__path__[0])
        os.symlink(
            os.path.abspath(illumpath + "/bin/illumina"),
            fold_name + "illumina",
        )

        # Copying layer data
        obs_fold = os.path.join("obs_data", coords, str(layer))
        os.symlink(
            os.path.relpath(os.path.join(obs_fold, "srtm.bin"), fold_name),
            fold_name + exp_name + "_topogra.bin",
        )
        os.symlink(
            os.path.relpath(os.path.join(obs_fold, "origin.bin"), fold_name),
            fold_name + "origin.bin",
        )
        for name in ["obstd", "obsth", "obstf", "altlp"]:
            os.symlink(
                os.path.relpath(
                    os.path.join(obs_fold, "%s_%s.bin" % (exp_name, name)),
                    fold_name,
                ),
                fold_name + "%s_%s.bin" % (exp_name, name),
            )
        for i, lamp in enumerate(lamps, 1):
            os.symlink(
                os.path.relpath(
                    os.path.join(
                        obs_fold,
                        "%s_%s_lumlp_%s.bin" % (exp_name, wavelength, lamp),
                    ),
                    fold_name,
                ),
                fold_name + "%s_lumlp_%03d.bin" % (exp_name, i),
            )

        # Create illumina.in — one ((value, comment), ...) group per line.
        input_data = (
            (("", "Input file for ILLUMINA"), ),
            ((exp_name, "Root file name"), ),
            (
                (ds.pixel_size(layer), "Cell size along X [m]"),
                (ds.pixel_size(layer), "Cell size along Y [m]"),
            ),
            (("aerosol.txt", "Aerosol optical cross section file"), ),
            (
                ("layer.txt", "Layer optical cross section file"),
                (P["layer_aod"], "Layer aerosol optical depth at 500nm"),
                (P["layer_alpha"], "Layer angstom coefficient"),
                (P["layer_height"], "Layer scale height [m]"),
            ),
            ((P["double_scattering"] * 1, "Double scattering activated"), ),
            ((P["single_scattering"] * 1, "Single scattering activated"), ),
            ((wavelength, "Wavelength [nm]"), (bandwidth, "Bandwidth [nm]")),
            ((reflectance, "Reflectance"), ),
            ((P["air_pressure"], "Ground level pressure [kPa]"), ),
            (
                (P["aerosol_optical_depth"],
                 "Aerosol optical depth at 500nm"),
                (P["angstrom_coefficient"], "Angstrom exponent"),
                (P["aerosol_height"], "Aerosol scale height [m]"),
            ),
            ((len(lamps), "Number of source types"), ),
            ((P["stop_limit"], "Contribution threshold"), ),
            (("", ""), ),
            (
                (256, "Observer X position"),
                (256, "Observer Y position"),
                (
                    P["observer_elevation"],
                    "Observer elevation above ground [m]",
                ),
            ),
            ((P["observer_obstacles"] * 1, "Obstacles around observer"), ),
            (
                (P["elevation_angle"], "Elevation viewing angle"),
                (
                    (P["azimuth_angle"] + bearing) % 360,
                    "Azimuthal viewing angle",
                ),
            ),
            ((P["direct_fov"], "Direct field of view"), ),
            (("", ""), ),
            (("", ""), ),
            (("", ""), ),
            ((
                P["reflection_radius"],
                "Radius around light sources where reflextions are computed",
            ), ),
            (
                (
                    P["cloud_model"],
                    "Cloud model: "
                    "0=clear, "
                    "1=Thin Cirrus/Cirrostratus, "
                    "2=Thick Cirrus/Cirrostratus, "
                    "3=Altostratus/Altocumulus, "
                    "4=Cumulus/Cumulonimbus, "
                    "5=Stratocumulus",
                ),
                (P["cloud_base"], "Cloud base altitude [m]"),
                (P["cloud_fraction"], "Cloud fraction"),
            ),
            (("", ""), ),
        )

        with open(fold_name + unique_ID + ".in", "w") as f:
            lines = (input_line(*zip(*line_data)) for line_data in input_data)
            f.write("\n".join(lines))

        # Write execute script (once per folder; compact mode reuses it).
        if not os.path.isfile(fold_name + "execute"):
            with open(fold_name + "execute", "w") as f:
                f.write("#!/bin/sh\n")
                f.write("#SBATCH --job-name=Illumina\n")
                f.write("#SBATCH --time=%d:00:00\n" %
                        params["estimated_computing_time"])
                f.write("#SBATCH --mem=2G\n")
                f.write("cd %s\n" % os.path.abspath(fold_name))
                f.write("umask 0011\n")
            os.chmod(fold_name + "execute", 0o777)

            # Append execution to batch list
            with open(
                    f"{params['batch_file_name']}_{(count//batch_size)+1}",
                    "a") as f:
                f.write("cd %s\n" % os.path.abspath(fold_name))
                f.write("sbatch ./execute\n")
                f.write("sleep 0.05\n")

            count += 1

        # Add current parameters execution to execution script
        with open(fold_name + "execute", "a") as f:
            f.write("cp %s.in illumina.in\n" % unique_ID)
            f.write("./illumina\n")
            f.write("mv %s.out %s_%s.out\n" % (exp_name, exp_name, unique_ID))
            f.write("mv %s_pcl.bin %s_pcl_%s.bin\n" %
                    (exp_name, exp_name, unique_ID))

    print("Final count:", count)
    print("Done.")
def alternate(name, zones, lights):
    """Generates an alternate scenario at constant lumen.

    This scenario will be based on the content of the `Inputs` folder and
    will be placed in a folder named `Inputs_NAME`.

    Parameters:
        name   : suffix for the output folder ``Inputs_NAME``.
        zones  : zones inventory file name, or None.
        lights : discrete lamps inventory file name, or None.
    At least one of ``zones``/``lights`` must be given.
    """
    if zones is None and lights is None:
        print("ERROR: At least one of 'zones' and 'lights' must be provided.")
        raise SystemExit

    dirname = "Inputs_%s/" % name
    if os.path.exists(dirname):
        shutil.rmtree(dirname)
    os.makedirs(dirname)

    with open("inputs_params.in") as f:
        params = yaml.safe_load(f)

    # When both inventories are given, refuse lamps that fall inside an
    # inventory zone (they would be double counted).
    if zones is not None and lights is not None:
        print("Validating the inventories.")
        lamps = np.loadtxt(lights, usecols=[0, 1])
        # NOTE(review): `zones` is rebound from a file name to a coordinate
        # array, then passed to pt.parse_inventory below, whereas inputs()
        # passes a file NAME to the same function — verify this is intended.
        zones = np.loadtxt(params["zones_inventory"], usecols=[0, 1, 2])
        zonData = pt.parse_inventory(zones, 0)
        hasLights = [sum(x[0] for x in z) != 0 for z in zonData]
        circles = MSD.from_domain("domain.ini")
        for dat, b in zip(zones, hasLights):
            circles.set_circle((dat[0], dat[1]), dat[2] * 1000, b)
        zones_ind = MSD.from_domain("domain.ini")
        for i, dat in enumerate(zones, 1):
            zones_ind.set_circle((dat[0], dat[1]), dat[2] * 1000, i)

        failed = set()
        for j, coords in enumerate(lamps, 1):
            for i in range(len(circles)):
                try:
                    col, row = circles._get_col_row(coords, i)
                    # NOTE(review): the bounds check runs AFTER indexing, so
                    # negative indices wrap around (numpy) before being
                    # rejected by the >= 0 guards — fragile but harmless here.
                    if circles[i][row, col] and col >= 0 and row >= 0:
                        zon_ind = zones_ind[i][row, col]
                        failed.add((j, coords[0], coords[1], zon_ind))
                except IndexError:
                    continue
        if len(failed):
            for i, lat, lon, zon_ind in sorted(failed):
                print(
                    "WARNING: Lamp #%d (%.06g,%.06g) falls within non-null zone #%d"
                    % (i, lat, lon, zon_ind))
            raise SystemExit()

    shutil.copy("Inputs/inputs_params.in", dirname)

    print("\nLoading data...")

    # Angular distribution (normalised to 1)
    lop_files = glob("Lights/*.lop")
    angles = np.arange(181, dtype=float)
    lop = {
        os.path.basename(s).rsplit(".", 1)[0].split("_", 1)[0]:
        pt.load_lop(angles, s)
        for s in lop_files
    }

    # Spectral distribution (normalised with scotopric vision to 1)
    wav, viirs = np.loadtxt("Lights/viirs.dat", skiprows=1).T
    viirs = pt.spct_norm(wav, viirs)
    scotopic = pt.load_spct(wav, np.ones(wav.shape), "Lights/scotopic.dat", 1)
    photopic = pt.load_spct(wav, np.ones(wav.shape), "Lights/photopic.dat", 1)

    # ratio_ps == 1.0 -> pure photopic weighting.
    ratio_ps = 1.0
    norm_spectrum = ratio_ps * photopic + (1 - ratio_ps) * scotopic
    spct_files = glob("Lights/*.spct")
    spct = {
        os.path.basename(s).rsplit(".", 1)[0].split("_", 1)[0]:
        pt.load_spct(wav, norm_spectrum, s)
        for s in spct_files
    }

    # Make bins
    if os.path.isfile("spectral_bands.dat"):
        bins = np.loadtxt("spectral_bands.dat", delimiter=",")
        n_bins = bins.shape[0]
    else:
        n_bins = params["nb_bins"]
        lmin = params["lambda_min"]
        lmax = params["lambda_max"]
        limits = np.linspace(lmin, lmax, n_bins + 1)
        bins = np.stack([limits[:-1], limits[1:]], axis=1)

    # (n_bins, len(wav)) mask: which wavelengths fall in each bin.
    bool_array = (wav >= bins[:, 0:1]) & (wav < bins[:, 1:2])
    x = bins.mean(1).tolist()

    out_name = params["exp_name"]

    # ASTER reflectance spectra: wavelength um -> nm, percent -> fraction.
    asper_files = glob("Lights/*.aster")
    asper = {
        os.path.basename(s).split(".", 1)[0]: np.loadtxt(s)
        for s in asper_files
    }
    for type in asper:  # NOTE(review): `type` shadows the builtin
        wl, refl = asper[type].T
        wl *= 1000.0
        refl /= 100.0
        asper[type] = interp(wl, refl, bounds_error=False, fill_value=0.0)(wav)

    # Weighted mix of surface reflectances, normalised by the coefficients.
    sum_coeffs = sum(params["reflectance"][type]
                     for type in params["reflectance"])
    if sum_coeffs == 0:
        sum_coeffs = 1.0
    refl = sum(asper[type] * coeff / sum_coeffs
               for type, coeff in params["reflectance"].items())
    reflect = [np.mean(refl[mask]) for mask in bool_array]
    with open(dirname + "/refl.lst", "w") as zfile:
        zfile.write("\n".join(["%.06g" % n for n in reflect]) + "\n")

    # Photopic/scotopic spectrum
    nspct = ratio_ps * photopic + (1 - ratio_ps) * scotopic
    nspct = nspct / np.sum(nspct)
    nspct = [np.mean(nspct[mask]) for mask in bool_array]

    # Copy aerosol/molecular text files and the terrain into the new folder.
    for aero_file in glob("Inputs/*.txt"):
        shutil.copy(aero_file, aero_file.replace("Inputs", dirname))
    shutil.copy("srtm.hdf5", dirname)

    with open(dirname + "/wav.lst", "w") as zfile:
        zfile.write("\n".join(map(str, x)) + "\n")

    if params["zones_inventory"] is not None:
        dir_name = ".Inputs_zones/"
        inv_name = params["zones_inventory"]
        n_inv = 7
        shutil.rmtree(dir_name, True)
        os.makedirs(dir_name)
        from_zones(
            dir_name,
            inv_name,
            n_inv,
            n_bins,
            params,
            out_name,
            x,
            lop,
            angles,
            wav,
            spct,
            viirs,
            refl,
            bool_array,
        )

        # Rescale the new lumlp maps so the total (spectrally weighted)
        # luminosity matches the original Inputs folder (constant lumen).
        # NOTE(review): `dl` is not defined anywhere in this function —
        # presumably a module-level bin width, otherwise this is a NameError;
        # verify. Likewise the int() cast of a wavelength looked up in the
        # float list `x` assumes bin centers are integral — confirm.
        oldlumlp = MSD.from_domain("domain.ini")
        for fname in glob("Inputs/*lumlp*"):
            ds = MSD.Open(fname)
            wl = int(fname.split("_")[1])
            for i, dat in enumerate(ds):
                oldlumlp[i] += dat * nspct[x.index(wl)] * dl

        newlumlp = MSD.from_domain("domain.ini")
        for fname in glob(os.path.join(dir_name, "*lumlp*")):
            ds = MSD.Open(fname)
            wl = int(fname.split("_")[2])
            for i, dat in enumerate(ds):
                newlumlp[i] += dat * nspct[x.index(wl)] * dl

        ratio = MSD.from_domain("domain.ini")
        for i in range(len(ratio)):
            ratio[i] = pt.safe_divide(oldlumlp[i], newlumlp[i])

        for fname in glob(os.path.join(dir_name, "*lumlp*")):
            ds = MSD.Open(fname)
            for i, dat in enumerate(ratio):
                ds[i] *= dat
            ds.save(fname)

    if params["lamps_inventory"] is not None:
        dir_name = ".Inputs_lamps/"
        shutil.rmtree(dir_name, True)
        os.makedirs(dir_name)
        from_lamps(
            dir_name,
            n_bins,
            params,
            out_name,
            x,
            lop,
            angles,
            wav,
            spct,
            viirs,
            refl,
            bool_array,
        )

    print("Unifying inputs.")

    # Merge the two hidden staging folders into the final scenario folder.
    lfiles = {fname.split(os.sep)[-1] for fname in glob(".Inputs_lamps/*")}
    zfiles = {fname.split(os.sep)[-1] for fname in glob(".Inputs_zones/*")}

    for fname in lfiles - zfiles:
        shutil.move(os.path.join(".Inputs_lamps", fname), dirname)
    for fname in zfiles - lfiles:
        shutil.move(os.path.join(".Inputs_zones", fname), dirname)
    for fname in zfiles & lfiles:
        if "fctem" in fname:
            # Identical emission functions: keep the lamps copy.
            shutil.move(os.path.join(".Inputs_lamps", fname), dirname)
        elif fname.endswith(".lst"):
            # Union of list files, sorted and de-duplicated.
            with open(os.path.join(".Inputs_lamps", fname)) as f:
                ldat = f.readlines()
            with open(os.path.join(".Inputs_zones", fname)) as f:
                zdat = f.readlines()
            with open(os.path.join(dirname, fname), "w") as f:
                f.write("".join(sorted(set(ldat + zdat))))
        elif fname.endswith(".hdf5"):
            # Lamp data overrides zone data where lamps are non-zero.
            ldat = MSD.Open(os.path.join(".Inputs_lamps", fname))
            zdat = MSD.Open(os.path.join(".Inputs_zones", fname))
            for i, dat in enumerate(ldat):
                zdat[i][dat != 0] = dat[dat != 0]
            zdat.save(os.path.join(dirname, fname))
        else:
            print("WARNING: File %s not merged properly." % fname)

    # Ensure an origin map exists even when no zones inventory was used.
    if "origin.hdf5" not in zfiles:
        origin = MSD.from_domain("domain.ini")
        origin.save(dirname + "/origin")

    shutil.rmtree(".Inputs_lamps", True)
    shutil.rmtree(".Inputs_zones", True)

    print("Done.")
def MSDOpen(filename, cached={}):
    """Open an MSD dataset, memoizing the result across calls.

    The mutable default argument is deliberate: it serves as a persistent
    per-process cache keyed by filename, so each file is opened only once.
    """
    try:
        return cached[filename]
    except KeyError:
        cached[filename] = MSD.Open(filename)
        return cached[filename]
def inputs():
    """Prepares the executions inputs.

    Reads ``inputs_params.in`` and the ``Lights`` folder, validates the
    inventories, builds all photometric/reflectance inputs, delegates map
    generation to from_zones()/from_lamps(), merges their outputs into
    ``Inputs/`` and interpolates the obstacle property maps.
    """
    print("Preparing the inputs for the experiment.")

    dir_name = "Inputs/"
    shutil.rmtree(dir_name, True)
    os.makedirs(dir_name)
    shutil.copy("inputs_params.in", dir_name + "inputs_params.in")

    with open("inputs_params.in") as f:
        params = yaml.safe_load(f)

    # When both inventories are given, refuse lamps that fall inside an
    # inventory zone (they would be double counted).
    if (params["zones_inventory"] is not None
            and params["lamps_inventory"] is not None):
        print("Validating the inventories.")
        lamps = np.loadtxt(params["lamps_inventory"], usecols=[0, 1])
        zones = np.loadtxt(params["zones_inventory"], usecols=[0, 1, 2])
        zonData = pt.parse_inventory(params["zones_inventory"], 7)
        hasLights = [sum(x[0] for x in z) != 0 for z in zonData]
        circles = MSD.from_domain("domain.ini")
        for dat, b in zip(zones, hasLights):
            circles.set_circle((dat[0], dat[1]), dat[2] * 1000, b)
        zones_ind = MSD.from_domain("domain.ini")
        for i, dat in enumerate(zones, 1):
            zones_ind.set_circle((dat[0], dat[1]), dat[2] * 1000, i)
        failed = set()
        for j, coords in enumerate(lamps, 1):
            for i in range(len(circles)):
                try:
                    col, row = circles._get_col_row(coords, i)
                    # NOTE(review): the >= 0 guards run AFTER indexing, so
                    # negative indices wrap (numpy) before being rejected —
                    # fragile but the guard prevents false positives.
                    if circles[i][row, col] and col >= 0 and row >= 0:
                        zon_ind = zones_ind[i][row, col]
                        failed.add((j, coords[0], coords[1], zon_ind))
                except IndexError:
                    continue
        if len(failed):
            for i, lat, lon, zon_ind in sorted(failed):
                print(
                    "WARNING: Lamp #%d (%.06g,%.06g) falls within non-null zone #%d"
                    % (i, lat, lon, zon_ind))
            raise SystemExit()

    out_name = params["exp_name"]

    if params["road_orientation"]:
        print("Computing road orientation (Can be slow for large domains)")
        from illum.street_orientation import street_orientation

        with open("domain.ini") as f:
            domain_params = yaml.safe_load(f)

        srs = domain_params["srs"]
        lats, lons = MSD.from_domain("domain.ini").get_obs_pos()
        bearings = street_orientation(lats, lons, srs)
        np.savetxt(dir_name + "/brng.lst", bearings, fmt="%g")

    print("Loading photometry files.")

    # Angular distribution (normalised to 1)
    lop_files = glob("Lights/*.lop")
    angles = np.arange(181, dtype=float)
    lop = {
        os.path.basename(s).rsplit(".", 1)[0].split("_", 1)[0]:
        pt.load_lop(angles, s)
        for s in lop_files
    }

    # Spectral distribution (normalised with scotopric vision to 1)
    wav, viirs = np.loadtxt("Lights/viirs.dat", skiprows=1).T
    viirs /= np.max(viirs)
    wav, norm_spectrum = np.loadtxt("Lights/photopic.dat", skiprows=1).T
    norm_spectrum /= np.max(norm_spectrum)
    spct_files = glob("Lights/*.spct")
    spct = {
        os.path.basename(s).rsplit(".", 1)[0].split("_", 1)[0]:
        pt.load_spct(wav, norm_spectrum, s)
        for s in spct_files
    }

    print("Splitting in wavelengths bins.")
    if os.path.isfile("spectral_bands.dat"):
        bins = np.loadtxt("spectral_bands.dat", delimiter=",")
        n_bins = bins.shape[0]
    else:
        n_bins = params["nb_bins"]
        lmin = params["lambda_min"]
        lmax = params["lambda_max"]
        limits = np.linspace(lmin, lmax, n_bins + 1)
        bins = np.stack([limits[:-1], limits[1:]], axis=1)

    # (n_bins, len(wav)) mask: which wavelengths fall in each bin.
    bool_array = (wav >= bins[:, 0:1]) & (wav < bins[:, 1:2])
    x = bins.mean(1)

    print("Interpolating reflectance.")
    # ASTER spectra: wavelength um -> nm, percent -> fraction.
    aster_files = glob("Lights/*.aster")
    aster = {
        os.path.basename(s).split(".", 1)[0]: np.loadtxt(s)
        for s in aster_files
    }
    for type in aster:  # NOTE(review): `type` shadows the builtin
        wl, refl = aster[type].T
        wl *= 1000.0
        refl /= 100.0
        aster[type] = np.interp(wav, wl, refl)

    # Weighted mix of surface reflectances, normalised by the coefficients.
    sum_coeffs = sum(params["reflectance"][type]
                     for type in params["reflectance"])
    if sum_coeffs == 0:
        sum_coeffs = 1.0
    refl = sum(aster[type] * coeff / sum_coeffs
               for type, coeff in params["reflectance"].items())
    reflect = [np.mean(refl[mask]) for mask in bool_array]
    with open(dir_name + "/refl.lst", "w") as zfile:
        zfile.write("\n".join(["%.06g" % n for n in reflect]) + "\n")

    print("Linking mie files.")
    illumpath = os.path.dirname(illum.__path__[0])
    shutil.copy2(
        os.path.abspath(illumpath + "/Molecular_optics/MolecularAbs.txt"),
        dir_name,
    )
    # Generate the aerosol optical property files for the bin wavelengths.
    OPAC(x)

    shutil.copy("srtm.hdf5", dir_name)

    with open(dir_name + "/wav.lst", "w") as zfile:
        zfile.write("".join("%g\n" % w for w in x))

    if params["zones_inventory"] is not None:
        dir_name = ".Inputs_zones/"
        inv_name = params["zones_inventory"]
        n_inv = 7
        shutil.rmtree(dir_name, True)
        os.makedirs(dir_name)
        from_zones(
            dir_name,
            inv_name,
            n_inv,
            n_bins,
            params,
            out_name,
            x,
            lop,
            angles,
            wav,
            spct,
            viirs,
            refl,
            bool_array,
        )

    if params["lamps_inventory"] is not None:
        dir_name = ".Inputs_lamps/"
        shutil.rmtree(dir_name, True)
        os.makedirs(dir_name)
        from_lamps(
            dir_name,
            n_bins,
            params,
            out_name,
            x,
            lop,
            angles,
            wav,
            spct,
            viirs,
            refl,
            bool_array,
        )

    dir_name = "Inputs/"

    print("Unifying inputs.")

    # Merge the two hidden staging folders into Inputs/.
    lfiles = {fname.split(os.sep)[-1] for fname in glob(".Inputs_lamps/*")}
    zfiles = {fname.split(os.sep)[-1] for fname in glob(".Inputs_zones/*")}

    for fname in lfiles - zfiles:
        shutil.move(os.path.join(".Inputs_lamps", fname), "Inputs")
    for fname in zfiles - lfiles:
        shutil.move(os.path.join(".Inputs_zones", fname), "Inputs")
    for fname in zfiles & lfiles:
        if "fctem" in fname:
            # Identical emission functions: keep the lamps copy.
            shutil.move(os.path.join(".Inputs_lamps", fname), "Inputs")
        elif fname.endswith(".lst"):
            # Union of list files, sorted and de-duplicated.
            with open(os.path.join(".Inputs_lamps", fname)) as f:
                ldat = f.readlines()
            with open(os.path.join(".Inputs_zones", fname)) as f:
                zdat = f.readlines()
            with open(os.path.join("Inputs", fname), "w") as f:
                f.write("".join(sorted(set(ldat + zdat))))
        elif fname.endswith(".hdf5"):
            # Lamp data overrides zone data where lamps are non-zero.
            ldat = MSD.Open(os.path.join(".Inputs_lamps", fname))
            zdat = MSD.Open(os.path.join(".Inputs_zones", fname))
            for i, dat in enumerate(ldat):
                zdat[i][dat != 0] = dat[dat != 0]
            zdat.save(os.path.join("Inputs", fname))
        else:
            print("WARNING: File %s not merged properly." % fname)

    # Ensure an origin map exists even when no zones inventory was used.
    if "origin.hdf5" not in zfiles:
        origin = MSD.from_domain("domain.ini")
        origin.save("Inputs/origin")

    shutil.rmtree(".Inputs_lamps", True)
    shutil.rmtree(".Inputs_zones", True)

    # Interpolation of the obstacles properties:
    # fill undefined pixels by nearest-neighbour from pixels where light
    # sources are defined (origin + optional discrete lights).
    defined = MSD.Open(dir_name + "origin.hdf5")
    lights_file = dir_name + out_name + "_lights.hdf5"
    if os.path.isfile(lights_file):
        lights = MSD.Open(lights_file)
        for i, layer in enumerate(lights):
            defined[i] += layer

    for geo in ["obsth", "obstd", "obstf", "altlp"]:
        geometry = MSD.Open(dir_name + out_name + "_" + geo + ".hdf5")
        for i, mask in enumerate(defined):
            # Empty mask -> no sources on this layer: zero the whole map.
            geometry[i] = (griddata(
                points=np.where(mask),
                values=geometry[i][mask.astype(bool)],
                xi=tuple(np.ogrid[0:mask.shape[0], 0:mask.shape[1]]),
                method="nearest",
            ) if mask.any() else np.zeros_like(geometry[i]))
        geometry.save(dir_name + out_name + "_" + geo)

    print("Done.")