def main():
    """Prepare an SRTM DEM mosaic covering all scenes in the working directory.

    Reads PAR/srtm_preparation.par, collects the required SRTM hgt tiles
    (locally and/or from the server via hgt_collect), mosaics them when more
    than one tile is needed, fills data gaps and optionally reprojects the
    final DEM to UTM.
    NOTE(review): indentation reconstructed from a collapsed source line.
    """
    print "#############################################"
    print "preparing SRTM mosaic:"
    # read parameter textfile
    par = ReadPar(os.path.join(os.getcwd(), "PAR/srtm_preparation.par"))
    # optional local archive of hgt tiles, checked before any download
    demdir = None
    if hasattr(par, "SRTM_archive"):
        if os.path.isdir(par.SRTM_archive):
            demdir = par.SRTM_archive
    # gamma parameter files of all scenes; used to derive required tile ids
    parfiles = finder(os.getcwd(), ["*slc.par", "*mli.par", "*cal.par"])
    # define (and create) directories for processing results and logfile
    path_dem = os.path.join(os.getcwd(), "DEM/")
    path_log = os.path.join(os.getcwd(), "LOG/GEO/")
    for path in [path_log, path_dem]:
        if not os.path.exists(path):
            os.makedirs(path)
    # find SRTM tiles for mosaicing
    demlist = hgt_collect(parfiles, path_dem, demdir=demdir)
    # remove files created by this function (leftovers of previous runs)
    for item in finder(path_dem, ["mosaic*", "dem*", "*.par"]):
        os.remove(item)
    if len(demlist) == 0:
        raise IOError("no hgt files found")
    # perform mosaicing if multiple files are found
    if len(demlist) > 1:
        print "mosaicing..."
        dem = os.path.join(path_dem, "mosaic")
        mosaic(demlist, dem)
    else:
        dem = demlist[0]
        # NOTE(review): placement of dempar relative to the else-branch was
        # reconstructed from the collapsed source — confirm against history
        dempar(dem)
    # interpolate/fill voids in the DEM
    fill(dem, os.path.join(path_dem, "dem_final"), path_log)
    dem = os.path.join(path_dem, "dem_final")
    # transform DEM to UTM
    if par.utm == "True":
        print "transforming to UTM..."
        transform(dem, dem + "_utm", int(par.targetres))
        hdr(dem + "_utm.par")
    print "...done"
    print "#############################################"
def hgt_collect(parfiles, outdir, demdir=None):
    """Collect all SRTM hgt tiles required to cover the scenes in *parfiles*.

    Search order: optional local archive *demdir*, then *outdir*, then the
    USGS SRTM version 2.1 server (missing tiles are downloaded and unzipped
    into *outdir*). Returns the list of local paths of all found tiles.
    NOTE(review): indentation reconstructed from a collapsed source line.
    """
    # concatenate required hgt tile names
    target_ids = hgt(parfiles)
    targets = []
    # define server and its subdirectories
    # tiff alternative (not implemented): ftp://srtm.csi.cgiar.org/SRTM_v41/SRTM_Data_GeoTIFF/
    server = "http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/"
    continents = ["Africa", "Australia", "Eurasia", "Islands", "North_America", "South_America"]
    # tile naming scheme, e.g. N42E013
    pattern = "[NS][0-9]{2}[EW][0-9]{3}"
    # if an additional dem directory has been defined, check this directory for required hgt tiles
    if demdir is not None:
        for item in finder(demdir, target_ids):
            targets.append(item)
    # check for additional potentially existing hgt tiles in the defined output directory
    # (skip ids whose basename already occurs in a previously found path)
    for item in [os.path.join(outdir, x) for x in target_ids
                 if os.path.isfile(os.path.join(outdir, x))
                 and not re.search(x, "\n".join(targets))]:
        targets.append(item)
    for item in targets:
        print item
    # search server for all required tiles, which were not found in the local directories
    if len(targets) < len(target_ids):
        print "searching for SRTM tiles on the server..."
        onlines = []
        for continent in continents:
            path = os.path.join(server, continent)
            # scrape the continent directory listing for hgt zip archives
            response = urlopen(path).read()
            for item in re.findall(pattern + "[.]hgt.zip", response):
                outname = re.findall(pattern, item)[0] + ".hgt"
                if outname in target_ids and outname not in [os.path.basename(x) for x in targets]:
                    onlines.append(os.path.join(path, item))
        # remove duplicates (a tile id may appear in several listings)
        onlines = list(set(onlines))
        for item in onlines:
            print item
        # if additional tiles have been found online, download and unzip them to the local directory
        if len(onlines) > 0:
            print "downloading {0} SRTM tiles...".format(len(onlines))
            for candidate in onlines:
                localname = os.path.join(outdir, re.findall(pattern, candidate)[0] + ".hgt")
                infile = urlopen(candidate)
                with open(localname + ".zip", "wb") as outfile:
                    outfile.write(infile.read())
                infile.close()
                with zipfile.ZipFile(localname + ".zip", "r") as z:
                    z.extractall(outdir)
                # the zip archive is only an intermediate; keep the hgt file
                os.remove(localname + ".zip")
                targets.append(localname)
    return targets
# read parameter file par = ReadPar(os.path.join(os.getcwd(), "PAR/cal_slc.par")) # define (and create) directories for processing results and logfile path_log = os.path.join(os.getcwd(), "LOG/GEO/") path_out = os.path.join(os.getcwd(), "ISP/") for path in [path_log, path_out]: if not os.path.exists(path): os.makedirs(path) list_K_dB = {"PSR1": "-115.0"} list_slc = finder(os.getcwd(), ["*_slc"]) if len(list_slc) > 0: print "#############################################" print "calibration started..." for name_slc in list_slc: sensor = name_slc.split("_")[0] if sensor in list_K_dB: K_dB = list_K_dB[sensor] else: print "calibration for sensor "+sensor+"not implemented" name_cslc = name_slc[:-3]+"cslc" run(["radcal_SLC", name_slc, name_slc+".par", name_cslc, name_cslc+".par", "1", "-", "0", "0", "0", "0", "-", K_dB], path_out, path_log)
# Pauli decomposition script: for every acquisition providing HH, VV and HV
# SLC images, compute the pauli decomposition into a POL/ subdirectory.
# NOTE(review): indentation reconstructed from a collapsed source line.

path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# create list of scene tuple objects
# all images following the defined patterns within the same folder (i.e. the same acquisition) will be grouped together
tuples = grouping()

print "#############################################"
print "creating pauli decomposition..."

for scene in tuples:
    # only process acquisitions where all three polarizations are present
    if len(set(["HH_slc", "VV_slc", "HV_slc"]) & set(scene.__dict__.keys())) == 3:
        print scene.basename
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        # strip the "HH_slc" suffix from the basename and append "pauli"
        name_out = os.path.join(path_out, os.path.basename(scene.HH_slc)[:-6] + "pauli")
        run(["pauli", scene.HH_slc, scene.VV_slc, scene.HV_slc,
             scene.HH_slc + ".par", scene.VV_slc + ".par", scene.HV_slc + ".par",
             name_out],
            os.getcwd(), path_log)

# rename files to consistent pattern
for filename in finder(os.getcwd(), ["*.slc*"]):
    os.rename(filename, filename.replace(".slc", "_slc"))

print "...done"
print "#############################################"
# ERS import script fragment: locate ERS leader files (LEA_01.001) below the
# directory given as first argument and extract meta information from them.
# NOTE(review): indentation reconstructed from a collapsed source line; the
# scene loop appears to continue beyond this fragment.

from gammaGUI_301015.ancillary import finder, run, ReadPar

# whether Delft orbit correction was requested via the last script argument
orbit_correct = True if sys.argv[-1] == "True" else False

# path to delft orbit files
path_delft = "/pvdata2/john/ancillary/ERS/ORBIT/delft"

# path to antenna correction files
path_cal = "/pvdata2/john/ancillary/ERS/CAL/ERS_antenna"

# define (and create) directory for logfile
path_log = os.path.join(os.getcwd(), "LOG/IMP/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# one scene per directory containing a leader file
scenes = [os.path.dirname(x) for x in finder(sys.argv[1], ["*LEA_01.001"])]

if len(scenes) == 0:
    raise IOError("no appropriate file found")

for scene in scenes:
    print "----------"
    # read leader file for meta information
    with open(os.path.join(scene, "LEA_01.001"), "r") as infile:
        text = [line for line in infile]
    text = "".join(text)
    # extract frame id (the four characters following "FRAME=")
    frame_index = re.search("FRAME=", text).end()
    frame = text[frame_index:frame_index + 4]
from gammaGUI_301015.isp_parameterfile import ISPPar # define (and create) directories for processing results and logfile path_log = os.path.join(os.getcwd(), "LOG/ISP/") path_out = os.path.join(os.getcwd(), "ISP/") for path in [path_log, path_out]: if not os.path.exists(path): os.makedirs(path) par = ReadPar(os.path.join(os.getcwd(), "PAR", "coherence_ad.par"), type="exe") # retrieve additional arguments from script call differential = True if par.differential == "True" else False # find flattened interferograms list_flt = finder(path_out, ["*_int_diff"]) if differential else finder( path_out, ["*_flt"]) if len(list_flt) > 0: print "#############################################" print "estimation started..." for name_flt in list_flt: # extract timestamps from flt name id_pwr = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)", name_flt) # find mli/rmli images matching the extracted timestamps try: name_mli = finder(os.getcwd(), ["*" + id_pwr[0] + "_mli"])[0] # name_rmli = finder(os.getcwd(), ["*"+id_pwr[1]+"_reg_mli"])[0] name_rmli = name_flt[:
# Fragment: look up the bounding box of a zip archive in a CSV metadata file,
# derive the required SRTM tile ids, copy the tiles from *srtmdir* to *outdir*
# and mosaic them if necessary.
# NOTE(review): indentation reconstructed from a collapsed source line; the
# names csvname/zipfile/srtmdir/outdir/name_dem are defined outside this view.

lat = lon = []
with open(csvname, "r") as csvfile:
    for row in csv.DictReader(csvfile):
        # find the metadata row belonging to the processed zip archive
        if re.search(os.path.basename(zipfile), row["zipFilename"]):
            lat = [row["upperLeftLat"], row["lowerRightLat"]]
            lon = [row["upperLeftLon"], row["lowerRightLon"]]
            break

# concatenate names of required srtm tiles with convention "[NS][0-9]{2}[EW][0-9]{3}.hgt"
if len(lat) > 0 and len(lon) > 0:
    target_ids = hgt(lat, lon)
else:
    raise IOError("meta data entry missing")

# search for required tiles in the defined srtm directory
targets = finder(srtmdir, target_ids)

# check whether all required tiles were found and copy them to the working directory
if len(targets) < len(target_ids):
    raise IOError("missing hgt files")
else:
    for item in targets:
        shutil.copy(item, outdir)

targets = [os.path.join(outdir, x) for x in target_ids]

# create gamma parameter files for all DEMs
dempar(targets)

# perform mosaicing if necessary
if len(targets) > 1:
    mosaic(targets, name_dem)
# FD3C (Freeman-Durden 3-component) decomposition script fragment.
# NOTE(review): indentation reconstructed from a collapsed source line; the
# names tuples/path_log are defined outside this view.

print "#############################################"
print "creating fd3c decomposition..."

for scene in tuples:
    # NOTE(review): the membership check tests for calibrated attributes
    # (HH_slc_cal, HH_slc_cal_mli) while the body reads scene.HH_slc and
    # scene.HH_mli — confirm scene objects carry both attribute sets,
    # otherwise this raises AttributeError.
    if len({"HH_slc_cal", "VV_slc_cal", "HV_slc_cal", "t13", "HH_slc_cal_mli"}
           & set(scene.__dict__.keys())) == 5:
        print scene.basename
        # multilook factors taken from the HH MLI parameter file
        rlks = ReadPar(scene.HH_mli + ".par").range_looks
        azlks = ReadPar(scene.HH_mli + ".par").azimuth_looks
        run(["FD3C_DEC", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.t13,
             ReadPar(scene.HH_slc + ".par").range_samples,
             scene.basename, rlks, azlks],
            os.path.dirname(scene.t13), path_log)
        # write ENVI header files for the three decomposition components
        for tag in ["_fdd_pd", "_fdd_ps", "_fdd_pv"]:
            hdr(scene.HH_mli + ".par",
                os.path.join(os.path.dirname(scene.t13), scene.basename) + tag + ".hdr")

# rename files to consistent pattern (".fdd" -> "_fdd")
for pattern in ["*.fdd*"]:
    for filename in finder(os.getcwd(), [pattern]):
        os.rename(filename,
                  filename.replace(pattern.strip("*"), pattern.strip("*").replace(".", "_")))

print "...done"
print "#############################################"
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles, intermediates, verbose):
    """Geocode and radiometrically normalize a Sentinel-1 GRD zip archive.

    Pipeline: unzip -> convert to GAMMA format -> multilook -> SRTM DEM
    mosaic -> geocoding lookup table (gc_map) -> pixel area / offset
    refinement -> radiometric normalization -> geocoded GeoTIFF export.
    NOTE(review): indentation reconstructed from a collapsed source line.
    """
    with tempfile.NamedTemporaryFile(delete=False, dir=outdir) as mainlog:
        # Definition geocode_back interpolation function
        # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
        # 0 - Nearest Neighbor
        # 1 - Bicubic Spline
        # 2 - Bicubic Spline-Log
        func_geoback = 2
        # function for interpolation of layover/shadow/foreshortening/DEM gaps
        # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
        func_interp = 0
        # Definition of the multi-look factor
        # Enter the number of looks you want to use in range and azimuth"
        # Try to achieve Level 1.5 azimuth pixel dimension and squarish pixels in GR image"
        # number of looks in range
        ml_rg = 4
        # number of looks in azimuth
        ml_az = 2
        # DEM oversampling factor"
        # for S1 GRDH: final resolution: 20m
        # 30m SRTM:
        # dem_ovs = 1.5
        # 90m SRTM:
        dem_ovs = 5
        message = "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
            zipfile[:-4], asctime())
        # print to stdout in verbose mode, otherwise append to the logfile
        print message if verbose else mainlog.writelines(message)
        if not verbose:
            os.rename(mainlog.name, os.path.join(outdir, "main.log"))
    ######################################################################
    # S1 SAFE naming convention; named groups are used below
    pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"
    # unzip the dataset
    try:
        with zf.ZipFile(zipfile, "r") as z:
            scene = sorted(z.namelist())[0].strip("/")
            match = re.match(pattern, scene)
            # ascending/descending discrimination from the start time of day
            orbit = "D" if float(
                re.findall("[0-9]{6}", match.group("start"))[1]) < 120000 else "A"
            outname_base = "_".join([
                os.path.join(outdir, match.group("sat")),
                match.group("beam"),
                match.group("start").replace("T", "_"),
                orbit])
            if not os.path.exists(os.path.join(tempdir, scene)) and len(
                    finder(outdir, [os.path.basename(outname_base)], regex=True)) == 0:
                # testzip returns None (falsy) when the archive is intact
                if not z.testzip():
                    if verbose:
                        print "unzipping data..."
                    # print z.testzip()
                    z.extractall(tempdir)
                    tempdir = os.path.join(tempdir, scene)
                else:
                    print "Corrupt zip"
                    return
            else:
                print "file already imported/processed"
                return
    # NOTE(review): ImportError is an odd choice here (zip handling raises
    # e.g. BadZipfile) — presumably intended as a broad skip; verify
    except ImportError:
        print "...skipped"
        return
    # create logfile folder if this option was selected
    if logfiles:
        path_log = outname_base + "_log"
        if os.path.exists(path_log):
            shutil.rmtree(path_log)
        os.makedirs(path_log)
    else:
        path_log = None
    ######################################################################
    print "converting to GAMMA format..."
    try:
        run(["/usr/local/bin/python2.7",
             os.path.join(os.getcwd(), "reader.py"), tempdir],
            outdir=tempdir, logpath=path_log)
    except ImportError:
        print "...failed"
        return
    files_slc = finder(tempdir, ["*_slc"])
    if len(files_slc) > 0:
        if verbose:
            print "multilooking..."
        for item in files_slc:
            run(["multi_look", item, item + ".par",
                 item[:-3] + "mli", item[:-3] + "mli.par", ml_rg, ml_az],
                logpath=path_log)
    files_mli = finder(tempdir, ["*_mli"])
    # first MLI serves as the geometry master
    master = files_mli[0]
    base = master[:-3]
    # names of all intermediate products derived from the master
    dem_seg = base + "dem"
    lut = base + "lut"
    lut_fine = base + "lut_fine"
    sim_sar = base + "sim_sar"
    u = base + "u"
    v = base + "v"
    inc = base + "inc"
    psi = base + "psi"
    pix = base + "pix"
    ls_map = base + "ls_map"
    pixel_area = base + "pixel_area"
    pixel_area2 = base + "pixel_area2"
    offs = base + "offs"
    coffs = base + "coffs"
    coffsets = base + "coffsets"
    snr = base + "snr"
    ellipse_pixel_area = base + "ellipse_pixel_area"
    ratio_sigma0 = base + "ratio_sigma0"
    # read image parameter file for meta information
    par = ReadPar(master + ".par")
    incidence = str(int(float(par.incidence_angle)))
    outname_base = outname_base + "_" + incidence
    ######################################################################
    if verbose:
        print "mosaicing SRTM data..."
    name_srtm = os.path.join(tempdir, "srtm")
    # extract corner coordinates from gamma parameter files and concatenate names of required hgt files
    lat, lon = srtm.latlon([x + ".par" for x in files_mli])
    target_ids = srtm.hgt(lat, lon)
    # search for required tiles in the defined srtm directory
    targets = finder(srtmdir, target_ids)
    # copy hgt files to temporary directory
    if len(targets) > 0:
        for item in targets:
            shutil.copy(item, tempdir)
        targets = finder(tempdir, target_ids)
    else:
        print "...failed"
        return
    # create gamma parameter files for all DEMs
    srtm.dempar(targets)
    # mosaic hgt files
    srtm.mosaic(targets, name_srtm)
    # interpolate data gaps
    srtm.replace(name_srtm, name_srtm + "_fill", path_log)
    os.remove(name_srtm)
    os.remove(name_srtm + ".par")
    name_srtm += "_fill"
    # project DEM to UTM
    if transform:
        if verbose:
            print "reprojecting DEM..."
        srtm.transform(name_srtm, name_srtm + "_utm")
        name_srtm += "_utm"
    # remove hgt files from temporary directory
    for item in targets:
        os.remove(item)
        os.remove(item + ".par")
    ######################################################################
    # create DEM products; command is automatically chosen based on SAR imagery parameter file entries (flawless functioning yet to be tested)
    if verbose:
        print "sar image simulation..."
    try:
        if ReadPar(master + ".par").image_geometry == "GROUND_RANGE":
            run(["gc_map_grd", master + ".par", name_srtm + ".par", name_srtm,
                 dem_seg + ".par", dem_seg, lut, dem_ovs, dem_ovs, sim_sar,
                 u, v, inc, psi, pix, ls_map, 8, func_interp],
                logpath=path_log)
        else:
            run(["gc_map", master + ".par", "-", name_srtm + ".par", name_srtm,
                 dem_seg + ".par", dem_seg, lut, dem_ovs, dem_ovs, sim_sar,
                 u, v, inc, psi, pix, ls_map, 8, func_interp],
                logpath=path_log)
    except IOError:
        print "...failed"
        return
    ######################################################################
    if verbose:
        print "initial pixel area estimation..."
    run(["pixel_area", master + ".par", dem_seg + ".par", dem_seg,
         lut, ls_map, inc, pixel_area],
        logpath=path_log)
    ######################################################################
    if verbose:
        print "exact offset estimation..."
    try:
        inlist = ["", "0 0", "100 100", "128 128", "7.0"]
        run(["create_diff_par", master + ".par", "-", master + "_diff.par", 1],
            inlist=inlist, logpath=path_log)
        run(["offset_pwrm", master, pixel_area, master + "_diff.par",
             offs, snr, 128, 128, offs + ".txt", "-", 200, 200, 7.0],
            logpath=path_log)
    # NOTE(review): bare except — swallows all errors including KeyboardInterrupt
    except:
        print "...failed"
        return
    ######################################################################
    if verbose:
        print "computation of offset polynomials..."
    try:
        run(["offset_fitm", offs, snr, master + "_diff.par",
             coffs, coffsets, "-", 4, 0],
            logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    if verbose:
        print "supplementing lookuptable with offset polynomials..."
    try:
        sim_width = ReadPar(dem_seg + ".par").width
        run(["gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine, 0],
            logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    if verbose:
        print "refined pixel area estimation..."
    try:
        run(["pixel_area", master + ".par", dem_seg + ".par", dem_seg,
             lut_fine, ls_map, inc, pixel_area2],
            logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    if verbose:
        print "radiometric calibration and normalization..."
    try:
        slc_width = ReadPar(master + ".par").range_samples
        run(["radcal_MLI", master, master + ".par", "-", master + "_cal",
             "-", 0, 0, 1, 0.0, "-", ellipse_pixel_area],
            logpath=path_log)
        run(["ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0,
             slc_width, 1, 1],
            logpath=path_log)
        for item in files_mli:
            run(["product", item, ratio_sigma0, item + "_pixcal",
                 slc_width, 1, 1],
                logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    if verbose:
        print "backward geocoding, normalization and conversion to dB..."
    for item in files_mli:
        run(["geocode_back", item + "_pixcal", slc_width, lut_fine,
             item + "_geo", sim_width, 0, func_geoback],
            logpath=path_log)
        # flatten with the cosine of the scene incidence angle
        run(["lin_comb", "1", item + "_geo", 0,
             math.cos(math.radians(float(par.incidence_angle))),
             item + "_geo_flat", sim_width],
            logpath=path_log)
        run(["sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
             sim_width],
            logpath=path_log)
    ######################################################################
    print "creating final tiff files..."
    for item in finder(tempdir, ["*_geo_norm"]):
        polarization = re.findall("[HV]{2}", os.path.basename(item))[0].lower()
        outname = outname_base + "_" + polarization
        run(["data2geotiff", dem_seg + ".par", item, 2,
             outname + "_geocoded_norm.tif"],
            logpath=path_log)
        # keep the matching annotation xml next to the exported tiff
        annotation_dir = os.path.join(tempdir, "annotation")
        annotation = os.path.join(annotation_dir, [
            x for x in os.listdir(annotation_dir)
            if polarization in os.path.basename(x)][0])
        os.rename(annotation, outname + "_annotation.xml")
    ######################################################################
    if verbose:
        print "cleaning up..."
    # copy, rename and edit quicklook kml and png
    shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                    outname_base + "_quicklook.kml")
    shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                    outname_base + "_quicklook.png")
    with open(outname_base + "_quicklook.kml", "r") as infile:
        kml = infile.read().replace(
            "quick-look.png", os.path.basename(outname_base + "_quicklook.png"))
    with open(outname_base + "_quicklook.kml", "w") as outfile:
        outfile.write(kml)
    if not intermediates:
        shutil.rmtree(tempdir)
    # rename the log folder to include the incidence angle suffix
    if logfiles:
        os.rename(path_log, outname_base + "_log")
    if verbose:
        print "...done:", asctime()
        print "##########################################"
import re import os from gammaGUI_301015.ancillary import finder, ReadPar, run # read parameter file par = ReadPar(os.path.join(os.getcwd(), "PAR/baseline.par"), type="exe") # define (and create) directories for processing results and logfile path_log = os.path.join(os.getcwd(), "LOG/ISP/") path_out = os.path.join(os.getcwd(), "ISP/") for path in [path_log, path_out]: if not os.path.exists(path): os.mkdir(path) interferograms = finder(path_out, ["*int"]) if len(interferograms) > 0: print "#############################################" print "baseline estimation started..." for name_int in interferograms: name_off = name_int[:-3] + "off" name_base = name_int[:-3] + "base_init" print os.path.basename(name_base) slc = finder(os.getcwd(), [ re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?", name_int)[0] ])[0] # rslc = finder(os.getcwd(), [re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)", name_int)[1]+"_reg"])[0] rslc = name_int[:-3] + "reg"
############################################################## # delete/remove files following defined patterns in the current directory and its subdirectories # module of software gammaGUI # John Truckenbrodt 2015 ############################################################## import sys import os import shutil from gammaGUI_301015.ancillary import finder # find all files matching the defined pattern(s) items = finder(os.getcwd(), sys.argv[1].split(", "), regex=True if sys.argv[3] == "True" else False) # exclude files in the export directory items = [x for x in items if "/EXP/" not in x] if len(items) > 0: path_exp = os.path.join(os.getcwd(), "EXP/") if sys.argv[2] == "export" and not os.path.exists(path_exp): os.makedirs(path_exp) print "the following files will be", { "export": "exported", "delete": "deleted" }[sys.argv[2]] + ":" for item in items: print item
# Batch driver: run the S1 main processing script on all GRDH zip archives
# in *zipdir*, three scenes in parallel.

import sys
import subprocess as sp
import multiprocessing as mp

from gammaGUI_301015.ancillary import finder

script = "/homes4/geoinf/ve39vem/scripts/python/S1_main.py"

zipdir = "/homes4/geoinf/ve39vem/RADAR/Sentinel/archive/sweden"
tempdir = "/homes4/geoinf/ve39vem/RADAR/Sentinel/test_in"
outdir = "/homes4/geoinf/ve39vem/RADAR/Sentinel/test_out"
srtmdir = "/geonfs02_vol1/SRTM_3_HGT/01_hgt"


def execute(item):
    # run the processing script on a single zip archive in a subprocess
    # logfile = os.path.join(tempdir, os.path.basename(item)[:-4])
    sp.check_call(
        [sys.executable, script, "-l", "-i", item, tempdir, outdir, srtmdir])


if __name__ == '__main__':
    files = finder(zipdir, ["*.zip"])
    files = [x for x in files if "GRDH" in x]
    pool = mp.Pool(processes=3)
    try:
        result = pool.map(execute, files)
    finally:
        # BUGFIX: `sys` was used (sys.executable) but never imported, and the
        # pool was previously only closed inside a bare `except:` — close and
        # join it on both the success and the error path
        pool.close()
        pool.join()
import sys import os import subprocess as sp from gammaGUI_301015.ancillary import finder with open(sys.argv[1], "r") as inlist: processlist = [ line.split() for line in inlist if not line.startswith("#") and not line.strip() == "" ] for couple in processlist: slc1 = finder(os.getcwd(), [couple[0] + "$"], regex=True) slc2 = finder(os.getcwd(), [couple[1] + "$"], regex=True) if len(slc1) != 1: print "descriptor", slc1, "ambiguous or file not existing" continue elif len(slc2) != 1: print "descriptor", slc2, "ambiguous or file not existing" continue else: sp.check_call([ "python", os.path.join(os.path.dirname(sys.argv[0]), "coreg.py"), slc1[0], slc2[0] ], cwd=os.getcwd())
int(targetres[1]) targetres = ["-tr", targetres] except IOError: print "invalid resolution statement" else: raise IOError("invalid resolution statement") else: targetres = [] nodata = ["-dstnodata", nodata] format = ["-of", "ENVI"] resampling = ["-r", resampling] # find all files matching the defined pattern(s) items = finder(path_in, pattern.split(", "), regex=True if reg == "True" else False) if len(items) > 0: path_out = os.path.dirname(file_out) if not os.path.exists(path_out): os.makedirs(path_out) print "the following files will be stacked to file {0}:".format(file_out) for item in items: print item decision = raw_input("proceed (y/n)?: ") if decision == "y": vrt = file_out + ".vrt" sp.check_call( dissolve( ["gdalbuildvrt", "-q", "-overwrite", "-separate", vrt, items]))
# Interferogram flattening script fragment: for every interferogram in ISP/,
# resolve the primary/secondary SLCs needed for flattening.
# NOTE(review): indentation reconstructed from a collapsed source line; the
# loop body appears to continue beyond this fragment.

import os
import re

from gammaGUI_301015.ancillary import finder, ReadPar, run

# read parameter file
par = ReadPar(os.path.join(os.getcwd(), "PAR/baseline.par"))

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        # NOTE(review): os.mkdir fails if the parent ("LOG/") is missing;
        # sibling scripts use os.makedirs — verify intent
        os.mkdir(path)

interferograms = finder(path_out, ["*int"])

if len(interferograms) > 0:
    print "#############################################"
    print "interferogram flattening started..."
    for name_int in interferograms:
        print os.path.basename(name_int)
        # retrieve full name of primary and secondary SLC files
        scenes = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?", name_int)
        primary = finder(os.getcwd(), [scenes[0] + "$"], regex=True)[0]
        # secondary = finder(os.getcwd(), [scenes[1]+"_reg"], regex=True)[0]
        secondary = name_int[:-3] + "reg"
        # collect geocoding lookup tables