def transform(infile, outfile, posting=90):
    """Project a GAMMA DEM to UTM coordinates.

    infile/outfile are DEM base names (the matching parameter files carry a
    '.par' suffix); posting is the target pixel spacing in meters.
    """
    parfile = infile + ".par"

    # read the DEM parameter file and derive the UTM zone of its corner
    par = ReadPar(parfile)
    utm = UTM(parfile)

    # start from a clean slate: drop any previous output
    for leftover in (outfile + ".par", outfile):
        if os.path.isfile(leftover):
            os.remove(leftover)

    # southern-hemisphere corners (negative latitude) need a false northing
    falsenorthing = "10000000." if "-" in par.corner_lat else "0"

    # interactive answers for create_dem_par (blank entries keep defaults)
    spacing = "-" + str(posting) + " " + str(posting)
    inlist = ["UTM", "WGS84", 1, utm.zone, falsenorthing, outfile,
              "", "", "", "", "", spacing, ""]
    run(["create_dem_par", outfile + ".par"], inlist=inlist)

    # resample the DEM into the newly defined UTM grid
    run(["dem_trans", infile + ".par", infile, outfile + ".par", outfile,
         "-", "-", "-", 1])
def mosaic(demlist, outname, byteorder=1, gammapar=True):
    """Mosaic multiple DEM tiles into a single ENVI raster.

    demlist: list of input DEM file names; outname: name of the output mosaic.
    byteorder=1 converts the result to big endian (GAMMA convention) via a
    temporary '_swap' copy; gammapar=True additionally writes a GAMMA DEM
    parameter file for the mosaic.
    """
    # use the nodata value of the first tile for both source and target
    nodata = str(raster.Raster(demlist[0]).nodata)
    run(
        dissolve([
            "gdalwarp", "-q", "-of", "ENVI", "-srcnodata", nodata,
            "-dstnodata", nodata, demlist, outname
        ]))
    if byteorder == 1:
        swap(outname, outname + "_swap")
        # gdalwarp side products (e.g. the '.aux.xml' statistics file) are not
        # always created; removing unconditionally raised OSError before
        for item in [outname, outname + ".hdr", outname + ".aux.xml"]:
            if os.path.isfile(item):
                os.remove(item)
        os.rename(outname + "_swap", outname)
        os.rename(outname + "_swap.hdr", outname + ".hdr")
    if gammapar:
        # write a GAMMA DEM parameter file alongside the mosaic
        dempar(outname)
def dempar(dem, logpath=None):
    """Create a GAMMA DEM parameter file for an existing raster file.

    dem is the raster file name; logpath is an optional directory for the
    GAMMA command logfile. Raises IOError for unsupported data types,
    projections or ellipsoids.
    """
    rast = raster.Raster(dem)

    # map GDAL data types to their GAMMA equivalents
    dtype_map = {"Int16": "INTEGER*2", "UInt16": "INTEGER*2",
                 "Float32": "REAL*4"}
    if rast.dtype not in dtype_map:
        raise IOError("data type not supported")
    dtype = dtype_map[rast.dtype]

    # pixel posting (y x) and top-left coordinate (lat lon) as strings
    gt = rast.geotransform
    posting = str(gt["yres"]) + " " + str(gt["xres"])
    latlon = str(gt["ymax"]) + " " + str(gt["xmin"])

    # map proj4 projection names to GAMMA projection identifiers
    proj_map = {"longlat": "EQA", "utm": "UTM"}
    proj = rast.proj4args["proj"]
    if proj not in proj_map:
        raise IOError("projection not supported (yet)")
    projection = proj_map[proj]

    # prefer the explicit ellipsoid entry; fall back to the datum
    if "ellps" in rast.proj4args:
        ellipsoid = rast.proj4args["ellps"]
    else:
        ellipsoid = rast.proj4args["datum"]
    if ellipsoid != "WGS84":
        raise IOError("ellipsoid not supported (yet)")

    # interactive input answers for the GAMMA command create_dem_par
    parlist = [projection, ellipsoid, 1, os.path.basename(dem), dtype,
               0, 1, rast.cols, rast.rows, posting, latlon]

    # execute GAMMA command
    run(["create_dem_par", os.path.splitext(dem)[0] + ".par"],
        os.path.dirname(dem),
        logpath,
        inlist=parlist)
def fill(dem, dem_out, logpath, replace=False):
    """Interpolate nodata gaps in a GAMMA DEM.

    dem is the input DEM base name ('.par' file expected alongside);
    dem_out is the gap-filled output; logpath receives GAMMA logfiles;
    replace=True deletes the original DEM and its metadata afterwards.
    """
    width = ReadPar(dem + ".par").width

    path_dem = os.path.dirname(dem_out)

    rpl_flg = 0  # replace_values flag
    dtype = 4    # REAL*4 data

    # shift valid zero heights to 1 so that 0 can serve as the gap marker
    value = 0
    new_value = 1
    run([
        "replace_values", dem, value, new_value, dem + "_temp", width, rpl_flg,
        dtype
    ], path_dem, logpath)

    # map the SRTM nodata value -32768 to 0 for interpolation
    value = -32768
    new_value = 0
    run([
        "replace_values", dem + "_temp", value, new_value, dem + "_temp2",
        width, rpl_flg, dtype
    ], path_dem, logpath)

    # interpolate missing values (interp_ad window parameters)
    r_max = 9    # maximum interpolation window radius
    np_min = 40  # minimum number of points used for interpolation
    np_max = 81  # maximum number of points used for interpolation
    w_mode = 2   # weighting mode
    run([
        "interp_ad", dem + "_temp2", dem_out, width, r_max, np_min, np_max,
        w_mode, dtype
    ], path_dem, logpath)

    # remove temporary files
    os.remove(dem + "_temp")
    os.remove(dem + "_temp2")

    # duplicate parameter file for newly created dem
    shutil.copy(dem + ".par", dem_out + ".par")

    # create ENVI header file
    hdr(dem_out + ".par")

    # optionally remove the original (unfilled) DEM and its metadata
    if replace:
        for item in [
                dem + x for x in ["", ".par", ".hdr", ".aux.xml"]
                if os.path.isfile(dem + x)
        ]:
            os.remove(item)
    # fix: removed an unconditional raise IOError("no appropriate file found")
    # that previously made the function fail even after successful completion

# import ERS scenes from CEOS format into GAMMA format; 'scenes', 'run' and
# 'path_log' are defined elsewhere in this script (Python 2 fragment)
for scene in scenes:
    print "----------"
    # read leader file for meta information
    with open(os.path.join(scene, "LEA_01.001"), "r") as infile:
        text = [line for line in infile]
    text = "".join(text)

    # extract frame id: the four characters following "FRAME=" in the leader
    frame_index = re.search("FRAME=", text).end()
    frame = text[frame_index:frame_index+4]

    # convert the scene to GAMMA format under a temporary name
    tempname = os.path.join(os.getcwd(), "temp")
    print "importing..."
    run(["par_ESA_ERS", "LEA_01.001", tempname+".par", "DAT_01.001", tempname], scene, path_log, [""])
    par = ReadPar(tempname+".par")

    # build the output name <sensor>_<frame>_<date>T<time>_VV_slc from the
    # parameter file's date and center_time entries
    date = "".join([format(int(x), "02d") for x in par.date[0:3]])
    timestamp = date+"T"+time.strftime("%H%M%S", time.gmtime(round(float(par.center_time[0]))))
    outname = par.sensor+"_"+frame+"_"+timestamp+"_VV_slc"
    # scene folder: output name without the trailing "_VV_slc" (7 characters)
    path_out = os.path.join(os.getcwd(), outname[:-7])
    if not os.path.exists(path_out):
        print outname
        os.makedirs(path_out)
        os.rename(tempname, os.path.join(path_out, outname))
        os.rename(tempname+".par", os.path.join(path_out, outname+".par"))
    else:
        # scene folder already present: discard the freshly imported copy
        print "scene", outname, "already imported; removing temporary files"
        os.remove(tempname)
        os.remove(tempname+".par")
Beispiel #6
0
# flatten all interferograms in list_int with GAMMA ph_slope_base
if len(list_int) > 0:
    print "#############################################"
    print "interferogram flattening started..."

    for name_int in list_int:
        # locate the SLC whose name is embedded in the interferogram name
        # NOTE(review): the sibling pattern later in this file uses
        # "(?:_cal)?" (optional suffix); here "_cal" is mandatory — confirm
        slc_par = finder(os.getcwd(), [
            re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)",
                       name_int)[0]
        ])[0] + ".par"
        # companion products share the interferogram base name ('int' suffix)
        name_off = name_int[:-3] + "off"
        if not os.path.isfile(name_off):
            raise IOError("offset file missing")

        # prefer the refined baseline; fall back to the initial estimate
        if os.path.isfile(name_int[:-3] + "base_refine"):
            name_base = name_int[:-3] + "base_refine"
        elif os.path.isfile(name_int[:-3] + "base_init"):
            name_base = name_int[:-3] + "base_init"
        else:
            raise IOError("baseline file missing")
        name_flt = name_int[:-3] + "flt"
        print os.path.basename(name_flt)
        run([
            "ph_slope_base", name_int, slc_par, name_off, name_base, name_flt
        ], path_out, path_log)

    print "...done"
    print "#############################################"
else:
    print "#############################################"
    print "no interferograms found"
    print "#############################################"
Beispiel #7
0
# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating huynen decomposition..."
counter = 0
for scene in tuples:
    # process only scenes providing all six required products
    # NOTE(review): 'union' is a project helper; the ==6 comparison suggests
    # it returns the common elements of both collections — verify
    if len(
            union(["HH_slc", "VV_slc", "HV_slc", "t11", "t12", "t13"],
                  scene.__dict__.keys())) == 6:
        print scene.basename
        # run the decomposition for components 1..3
        for i in range(1, 4):
            run([
                "HUYNEN_DEC", scene.HH_slc, scene.HV_slc, scene.VV_slc,
                scene.t11, scene.t12, scene.t13,
                ReadPar(scene.HH_slc + ".par").range_samples, scene.basename,
                str(i)
            ], os.path.dirname(scene.t11), path_log)
        counter += 1
if counter == 0:
    print "no appropriate scenes with existing coherence matrix found"
else:
    # rename files to consistent pattern (e.g. '.t11' -> '_t11')
    for pattern in ["*.t*", "*.im", "*.re"]:
        for filename in finder(os.getcwd(), [pattern]):
            os.rename(
                filename,
                filename.replace(pattern.strip("*"),
                                 pattern.strip("*").replace(".", "_")))

print "...done"
Beispiel #8
0
# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "transforming scenes..."

for scene in tuples:
    if len(set(["HH_slc", "VV_slc", "HV_slc"])
           & set(scene.__dict__.keys())) == 3:
        print scene.basename
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        run([
            "lin_comb_cpx", "3", scene.HH_slc, scene.VV_slc, scene.HV_slc,
            par.constant_r, par.constant_i, par.factorHH_r, par.factorHH_i,
            par.factorVV_r, par.factorVV_i, par.factorHV_r, par.factorHV_i,
            scene.basename + "_rr",
            ReadPar(scene.HH_slc + ".par").range_samples, "", "", par.pixav_x,
            par.pixav_y, "1"
        ], path_out, path_log)
        run([
            "lin_comb_cpx", "3", scene.VV_slc, scene.HH_slc, scene.HV_slc,
            par.constant_r, par.constant_i, par.factorVV_r, par.factorVV_i,
            par.factorHH_r, par.factorHH_i, par.factorHV_r, par.factorHV_i,
            scene.basename + "_ll",
            ReadPar(scene.HH_slc + ".par").range_samples, "", "", par.pixav_x,
            par.pixav_y, "1"
        ], path_out, path_log)
        run([
            "lin_comb_cpx", "2", scene.HH_slc, scene.VV_slc, par.constant_r,
            par.constant_i, par.factorHH_r, par.factorHH_i, par.factorVV_r,
            par.factorVV_i, scene.basename + "_rl",
Beispiel #9
0
            name_mli = finder(os.getcwd(), ["*" + id_pwr[0] + "_mli"])[0]
            # name_rmli = finder(os.getcwd(), ["*"+id_pwr[1]+"_reg_mli"])[0]
            name_rmli = name_flt[:
                                 -8] + "reg_mli" if differential else name_flt[:
                                                                               -3] + "reg_mli"
        except:
            raise IOError("multilooked images missing")

        # concatenate coherence image name
        name_cc = name_flt + "_cc_ad"

        if not os.path.isfile(name_cc):
            print os.path.basename(name_cc)
            # read image samples
            samples = str(ISPPar(name_mli + ".par").range_samples)
            # run gamma command
            run([
                'cc_ad', name_flt, name_mli, name_rmli, "-", "-", name_cc,
                samples, par.box_min, par.box_max, par.wgt_ad
            ], path_out, path_log)
        else:
            print "coherence image", name_cc, "already exists"

    print "...estimation finished"
    print "#############################################"
else:
    print "#############################################"
    print "no {0} interferograms found".format(
        "differential" if differential else "flattened")
    print "#############################################"
Beispiel #10
0
# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating krogager decomposition..."

for scene in tuples:
    # process only scenes providing all five required products
    if len(
            set(["HH_slc", "rl", "ll", "rr", "HH_mli"])
            & set(scene.__dict__.keys())) == 5:
        print scene.basename
        path_out = os.path.dirname(scene.rl)
        # take the multilooking factors from the HH MLI parameter file
        mlipar = scene.HH_mli + ".par"
        rlks = ReadPar(mlipar).range_looks
        azlks = ReadPar(mlipar).azimuth_looks
        # sphere component: multilooked right-left circular channel
        run([
            "multi_look", scene.rl, scene.HH_slc + ".par",
            scene.basename + "_sphere", mlipar, rlks, azlks
        ], path_out, path_log)
        # diplane and helix components from the ll/rr circular channels
        run([
            "diplane_helix", scene.ll, scene.rr, scene.HH_slc + ".par",
            scene.basename + "_diplane", scene.basename + "_helix", mlipar,
            rlks, azlks, "-", "-", "-"
        ], path_out, path_log)

        # create ENVI header files for the three components
        for tag in ["_sphere", "_helix", "_diplane"]:
            hdr(scene.HH_mli + ".par",
                os.path.join(path_out, scene.basename) + tag + ".hdr")

print "...done"
print "#############################################"
Beispiel #11
0
print "#############################################"
print "creating cloude decomposition..."

counter = 0

for scene in tuples:
    # process only scenes providing all six required products
    if len({"HH_slc", "VV_slc", "HV_slc", "t12", "t13", "HH_mli"}
           & set(scene.__dict__.keys())) == 6:
        counter += 1
        print scene.basename
        # multilooking factors from the HH MLI parameter file
        rlks = ReadPar(scene.HH_mli + ".par").range_looks
        azlks = ReadPar(scene.HH_mli + ".par").azimuth_looks
        run([
            "CLOUDE_DEC", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.t12,
            scene.t13,
            ReadPar(scene.HH_slc + ".par").range_samples, scene.basename, rlks,
            azlks
        ], os.path.dirname(scene.t12), path_log)
        # create envi header files (note: number of lines must be reduced by 1 on import into envi)
        for i in range(1, 4):
            hdr(
                scene.HH_mli + ".par",
                os.path.join(os.path.dirname(scene.t12), scene.basename) +
                "_ctd_" + str(i) + "_mag.hdr")

if counter == 0:
    print "no scenes with required scattering and coherency matrix elements found"

# rename files to consistent pattern
for pattern in ["*.ctd*", "*.mag", "*.pha"]:
    for filename in finder(os.getcwd(), [pattern]):
Beispiel #12
0
    # generate an interferogram for every coregistration offset file
    print "#############################################"
    print "interferogram generation started..."
    for name_off in offsets:
        name_int = name_off[:-3] + "int"
        print os.path.basename(name_int)

        # locate the primary SLC whose name is embedded in the
        # interferogram name (the '_cal' suffix is optional)
        slc = finder(os.getcwd(), [
            re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?",
                       name_int)[0]
        ])[0]
        # rslc = finder(os.getcwd(), [re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)", name_int)[1]+"_reg"])[0]
        rslc = name_off[:-3] + "reg"

        # read the master mli parameter file for multilooking factors
        # NOTE(review): if ReadPar raises IOError, par_mli stays unbound and
        # the run() call below fails with NameError — consider continue/raise
        try:
            par_mli = ReadPar(slc + "_mli.par")
        except IOError:
            print "MLI for the primary SLC missing"

        run([
            "SLC_intf", slc, rslc, slc + ".par", rslc + ".par", name_off,
            name_int, par_mli.range_looks, par_mli.azimuth_looks, "0", "-",
            par.sps_flg, par.azf_flg, par.rp1_flg, par.rp2_flg
        ], path_out, path_log)

    print "...done"
    print "#############################################"
else:
    print "#############################################"
    print "no coregistration offset files found"
    print "#############################################"
Beispiel #13
0
from ancillary import grouping, run, finder, ReadPar

# directory for GAMMA command logfiles
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# read processing parameters (not used in this visible section;
# presumably consumed further below — TODO confirm)
par = ReadPar(os.path.join(os.getcwd(), "PAR/mat_cov.par"))

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating covariance matrices..."

for scene in tuples:
    # process only scenes providing all four required products
    if len(set(["HH_slc", "VV_slc", "HV_slc", "HH_mli"]) & set(scene.__dict__.keys())) == 4:
        print scene.basename
        # multilooking factors from the HH MLI parameter file
        rlks = ReadPar(scene.HH_mli+".par").range_looks
        azlks = ReadPar(scene.HH_mli+".par").azimuth_looks
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        run(["polcovar", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.HH_slc+".par", scene.HV_slc+".par", scene.VV_slc+".par", os.path.basename(scene.basename),
             os.path.basename(scene.basename)+"_mat_cov.par", rlks, azlks], path_out, path_log)

# rename files to consistent pattern (e.g. '.c11' -> '_c11')
for filename in finder(os.getcwd(), ["*.c*"]):
    os.rename(filename, filename.replace(".c", "_c"))

print "...done"
print "#############################################"
Beispiel #14
0
# names of all coregistration products, derived from the two SLC names
name_base = os.path.basename(slc1) + "_" + os.path.basename(slc2) + "_"
name_coffs = name_base + "coffs"
name_coffsets = name_base + "coffsets"
name_off = name_base + "off"
name_offs = name_base + "offs"
name_offsets = name_base + "offsets"
name_snr = name_base + "snr"
name_reg = name_base + "reg"

print "#############################################"
print os.path.basename(slc1), "->", os.path.basename(slc2)
print "----------"
print "coregistration started..."

# create the offset parameter file for the SLC pair
run([
    "create_offset", slc1 + ".par", slc2 + ".par", name_off, par.algorithm, 1,
    1, 0
], path_out, path_log)

print "...estimation of initial range and azimuth offsets"

# first estimation using orbit data (most important in case of very large offsets)
run(["init_offset_orbit", slc1 + ".par", slc2 + ".par", name_off], path_out,
    path_log)

# repeated offset estimation using different levels of multilooking
isp = ReadPar(slc1 + ".par")
mlk = Spacing(isp)

for factor in [4, 2, 1]:
    run([
        "init_offset", slc1, slc2, slc1 + ".par", slc2 + ".par", name_off,
Beispiel #15
0
    # pix = basename+"_pix"
    # psi = basename+"_psi"
    sim_map = basename+"_sim_map"
    sim_rdc = basename+"_sim_rdc"
    snr = basename+"_snr"
    slope = basename+"_u"
    aspect = basename+"_v"

    # define parameter file of the master
    masterpar = master + ".par"

    # perform forward geocoding if the corresponding refined lookup table does not yet exist
    if not os.path.isfile(lut_fine):

        print"creating initial lookup table"
        run(["gc_map", masterpar, "-", dem + ".par", dem, dem_sub + ".par", dem_sub, lut_rough, "-", "-", sim_map, slope, aspect, inc, "-", "-", ls_map, par.frame, 2],
            path_out, path_log)

        hdr(dem_sub+".par", inc+".hdr")
        hdr(dem_sub+".par", ls_map+".hdr")
        hdr(dem_sub+".par", slope+".hdr")
        hdr(dem_sub+".par", aspect+".hdr")

        if tnorm:
            print "initial pixel area estimation"
            topo_base = os.path.join(path_out, os.path.basename(master))
            pix_gamma = topo_base+"_pix_gamma"
            parfile = master+".par"
            run(["pixel_area", parfile, dem_sub+".par", dem_sub, lut_rough, ls_map, inc, "-", pix_gamma], path_out, path_log)

        # read additional parameters
        samples_dem = ReadPar(dem_sub + ".par").width
Beispiel #16
0
    os.makedirs(path_log)

# read processing parameters (not used in this visible section;
# presumably consumed elsewhere — TODO confirm)
par = ReadPar(os.path.join(os.getcwd(), "PAR/mat_coh.par"))

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating coherence matrices..."

for scene in tuples:
    # process only scenes providing all four required Pauli products
    if len(
            set([
                "pauli_alpha_slc", "pauli_beta_slc", "pauli_gamma_slc",
                "HH_mli"
            ]) & set(scene.__dict__.keys())) == 4:
        print scene.basename
        # multilooking factors from the HH MLI parameter file
        rlks = ReadPar(scene.HH_mli + ".par").range_looks
        azlks = ReadPar(scene.HH_mli + ".par").azimuth_looks
        run([
            "polcoh", scene.pauli_alpha_slc, scene.pauli_beta_slc,
            scene.pauli_gamma_slc, scene.pauli_alpha_slc + ".par",
            scene.pauli_beta_slc + ".par", scene.pauli_gamma_slc + ".par",
            scene.basename, scene.basename + "_mat_coh.par", rlks, azlks
        ], os.path.dirname(scene.pauli_alpha_slc), path_log)

# rename files to consistent pattern (e.g. '.t11' -> '_t11')
for filename in finder(os.getcwd(), ["*.t*"]):
    os.rename(filename, filename.replace(".t", "_t"))

print "...done"
print "#############################################"
Beispiel #17
0
        name_off = name_int[:-3] + "off"
        name_base_init = name_int[:-3] + "base_init"
        name_base_refine = name_int[:-3] + "base_refine"
        name_base_res = name_int[:-3] + "base_res"
        par_dem = ReadPar(dem_map + ".par")
        par_mli = ReadPar(primary + "_mli.par")
        ph_sim = name_int[:-3] + "ph_sim"

        if not os.path.isfile(dem_map):
            print "DEM", dem_map, "missing"
            continue

        if not os.path.isfile(dem_rdc):
            print "...transforming DEM to range-doppler coordinates"
            run([
                "geocode", lut_fine, dem_map, par_dem.width, dem_rdc,
                par_mli.range_samples, par_mli.azimuth_lines, "2"
            ], os.path.dirname(dem_map), path_log)
        else:
            print "...found DEM in range-doppler coordinates"

        print "...initial DEM interferogram simulation"
        run([
            "phase_sim", primary + ".par", name_off, name_base_init, dem_rdc,
            ph_sim
        ], os.path.dirname(name_int), path_log)

        print "...initial differential interferogram generation"
        run([
            "SLC_diff_intf", primary, secondary, primary + ".par",
            secondary + ".par", name_off, ph_sim, diff_int,
            par_mli.range_looks, par_mli.azimuth_looks
Beispiel #18
0
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles,
         intermediates):
    """Geocode and radiometrically normalize a Sentinel-1 zip archive.

    zipfile:       path to the Sentinel-1 scene archive to process
    tempdir:       directory for intermediate files
    outdir:        directory receiving the final GeoTIFF products
    srtmdir:       directory containing SRTM DEM tiles
    transform:     if True, project the DEM mosaic to UTM before geocoding
    logfiles:      if True, keep GAMMA command logfiles next to the output
    intermediates: if True, keep the temporary processing directory
    """

    # Definition geocode_back interpolation function
    # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
    # 0 - Nearest Neighbor
    # 1 - Bicubic Spline
    # 2 - Bicubic Spline-Log
    func_geoback = 2

    # function for interpolation of layover/shadow/foreshortening/DEM gaps
    # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
    func_interp = 0

    # define target resolution; multilooking factors will be computed automatically
    res_target = 20

    # set DEM resolution and compute oversampling factor
    res_dem = 100
    dem_ovs = res_dem // res_target

    print "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
        zipfile[:-4], asctime())

    ######################################################################

    # Sentinel-1 SAFE naming convention; named groups are used to build
    # the output base name below
    pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"

    # unzip the dataset
    try:
        with zf.ZipFile(zipfile, "r") as z:
            scene = sorted(z.namelist())[0].strip("/")
            match = re.match(pattern, scene)
            # acquisitions starting before 12:00:00 UTC are flagged
            # descending ("D"), otherwise ascending ("A")
            orbit = "D" if float(
                re.findall("[0-9]{6}",
                           match.group("start"))[1]) < 120000 else "A"
            outname_base = "_".join([
                os.path.join(outdir, match.group("sat")),
                match.group("beam"),
                match.group("start").replace("T", "_"), orbit
            ])

            # only extract if the scene was neither unpacked nor processed yet
            if not os.path.exists(os.path.join(tempdir, scene)) and len(
                    finder(outdir, [os.path.basename(outname_base)],
                           regex=True)) == 0:
                # testzip() returns None when the archive is intact
                if not z.testzip():
                    print "unzipping data..."
                    # print z.testzip()
                    z.extractall(tempdir)
                else:
                    print "corrupt zip"
                    return
            else:
                print "file already imported/processed"
                return
            tempdir = os.path.join(tempdir, scene)
    # NOTE(review): ImportError cannot be raised by zipfile handling; a broken
    # archive raises zf.BadZipfile/IOError, which would propagate — confirm
    except ImportError:
        print "...skipped"
        return

    # create logfile folder if this option was selected
    if logfiles:
        path_log = outname_base + "_log"
        if os.path.exists(path_log):
            shutil.rmtree(path_log)
        os.makedirs(path_log)
    else:
        path_log = None

    ######################################################################
    print "converting to GAMMA format..."
    try:
        run([sys.executable,
             os.path.join(os.getcwd(), "reader.py"), tempdir],
            outdir=tempdir,
            logpath=path_log)
    # NOTE(review): run() executes a subprocess; ImportError seems
    # unreachable here — confirm the intended exception type
    except ImportError:
        print "...failed"
        return

    # gather all imported files
    files_mli = finder(tempdir, ["*_mli"])

    # compute multilooking factors
    par = ReadPar(files_mli[0] + ".par")
    rlks = int(round(res_target / float(par.range_pixel_spacing)))
    azlks = int(round(res_target / float(par.azimuth_pixel_spacing)))

    # perform multilooking
    for item in files_mli:
        run([
            "multi_look_MLI", item, item + ".par", item[:-3] + "mli2",
            item[:-3] + "mli2.par", rlks, azlks
        ],
            logpath=path_log)

    # gather all newly created MLIs
    files_mli = finder(tempdir, ["*_mli2"])

    # select master image
    master = files_mli[0]

    # names of all processing products, derived from the master name
    base = "_".join(master.split("_")[:-1]) + "_"
    dem_seg = base + "dem"
    lut = base + "lut"
    lut_fine = base + "lut_fine"
    sim_sar = base + "sim_sar"
    u = base + "u"
    v = base + "v"
    inc = base + "inc"
    psi = base + "psi"
    pix = base + "pix"
    ls_map = base + "ls_map"
    pixel_area = base + "pixel_area"
    pixel_area2 = base + "pixel_area2"
    offs = base + "offs"
    coffs = base + "coffs"
    coffsets = base + "coffsets"
    snr = base + "snr"
    ellipse_pixel_area = base + "ellipse_pixel_area"
    ratio_sigma0 = base + "ratio_sigma0"

    # read image parameter file for meta information
    par = ReadPar(master + ".par")

    # truncated incidence angle becomes part of the output name
    incidence = str(int(float(par.incidence_angle)))

    outname_base = outname_base + "_" + incidence

    ######################################################################
    # collect srtm files and mosaic them

    # define a name for the output mosaic
    name_srtm = os.path.join(tempdir, "srtm")

    # collect srtm tiles (if tiles are not found in the defined srtm directory, they are automatically downloaded to the temporary directory)
    targets = srtm.hgt_collect([x + ".par" for x in files_mli],
                               tempdir,
                               demdir=srtmdir)

    print "preparing SRTM data..."
    srtm.mosaic(targets, name_srtm)

    # interpolate data gaps
    srtm.fill(name_srtm, name_srtm + "_fill", path_log, replace=True)
    name_srtm += "_fill"

    # project DEM to UTM
    if transform:
        srtm.transform(name_srtm, name_srtm + "_utm")
        name_srtm += "_utm"
    ######################################################################
    # create DEM products
    print "sar image simulation..."
    try:
        run([
            "gc_map", master + ".par", "-", name_srtm + ".par", name_srtm,
            dem_seg + ".par", dem_seg, lut, dem_ovs, dem_ovs, sim_sar, u, v,
            inc, psi, pix, ls_map, 8, func_interp
        ],
            logpath=path_log)
    except IOError:
        print "...failed"
        return

    ######################################################################
    print "initial pixel area estimation..."
    run([
        "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut, ls_map,
        inc, pixel_area
    ],
        logpath=path_log)

    ######################################################################
    print "exact offset estimation..."
    try:
        inlist = ["", "0 0", "100 100", "128 128", 7.0]
        run(["create_diff_par", master + ".par", "-", master + "_diff.par", 1],
            inlist=inlist,
            logpath=path_log)
        run([
            "offset_pwrm", master, pixel_area, master + "_diff.par", offs, snr,
            128, 128, offs + ".txt", "-", 200, 200, 7.0
        ],
            logpath=path_log)
    # NOTE(review): bare except hides the real error (including
    # KeyboardInterrupt); narrow to the exceptions run() can raise
    except:
        print "...failed"
        return

    ######################################################################
    print "computation of offset polynomials..."
    try:
        run([
            "offset_fitm", offs, snr, master + "_diff.par", coffs, coffsets,
            "-", 4, 0
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "supplementing lookuptable with offset polynomials..."
    try:
        sim_width = ReadPar(dem_seg + ".par").width
        run(["gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine, 0],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "refined pixel area estimation..."
    try:
        run([
            "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut_fine,
            ls_map, inc, pixel_area2
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "radiometric calibration and normalization..."
    try:
        slc_width = ReadPar(master + ".par").range_samples
        run([
            "radcal_MLI", master, master + ".par", "-", master + "_cal", "-",
            0, 0, 1, 0.0, "-", ellipse_pixel_area
        ],
            logpath=path_log)
        run([
            "ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0, slc_width,
            1, 1
        ],
            logpath=path_log)
        for item in files_mli:
            run([
                "product", item, ratio_sigma0, item + "_pixcal", slc_width, 1,
                1
            ],
                logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    print "backward geocoding, normalization and conversion to dB..."
    for item in files_mli:
        run([
            "geocode_back", item + "_pixcal", slc_width, lut_fine,
            item + "_geo", sim_width, 0, func_geoback
        ],
            logpath=path_log)

        run([
            "lin_comb", "1", item + "_geo", 0,
            math.cos(math.radians(float(par.incidence_angle))),
            item + "_geo_flat", sim_width
        ],
            logpath=path_log)
        run([
            "sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
            sim_width
        ],
            logpath=path_log)

    ######################################################################

    print "creating final tiff files..."
    for item in finder(tempdir, ["*_geo_norm"]):
        # polarization (e.g. 'vv') read from the file name
        polarization = re.findall("[HV]{2}", os.path.basename(item))[0].lower()
        outname = outname_base + "_" + polarization
        run([
            "data2geotiff", dem_seg + ".par", item, 2,
            outname + "_geocoded_norm.tif"
        ],
            logpath=path_log)
        # keep the matching annotation file alongside the product
        annotation_dir = os.path.join(tempdir, "annotation")
        annotation = os.path.join(annotation_dir, [
            x for x in os.listdir(annotation_dir)
            if polarization in os.path.basename(x)
        ][0])
        os.rename(annotation, outname + "_annotation.xml")

    ######################################################################
    print "cleaning up..."
    # copy, rename and edit quicklook kml and png
    shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                    outname_base + "_quicklook.kml")
    shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                    outname_base + "_quicklook.png")
    with open(outname_base + "_quicklook.kml", "r") as infile:
        kml = infile.read().replace(
            "quick-look.png",
            os.path.basename(outname_base + "_quicklook.png"))
    with open(outname_base + "_quicklook.kml", "w") as outfile:
        outfile.write(kml)

    if not intermediates:
        shutil.rmtree(tempdir)

    if logfiles:
        os.rename(path_log, outname_base + "_log")

    print "...done:", asctime()
    print "##########################################"
Beispiel #19
0
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles, intermediates,
         verbose):

    with tempfile.NamedTemporaryFile(delete=False, dir=outdir) as mainlog:

        # Definition geocode_back interpolation function
        # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
        # 0 - Nearest Neighbor
        # 1 - Bicubic Spline
        # 2 - Bicubic Spline-Log
        func_geoback = 2

        # function for interpolation of layover/shadow/foreshortening/DEM gaps
        # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
        func_interp = 0

        # Definition of the multi-look factor
        # Enter the number of looks you want to use in range and azimuth"
        # Try to achieve Level 1.5 azimuth pixel dimension and squarish pixels in GR image"
        # number of looks in range
        ml_rg = 4
        # number of looks in azimuth
        ml_az = 2

        # DEM oversampling factor"
        # for S1 GRDH: final resolution: 20m
        # 30m SRTM:
        # dem_ovs = 1.5
        # 90m SRTM:
        dem_ovs = 5

        message = "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
            zipfile[:-4], asctime())
        print message if verbose else mainlog.writelines(message)
        if not verbose:
            os.rename(mainlog.name, os.path.join(outdir, "main.log"))
        ######################################################################

        pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"

        # unzip the dataset
        try:
            with zf.ZipFile(zipfile, "r") as z:
                scene = sorted(z.namelist())[0].strip("/")
                match = re.match(pattern, scene)
                orbit = "D" if float(
                    re.findall("[0-9]{6}",
                               match.group("start"))[1]) < 120000 else "A"
                outname_base = "_".join([
                    os.path.join(outdir, match.group("sat")),
                    match.group("beam"),
                    match.group("start").replace("T", "_"), orbit
                ])

                if not os.path.exists(os.path.join(tempdir, scene)) and len(
                        finder(outdir, [os.path.basename(outname_base)],
                               regex=True)) == 0:
                    if not z.testzip():
                        if verbose:
                            print "unzipping data..."
                        # print z.testzip()
                        z.extractall(tempdir)
                        tempdir = os.path.join(tempdir, scene)
                    else:
                        print "Corrupt zip"
                        return
                else:
                    print "file already imported/processed"
                    return
        except ImportError:
            print "...skipped"
            return

        # create logfile folder if this option was selected
        if logfiles:
            path_log = outname_base + "_log"
            if os.path.exists(path_log):
                shutil.rmtree(path_log)
            os.makedirs(path_log)
        else:
            path_log = None

        ######################################################################
        print "converting to GAMMA format..."
        try:
            run([
                "/usr/local/bin/python2.7",
                os.path.join(os.getcwd(), "reader.py"), tempdir
            ],
                outdir=tempdir,
                logpath=path_log)
        except ImportError:
            print "...failed"
            return

        files_slc = finder(tempdir, ["*_slc"])
        if len(files_slc) > 0:
            if verbose:
                print "multilooking..."
            for item in files_slc:
                run([
                    "multi_look", item, item + ".par", item[:-3] + "mli",
                    item[:-3] + "mli.par", ml_rg, ml_az
                ],
                    logpath=path_log)

        files_mli = finder(tempdir, ["*_mli"])

        master = files_mli[0]

        base = master[:-3]
        dem_seg = base + "dem"
        lut = base + "lut"
        lut_fine = base + "lut_fine"
        sim_sar = base + "sim_sar"
        u = base + "u"
        v = base + "v"
        inc = base + "inc"
        psi = base + "psi"
        pix = base + "pix"
        ls_map = base + "ls_map"
        pixel_area = base + "pixel_area"
        pixel_area2 = base + "pixel_area2"
        offs = base + "offs"
        coffs = base + "coffs"
        coffsets = base + "coffsets"
        snr = base + "snr"
        ellipse_pixel_area = base + "ellipse_pixel_area"
        ratio_sigma0 = base + "ratio_sigma0"

        # read image parameter file for meta information

        par = ReadPar(master + ".par")

        incidence = str(int(float(par.incidence_angle)))

        outname_base = outname_base + "_" + incidence

        ######################################################################
        if verbose:
            print "mosaicing SRTM data..."

        name_srtm = os.path.join(tempdir, "srtm")

        # extract corner coordinates from gamma parameter files and concatenate names of required hgt files
        lat, lon = srtm.latlon([x + ".par" for x in files_mli])
        target_ids = srtm.hgt(lat, lon)

        # search for required tiles in the defined srtm directory
        targets = finder(srtmdir, target_ids)

        # copy hgt files to temporary directory
        if len(targets) > 0:
            for item in targets:
                shutil.copy(item, tempdir)
            targets = finder(tempdir, target_ids)
        else:
            print "...failed"
            return

        # create gamma parameter files for all DEMs
        srtm.dempar(targets)

        # mosaic hgt files
        srtm.mosaic(targets, name_srtm)

        # interpolate data gaps
        srtm.replace(name_srtm, name_srtm + "_fill", path_log)
        os.remove(name_srtm)
        os.remove(name_srtm + ".par")
        name_srtm += "_fill"

        # project DEM to UTM
        if transform:
            if verbose:
                print "reprojecting DEM..."
            srtm.transform(name_srtm, name_srtm + "_utm")
            name_srtm += "_utm"

        # remove hgt files from temporary directory
        for item in targets:
            os.remove(item)
            os.remove(item + ".par")
        ######################################################################
        # create DEM products; command is automatically chosen based on  SAR imagery parameter file entries (flawless functioning yet to be tested)
        if verbose:
            print "sar image simulation..."
        try:
            if ReadPar(master + ".par").image_geometry == "GROUND_RANGE":
                run([
                    "gc_map_grd", master + ".par", name_srtm + ".par",
                    name_srtm, dem_seg + ".par", dem_seg, lut, dem_ovs,
                    dem_ovs, sim_sar, u, v, inc, psi, pix, ls_map, 8,
                    func_interp
                ],
                    logpath=path_log)
            else:
                run([
                    "gc_map", master + ".par", "-", name_srtm + ".par",
                    name_srtm, dem_seg + ".par", dem_seg, lut, dem_ovs,
                    dem_ovs, sim_sar, u, v, inc, psi, pix, ls_map, 8,
                    func_interp
                ],
                    logpath=path_log)
        except IOError:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "initial pixel area estimation..."
        run([
            "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut,
            ls_map, inc, pixel_area
        ],
            logpath=path_log)

        ######################################################################
        if verbose:
            print "exact offset estimation..."
        try:
            inlist = ["", "0 0", "100 100", "128 128", "7.0"]
            run([
                "create_diff_par", master + ".par", "-", master + "_diff.par",
                1
            ],
                inlist=inlist,
                logpath=path_log)
            run([
                "offset_pwrm", master, pixel_area, master + "_diff.par", offs,
                snr, 128, 128, offs + ".txt", "-", 200, 200, 7.0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "computation of offset polynomials..."
        try:
            run([
                "offset_fitm", offs, snr, master + "_diff.par", coffs,
                coffsets, "-", 4, 0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "supplementing lookuptable with offset polynomials..."
        try:
            sim_width = ReadPar(dem_seg + ".par").width
            run([
                "gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine,
                0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "refined pixel area estimation..."
        try:
            run([
                "pixel_area", master + ".par", dem_seg + ".par", dem_seg,
                lut_fine, ls_map, inc, pixel_area2
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "radiometric calibration and normalization..."
        try:
            slc_width = ReadPar(master + ".par").range_samples
            run([
                "radcal_MLI", master, master + ".par", "-", master + "_cal",
                "-", 0, 0, 1, 0.0, "-", ellipse_pixel_area
            ],
                logpath=path_log)
            run([
                "ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0,
                slc_width, 1, 1
            ],
                logpath=path_log)
            for item in files_mli:
                run([
                    "product", item, ratio_sigma0, item + "_pixcal", slc_width,
                    1, 1
                ],
                    logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "backward geocoding, normalization and conversion to dB..."
        for item in files_mli:
            run([
                "geocode_back", item + "_pixcal", slc_width, lut_fine,
                item + "_geo", sim_width, 0, func_geoback
            ],
                logpath=path_log)

            run([
                "lin_comb", "1", item + "_geo", 0,
                math.cos(math.radians(float(par.incidence_angle))),
                item + "_geo_flat", sim_width
            ],
                logpath=path_log)
            run([
                "sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
                sim_width
            ],
                logpath=path_log)

        ######################################################################

        print "creating final tiff files..."
        for item in finder(tempdir, ["*_geo_norm"]):
            polarization = re.findall("[HV]{2}",
                                      os.path.basename(item))[0].lower()
            outname = outname_base + "_" + polarization
            run([
                "data2geotiff", dem_seg + ".par", item, 2,
                outname + "_geocoded_norm.tif"
            ],
                logpath=path_log)
            annotation_dir = os.path.join(tempdir, "annotation")
            annotation = os.path.join(annotation_dir, [
                x for x in os.listdir(annotation_dir)
                if polarization in os.path.basename(x)
            ][0])
            os.rename(annotation, outname + "_annotation.xml")

        ######################################################################
        if verbose:
            print "cleaning up..."
        # copy, rename and edit quicklook kml and png
        shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                        outname_base + "_quicklook.kml")
        shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                        outname_base + "_quicklook.png")
        with open(outname_base + "_quicklook.kml", "r") as infile:
            kml = infile.read().replace(
                "quick-look.png",
                os.path.basename(outname_base + "_quicklook.png"))
        with open(outname_base + "_quicklook.kml", "w") as outfile:
            outfile.write(kml)

        if not intermediates:
            shutil.rmtree(tempdir)

        if logfiles:
            os.rename(path_log, outname_base + "_log")

        if verbose:
            print "...done:", asctime()
            print "##########################################"
# find flattened interferograms
list_flt = finder(path_out, ["*_int_diff"]) if differential else finder(path_out, ["*_flt"])

if len(list_flt) > 0:
    print "#############################################"
    print "estimation started..."

    for name_flt in list_flt:
        # extract timestamps from flt name
        id_pwr = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)", name_flt)
        # find mli/rmli images matching the extracted timestamps
        try:
            name_pwr1 = finder(os.getcwd(), ["*"+id_pwr[0]+"_mli"])[0]
            name_pwr2 = finder(os.getcwd(), ["*"+id_pwr[1]+"_reg_mli"])[0]
        except:
            raise IOError("multilooked images missing")
        # concatenate coherence image name
        name_cc = name_flt+"_cc_wave"
        print os.path.basename(name_cc)
        # read image samples
        samples = str(ISPPar(name_pwr1 + '.par').range_samples)
        # run gamma command
        run(["cc_wave", name_flt, name_pwr1, name_pwr2, name_cc, samples, par.bx, par.by, par.wgt_wave], path_out, path_log)

    print "...estimation finished"
    print "#############################################"
else:
    print "#############################################"
    print "no {0} interferograms found".format("differential" if differential else "flattened")
    print "#############################################"
Beispiel #21
0
            os.remove(x + ".hdr")
            os.remove(x + ".aux.xml")

        # update processing list with new names
        processlist = [x[:-1] for x in processlist]

        # create table textfile for temporal filter
        name_ftab = os.path.join(os.getcwd(), "PAR/tab_tempfilt_" + tag)
        with open(name_ftab, "w") as out:
            for image in processlist:
                out.write(image + "\t" + image[:-3] + "tfilt\n")

        # read header file
        meta = HDRobject(processlist[0][:-3] + "tfilt.hdr")

        # perform temporal filtering
        run([
            "temp_filt", name_ftab, meta.samples, par.waz, par.wr,
            par.wgt_tfilt
        ], os.getcwd(), path_log)

        for x in processlist:
            os.remove(x)
if counter == 0:
    print "#############################################"
    print "no candidates for filtering found"
    print "#############################################"
else:
    print "...done"
    print "#############################################"
Beispiel #22
0
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# create list of scene tuple objects
# all images following the defined patterns within the same folder (i.e. the same acquisition) will be grouped together
tuples = grouping()

print "#############################################"
print "creating pauli decomposition..."

for scene in tuples:
    if len(set(["HH_slc", "VV_slc", "HV_slc"])
           & set(scene.__dict__.keys())) == 3:
        print scene.basename
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        name_out = os.path.join(path_out,
                                os.path.basename(scene.HH_slc)[:-6] + "pauli")
        run([
            "pauli", scene.HH_slc, scene.VV_slc, scene.HV_slc, scene.HH_slc +
            ".par", scene.VV_slc + ".par", scene.HV_slc + ".par", name_out
        ], os.getcwd(), path_log)

# rename files to consistent pattern
for filename in finder(os.getcwd(), ["*.slc*"]):
    os.rename(filename, filename.replace(".slc", "_slc"))

print "...done"
print "#############################################"
Beispiel #23
0
        os.mkdir(path)

# collect all interferograms produced so far
interferograms = finder(path_out, ["*int"])

if len(interferograms) > 0:
    print "#############################################"
    print "baseline estimation started..."
    for name_int in interferograms:
        # derive offset and baseline file names from the interferogram name
        name_off = name_int[:-3] + "off"
        name_base = name_int[:-3] + "base_init"
        print os.path.basename(name_base)

        # locate the master SLC via the first timestamp id in the
        # interferogram name; "_cal" is optional (calibrated or not)
        slc = finder(os.getcwd(), [
            re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?",
                       name_int)[0]
        ])[0]
        # rslc = finder(os.getcwd(), [re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)", name_int)[1]+"_reg"])[0]
        # the coregistered slave SLC shares the interferogram's base name
        rslc = name_int[:-3] + "reg"

        # estimate the initial baseline with GAMMA base_init; estimation
        # parameters are read from the external parameter object "par"
        run([
            "base_init", slc + ".par", rslc + ".par", name_off, name_int,
            name_base, par.method_flag, par.nrfft, par.nazfft, par.r_samp,
            par.az_line
        ], path_out, path_log)

    print "...done"
    print "#############################################"
else:
    print "#############################################"
    print "no interferograms found"
    print "#############################################"
    # NOTE(review): everything from here to the end of this else-branch
    # appears to belong to a different (DEM preparation) snippet that was
    # spliced in at the wrong indentation level — data type conversion of a
    # DEM has no relation to missing interferograms; confirm against the
    # original script
    print "converting data type"
    # convert 8/16 bit integer DEMs to 32 bit float via GAMMA tools;
    # header.data_type presumably follows the ENVI header convention
    sp.check_call([{
        "1": "uchar2float",
        "2": "short2float"
    }[header.data_type], dem, dem + "_f"],
                  stdout=sp.PIPE)
    os.remove(dem)
    os.rename(dem + "_f", dem)
    # ENVI data type 4 denotes 32 bit float
    header.data_type = "4"
    process += 1

# rewrite the ENVI header if anything was changed
if process > 0:
    hdr(header)

# create gamma parameter file
if not os.path.isfile(dem + ".par"):
    print "creating parameter file"
    # southern hemisphere UTM zones need a 10,000 km false northing
    false_northing = "0" if "North" in header.map_info else "10000000"
    # ENVI map_info order: ..., x/y posting at indices 5/6, top left at 3/4
    posting = "-" + header.map_info[6] + " " + header.map_info[5]
    topleft = header.map_info[4] + " " + header.map_info[3]
    # interactive input sequence for GAMMA create_dem_par
    dempar = [
        "UTM", "WGS84", "1", header.map_info[7], false_northing,
        os.path.basename(dem), "", "", "", header.samples, header.lines,
        posting, topleft
    ]
    run(["create_dem_par", dem + ".par"], path_dem, path_log, inlist=dempar)
    process += 1

if process == 0:
    print "nothing to be done"
Beispiel #25
0
    if not os.path.exists(path):
        os.makedirs(path)

list_K_dB = {"PSR1": "-115.0"}

list_slc = finder(os.getcwd(), ["*_slc"])

if len(list_slc) > 0:
    print "#############################################"
    print "calibration started..."

    for name_slc in list_slc:
        sensor = name_slc.split("_")[0]
        if sensor in list_K_dB:
            K_dB = list_K_dB[sensor]
        else:
            print "calibration for sensor " + sensor + "not implemented"

        name_cslc = name_slc[:-3] + "cslc"

        run([
            "radcal_SLC", name_slc, name_slc + ".par", name_cslc,
            name_cslc + ".par", "1", "-", "0", "0", "0", "0", "-", K_dB
        ], path_out, path_log)

    print "...done"
    print "#############################################"
else:
    print "#############################################"
    print "no SLCs found"
    print "#############################################"
Beispiel #26
0
par = ReadPar(meta)
mlk = Spacing(par, sys.argv[1])

# define (and create) directories for logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

print "slc range pixel spacing (slant, ground):", par.range_pixel_spacing, mlk.groundRangePS
print "slc azimuth pixel spacing:", par.azimuth_pixel_spacing
print "number of looks looks (range, azimuth):", mlk.rlks, mlk.azlks
print "mli range pixel spacing (slant, ground):", int(mlk.rlks) * float(
    par.range_pixel_spacing), int(mlk.rlks) * mlk.groundRangePS
print "mli azimuth pixel spacing:", int(mlk.azlks) * float(
    par.azimuth_pixel_spacing)
print "-----------"

# concatenate output names
out_data = data + "_mli"
out_meta = out_data + ".par"

# set scaling factor
scale = 0.000001 if "ERS" in par.sensor else 1.0

# perform gamma command
run([
    "multi_look", data, meta, out_data, out_meta, mlk.rlks, mlk.azlks, "-",
    "-", scale
], os.getcwd(), path_log)
hdr(out_meta)