Exemplo n.º 1
0
def transform(infile, outfile, posting=90):
    """Reproject a DEM from geographic coordinates to UTM.

    infile and outfile are DEM base names; the matching GAMMA parameter
    files carry a ".par" suffix. posting is the target pixel spacing in
    meters (default 90).
    """
    # metadata of the input DEM
    par = ReadPar(infile + ".par")

    # derive the UTM zone from the corner coordinate of the DEM
    utm = UTM(infile + ".par")

    # clear any leftovers of a previous run
    for fname in [outfile + ".par", outfile]:
        if os.path.isfile(fname):
            os.remove(fname)

    # scenes on the southern hemisphere (negative corner latitude)
    # require a false northing of 10,000 km
    falsenorthing = "10000000." if "-" in par.corner_lat else "0"

    # interactive input for create_dem_par: projection, datum, zone,
    # false northing, output name, several defaults, and the posting
    # (negative in northing direction, positive in easting)
    spacing = "-" + str(posting) + " " + str(posting)
    inlist = ["UTM", "WGS84", 1, utm.zone, falsenorthing, outfile] + [""] * 5 + [spacing, ""]
    run(["create_dem_par", outfile + ".par"], inlist=inlist)

    # reproject the DEM into the geometry of the new parameter file
    run(["dem_trans", infile + ".par", infile, outfile + ".par", outfile] + ["-"] * 3 + [1])
Exemplo n.º 2
0
    def __init__(self, args):
        """Build a parameter dialog window for the selected GUI command.

        args bundles the command description: args[0] holds the window
        title and command name(s), args[1] the labels of the file query
        widgets, and the last two entries the parameter names and their
        default values.
        """
        Toplevel.__init__(self)
        self.config(Environment.bcolor)
        self.resizable(width=FALSE, height=FALSE)
        Frame(self, Environment.bcolor, height=10, bd=5).pack()

        # window title taken from the command description
        self.title = args[0][0]

        # command to execute; a single entry is used as-is, otherwise the
        # second and third entries (script name and output tag) are passed on
        self.action = args[0] if len(args[0]) == 1 else [args[0][1], args[0][2]]

        # collects the FileQuery widgets created below
        self.objList = []

        # define names of dropdown menu options
        self.dropnames = {}

        # extra vertical window space
        self.space = 0

        # add window label from args_extra
        if args[0][0] in Environment.args_extra:
            self.label = Label(self, Environment.header_ops, text=Environment.args_extra[args[0][0]])
            self.label.pack()
            self.space = 10

        # load previously stored parameters for this command if a matching
        # parameter textfile exists in the PAR subdirectory of the workdir
        if len(args[0]) > 1:
            parname = os.path.join(Environment.workdir.get(), "PAR", os.path.splitext(args[0][2])[0]+".par")
            if os.path.isfile(parname):
                self.params = ReadPar(parname, splits="[\t\n]")

        # one file query widget per requested input; type 2 is a directory
        # query, type 3 an output file, type 1 a plain input file
        for i in range(0, len(args[1])):
            if args[1][i] in ["import directory", "SRTM archive"]:
                self.objList.append(FileQuery(self, args[1][i], 2))
            elif args[1][i] == "output file":
                self.objList.append(FileQuery(self, args[1][i], 3))
            else:
                self.objList.append(FileQuery(self, args[1][i], 1))

        # define y-dimension of the dialog window
        self.ydim = 80 + self.space + len(args[1]) * 30 + len(args[3]) * 30

        # check whether parameter textfile for the chosen command exists;
        # if so then use the parameters from the file, otherwise use the defaults defined in class Main (gammaGUI.py)

        if hasattr(self, "params"):
            if "SRTM_archive" in self.params.index:
                self.params.index.remove("SRTM_archive")
            self.ents = makeform(self, self.params.index, [getattr(self.params, attr) for attr in self.params.index if attr != "SRTM_archive"])
        else:
            self.ents = makeform(self, args[-2], args[-1])


        # set window appearance
        self.geometry("600x" + str(self.ydim))
        Frame(self, bg="white", height=2).pack({"fill": "x"})
        Frame(self, Environment.bcolor, height=10, bd=5).pack()

        # create execution button
        self.Action = Button(self, Environment.button_ops, text="Action", padx=40, command=lambda: execute(self.action, self.objList, self.ents))
        self.Action.pack()
Exemplo n.º 3
0
def fill(dem, dem_out, logpath, replace=False):
    """Interpolate data gaps in a DEM.

    Genuine zero heights are first lifted to one so that only nodata
    cells (-32768) remain zero, then the gaps are closed by weighted
    interpolation. A copy of the parameter file and an ENVI header are
    created for the result. If replace is True the input DEM and its
    ancillary files are deleted afterwards.
    """
    # image width required by the GAMMA commands
    width = ReadPar(dem + ".par").width

    # working directory for the output
    path_dem = os.path.dirname(dem_out)

    rpl_flg = 0
    dtype = 4

    temp1 = dem + "_temp"
    temp2 = dem + "_temp2"

    # lift true zero heights to one so they survive the nodata step
    run(["replace_values", dem, 0, 1, temp1, width, rpl_flg, dtype],
        path_dem, logpath)

    # convert the nodata value -32768 to zero for interpolation
    run(["replace_values", temp1, -32768, 0, temp2, width, rpl_flg, dtype],
        path_dem, logpath)

    # close the remaining zero-valued gaps by weighted interpolation
    r_max = 9
    np_min = 40
    np_max = 81
    w_mode = 2
    run(["interp_ad", temp2, dem_out, width, r_max, np_min, np_max, w_mode, dtype],
        path_dem, logpath)

    # discard the intermediate products
    os.remove(temp1)
    os.remove(temp2)

    # duplicate parameter file for newly created dem
    shutil.copy(dem + ".par", dem_out + ".par")

    # create ENVI header file
    hdr(dem_out + ".par")

    # optionally delete the original DEM together with its metadata files
    if replace:
        for suffix in ["", ".par", ".hdr", ".aux.xml"]:
            fname = dem + suffix
            if os.path.isfile(fname):
                os.remove(fname)
Exemplo n.º 4
0
def main():
    """Prepare an SRTM DEM mosaic covering all scenes in the working directory.

    Reads processing options from PAR/srtm_preparation.par, collects the
    required SRTM tiles, mosaics and gap-fills them, and optionally
    reprojects the result to UTM.
    """
    print "#############################################"
    print "preparing SRTM mosaic:"
    # read parameter textfile
    par = ReadPar(os.path.join(os.getcwd(), "PAR/srtm_preparation.par"))

    # use a local SRTM archive if one was configured and exists
    demdir = None
    if hasattr(par, "SRTM_archive"):
        if os.path.isdir(par.SRTM_archive):
            demdir = par.SRTM_archive

    # parameter files of all scenes that the DEM has to cover
    parfiles = finder(os.getcwd(), ["*slc.par", "*mli.par", "*cal.par"])

    # define (and create) directories for processing results and logfile
    path_dem = os.path.join(os.getcwd(), "DEM/")
    path_log = os.path.join(os.getcwd(), "LOG/GEO/")
    for path in [path_log, path_dem]:
        if not os.path.exists(path):
            os.makedirs(path)

    # find SRTM tiles for mosaicing
    demlist = hgt_collect(parfiles, path_dem, demdir=demdir)

    # remove files created by this function
    for item in finder(path_dem, ["mosaic*", "dem*", "*.par"]):
        os.remove(item)

    if len(demlist) == 0:
        raise IOError("no hgt files found")

    # perform mosaicing if multiple files are found
    if len(demlist) > 1:
        print "mosaicing..."
        dem = os.path.join(path_dem, "mosaic")
        mosaic(demlist, dem)
    else:
        # single tile: only a DEM parameter file is needed
        dem = demlist[0]
        dempar(dem)
    # interpolate voids and write the result to dem_final
    fill(dem, os.path.join(path_dem, "dem_final"), path_log)
    dem = os.path.join(path_dem, "dem_final")

    # transform DEM to UTM
    if par.utm == "True":
        print "transforming to UTM..."
        transform(dem, dem + "_utm", int(par.targetres))
        hdr(dem + "_utm.par")
    print "...done"
    print "#############################################"
Exemplo n.º 5
0
##############################################################
# Calculate covariance matrix C elements from HH, HV, and VV SLC data
# module of software gammaGUI
# John Truckenbrodt 2015
##############################################################

import os

from ancillary import grouping, run, finder, ReadPar

# directory for logfiles of the polarimetry (LAT) module commands
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# processing parameters written by the GUI
par = ReadPar(os.path.join(os.getcwd(), "PAR/mat_cov.par"))

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating covariance matrices..."

for scene in tuples:
    if len(set(["HH_slc", "VV_slc", "HV_slc", "HH_mli"]) & set(scene.__dict__.keys())) == 4:
        print scene.basename
        rlks = ReadPar(scene.HH_mli+".par").range_looks
        azlks = ReadPar(scene.HH_mli+".par").azimuth_looks
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        run(["polcovar", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.HH_slc+".par", scene.HV_slc+".par", scene.VV_slc+".par", os.path.basename(scene.basename),
Exemplo n.º 6
0
##############################################################
# Main GUI Interface                                         #
# John Truckenbrodt                                          #
##############################################################
import os

from ancillary import grouping, run, ReadPar

# directory for logfiles of the polarimetry (LAT) module commands
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# processing parameters written by the GUI
par = ReadPar(os.path.join(os.getcwd(), "PAR/circular.par"))

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "transforming scenes..."

for scene in tuples:
    if len(set(["HH_slc", "VV_slc", "HV_slc"])
           & set(scene.__dict__.keys())) == 3:
        print scene.basename
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        run([
            "lin_comb_cpx", "3", scene.HH_slc, scene.VV_slc, scene.HV_slc,
            par.constant_r, par.constant_i, par.factorHH_r, par.factorHH_i,
            par.factorVV_r, par.factorVV_i, par.factorHV_r, par.factorHV_i,
Exemplo n.º 7
0
##############################################################
# Cloude target decomposition from elements of scattering and coherency matrix
# module of software gammaGUI
# John Truckenbrodt 2015
##############################################################

import os

from ancillary import grouping, run, finder, ReadPar, hdr

# directory for logfiles of the polarimetry (LAT) module commands
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# processing parameters written by the GUI
par = ReadPar(os.path.join(os.getcwd(), "PAR/cloude.par"))

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating cloude decomposition..."

# number of scenes for which the decomposition was computed
counter = 0

for scene in tuples:
    if len({"HH_slc", "VV_slc", "HV_slc", "t12", "t13", "HH_mli"}
           & set(scene.__dict__.keys())) == 6:
        counter += 1
        print scene.basename
        rlks = ReadPar(scene.HH_mli + ".par").range_looks
        azlks = ReadPar(scene.HH_mli + ".par").azimuth_looks
Exemplo n.º 8
0
# directory for logfiles of the polarimetry (LAT) module commands
path_log = os.path.join(os.getcwd(), "LOG/LAT/")

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating krogager decomposition..."

for scene in tuples:
    # process only scenes providing all required layers: HH SLC/MLI plus
    # the circular-basis channels rl, ll and rr
    if len(
            set(["HH_slc", "rl", "ll", "rr", "HH_mli"])
            & set(scene.__dict__.keys())) == 5:
        print scene.basename
        path_out = os.path.dirname(scene.rl)
        mlipar = scene.HH_mli + ".par"
        # multilooking factors taken from the existing HH MLI
        rlks = ReadPar(mlipar).range_looks
        azlks = ReadPar(mlipar).azimuth_looks
        # sphere component: multilooked RL channel
        run([
            "multi_look", scene.rl, scene.HH_slc + ".par",
            scene.basename + "_sphere", mlipar, rlks, azlks
        ], path_out, path_log)
        # diplane and helix components from the LL and RR channels
        run([
            "diplane_helix", scene.ll, scene.rr, scene.HH_slc + ".par",
            scene.basename + "_diplane", scene.basename + "_helix", mlipar,
            rlks, azlks, "-", "-", "-"
        ], path_out, path_log)

        # ENVI header files for the three decomposition products
        for tag in ["_sphere", "_helix", "_diplane"]:
            hdr(scene.HH_mli + ".par",
                os.path.join(path_out, scene.basename) + tag + ".hdr")
Exemplo n.º 9
0
-resampling of slc file
"""

import sys

import os

from isp_parameterfile import ISPPar
from ancillary import ReadPar, run, Spacing

# retrieve additional arguments
# retrieve additional arguments: the two SLC scenes to coregister
slc1 = sys.argv[1]
slc2 = sys.argv[2]

# read processing parameter textfile
par = ReadPar(os.path.join(os.getcwd(), "PAR/coreg.par"))

# set SNR threshold (this should not be changed)
thres = 7.0

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.makedirs(path)

# concatenate output names from the basenames of both scenes
name_base = os.path.basename(slc1) + "_" + os.path.basename(slc2) + "_"
name_coffs = name_base + "coffs"
name_coffsets = name_base + "coffsets"
Exemplo n.º 10
0
from ancillary import ReadPar, grouping, run, hdr, Tuple, dissolve


# create list of scene tuple objects
tuples = grouping()

# name of the dem to be used for geocoding
dem = sys.argv[1]

# define (and create) directory for logfile
path_log = os.path.join(os.getcwd(), "LOG/GEO/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# read processing parameter textfile
par = ReadPar(os.path.join(os.getcwd(), "PAR/geocoding.par"), type="exe")

# perform topographic normalization?
tnorm = True if par.topographic_normalization == "True" else False

# set SNR threshold (this should not be changed)
thres = 7.0

for scene in tuples:

    # image ids in descending order of priority for master selection
    prioritytags = ["HH[_a-z]*_mli$", "VV[_a-z]*_mli$", "HV[_a-z]*_mli$", "VH[_a-z]*_mli$"]

    # select master
    master = ""
    for tag in prioritytags:
Exemplo n.º 11
0
import os
import re

from ancillary import finder, ReadPar, run

# read parameter file
par = ReadPar(os.path.join(os.getcwd(), "PAR/baseline.par"))

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        # makedirs (rather than mkdir) so that a missing parent
        # directory (LOG/) is created as well; this matches the other
        # processing scripts in this project
        os.makedirs(path)

# all previously generated interferograms
interferograms = finder(path_out, ["*int"])

if len(interferograms) > 0:
    print "#############################################"
    print "interferogram flattening started..."
    for name_int in interferograms:
        print os.path.basename(name_int)

        # retrieve full name of primary and secondary SLC files
        scenes = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?",
                            name_int)
        primary = finder(os.getcwd(), [scenes[0] + "$"], regex=True)[0]
        # secondary = finder(os.getcwd(), [scenes[1]+"_reg"], regex=True)[0]
        secondary = name_int[:-3] + "reg"

        # collect geocoding lookup tables
Exemplo n.º 12
0
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles,
         intermediates):
    """Process one Sentinel-1 zip archive into geocoded, normalized GeoTIFFs.

    Steps: unzip, convert to GAMMA format, multilook to the target
    resolution, prepare an SRTM DEM, simulate the SAR geometry, refine
    the geocoding lookup table with offset polynomials, calibrate
    radiometrically, geocode, and export GeoTIFFs plus quicklook files.

    zipfile:       path of the *.zip scene archive (NOTE(review): the name
                   shadows the stdlib zipfile module; the module is
                   imported as zf, so no conflict here)
    tempdir:       base directory for intermediate files
    outdir:        directory for the final products
    srtmdir:       directory holding SRTM tiles
    transform:     if True, project the DEM to UTM before geocoding
    logfiles:      if True, keep the GAMMA logfiles next to the results
    intermediates: if True, keep the temporary processing directory
    """

    # Definition geocode_back interpolation function
    # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
    # 0 - Nearest Neighbor
    # 1 - Bicubic Spline
    # 2 - Bicubic Spline-Log
    func_geoback = 2

    # function for interpolation of layover/shadow/foreshortening/DEM gaps
    # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
    func_interp = 0

    # define target resolution; multilooking factors will be computed automatically
    res_target = 20

    # set DEM resolution and compute oversampling factor
    res_dem = 100
    dem_ovs = res_dem // res_target

    print "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
        zipfile[:-4], asctime())

    ######################################################################

    # naming pattern of a Sentinel-1 SAFE container; named groups are used
    # below to assemble the output basename
    pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"

    # unzip the dataset
    try:
        with zf.ZipFile(zipfile, "r") as z:
            scene = sorted(z.namelist())[0].strip("/")
            match = re.match(pattern, scene)
            # orbit direction: acquisitions whose start time-of-day is
            # before 12:00:00 are tagged "D" (descending), otherwise "A"
            orbit = "D" if float(
                re.findall("[0-9]{6}",
                           match.group("start"))[1]) < 120000 else "A"
            outname_base = "_".join([
                os.path.join(outdir, match.group("sat")),
                match.group("beam"),
                match.group("start").replace("T", "_"), orbit
            ])

            # only extract if the scene was neither unpacked nor processed before
            if not os.path.exists(os.path.join(tempdir, scene)) and len(
                    finder(outdir, [os.path.basename(outname_base)],
                           regex=True)) == 0:
                # testzip returns None when the archive is intact
                if not z.testzip():
                    print "unzipping data..."
                    # print z.testzip()
                    z.extractall(tempdir)
                else:
                    print "corrupt zip"
                    return
            else:
                print "file already imported/processed"
                return
            tempdir = os.path.join(tempdir, scene)
    # NOTE(review): ImportError is not raised by zipfile operations; the
    # intended exception is presumably zf.BadZipfile - confirm
    except ImportError:
        print "...skipped"
        return

    # create logfile folder if this option was selected
    if logfiles:
        path_log = outname_base + "_log"
        if os.path.exists(path_log):
            shutil.rmtree(path_log)
        os.makedirs(path_log)
    else:
        path_log = None

    ######################################################################
    print "converting to GAMMA format..."
    # NOTE(review): run() seems unlikely to raise ImportError; confirm the
    # intended exception type
    try:
        run([sys.executable,
             os.path.join(os.getcwd(), "reader.py"), tempdir],
            outdir=tempdir,
            logpath=path_log)
    except ImportError:
        print "...failed"
        return

    # gather all imported files
    files_mli = finder(tempdir, ["*_mli"])

    # compute multilooking factors from the pixel spacing of the first MLI
    par = ReadPar(files_mli[0] + ".par")
    rlks = int(round(res_target / float(par.range_pixel_spacing)))
    azlks = int(round(res_target / float(par.azimuth_pixel_spacing)))

    # perform multilooking
    for item in files_mli:
        run([
            "multi_look_MLI", item, item + ".par", item[:-3] + "mli2",
            item[:-3] + "mli2.par", rlks, azlks
        ],
            logpath=path_log)

    # gather all newly created MLIs
    files_mli = finder(tempdir, ["*_mli2"])

    # select master image
    master = files_mli[0]

    # basenames of all intermediate geocoding products
    base = "_".join(master.split("_")[:-1]) + "_"
    dem_seg = base + "dem"
    lut = base + "lut"
    lut_fine = base + "lut_fine"
    sim_sar = base + "sim_sar"
    u = base + "u"
    v = base + "v"
    inc = base + "inc"
    psi = base + "psi"
    pix = base + "pix"
    ls_map = base + "ls_map"
    pixel_area = base + "pixel_area"
    pixel_area2 = base + "pixel_area2"
    offs = base + "offs"
    coffs = base + "coffs"
    coffsets = base + "coffsets"
    snr = base + "snr"
    ellipse_pixel_area = base + "ellipse_pixel_area"
    ratio_sigma0 = base + "ratio_sigma0"

    # read image parameter file for meta information
    par = ReadPar(master + ".par")

    incidence = str(int(float(par.incidence_angle)))

    # append the incidence angle to the output basename
    outname_base = outname_base + "_" + incidence

    ######################################################################
    # collect SRTM files and mosaic them

    # define a name for the output mosaic
    name_srtm = os.path.join(tempdir, "srtm")

    # collect srtm tiles (if tiles are not found in the defined srtm directory, they are automatically downloaded to the temporary directory)
    targets = srtm.hgt_collect([x + ".par" for x in files_mli],
                               tempdir,
                               demdir=srtmdir)

    print "preparing SRTM data..."
    srtm.mosaic(targets, name_srtm)

    # interpolate data gaps
    srtm.fill(name_srtm, name_srtm + "_fill", path_log, replace=True)
    name_srtm += "_fill"

    # project DEM to UTM
    if transform:
        srtm.transform(name_srtm, name_srtm + "_utm")
        name_srtm += "_utm"
    ######################################################################
    # create DEM products
    print "sar image simulation..."
    try:
        run([
            "gc_map", master + ".par", "-", name_srtm + ".par", name_srtm,
            dem_seg + ".par", dem_seg, lut, dem_ovs, dem_ovs, sim_sar, u, v,
            inc, psi, pix, ls_map, 8, func_interp
        ],
            logpath=path_log)
    except IOError:
        print "...failed"
        return

    ######################################################################
    print "initial pixel area estimation..."
    run([
        "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut, ls_map,
        inc, pixel_area
    ],
        logpath=path_log)

    ######################################################################
    print "exact offset estimation..."
    # NOTE(review): the bare except clauses below swallow every error
    # (including KeyboardInterrupt); consider narrowing them
    try:
        inlist = ["", "0 0", "100 100", "128 128", 7.0]
        run(["create_diff_par", master + ".par", "-", master + "_diff.par", 1],
            inlist=inlist,
            logpath=path_log)
        run([
            "offset_pwrm", master, pixel_area, master + "_diff.par", offs, snr,
            128, 128, offs + ".txt", "-", 200, 200, 7.0
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "computation of offset polynomials..."
    try:
        run([
            "offset_fitm", offs, snr, master + "_diff.par", coffs, coffsets,
            "-", 4, 0
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "supplementing lookuptable with offset polynomials..."
    try:
        sim_width = ReadPar(dem_seg + ".par").width
        run(["gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine, 0],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "refined pixel area estimation..."
    try:
        run([
            "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut_fine,
            ls_map, inc, pixel_area2
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "radiometric calibration and normalization..."
    try:
        slc_width = ReadPar(master + ".par").range_samples
        run([
            "radcal_MLI", master, master + ".par", "-", master + "_cal", "-",
            0, 0, 1, 0.0, "-", ellipse_pixel_area
        ],
            logpath=path_log)
        run([
            "ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0, slc_width,
            1, 1
        ],
            logpath=path_log)
        for item in files_mli:
            run([
                "product", item, ratio_sigma0, item + "_pixcal", slc_width, 1,
                1
            ],
                logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    print "backward geocoding, normalization and conversion to dB..."
    for item in files_mli:
        run([
            "geocode_back", item + "_pixcal", slc_width, lut_fine,
            item + "_geo", sim_width, 0, func_geoback
        ],
            logpath=path_log)

        run([
            "lin_comb", "1", item + "_geo", 0,
            math.cos(math.radians(float(par.incidence_angle))),
            item + "_geo_flat", sim_width
        ],
            logpath=path_log)
        run([
            "sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
            sim_width
        ],
            logpath=path_log)

    ######################################################################

    print "creating final tiff files..."
    for item in finder(tempdir, ["*_geo_norm"]):
        # polarization (e.g. "vv") extracted from the file name
        polarization = re.findall("[HV]{2}", os.path.basename(item))[0].lower()
        outname = outname_base + "_" + polarization
        run([
            "data2geotiff", dem_seg + ".par", item, 2,
            outname + "_geocoded_norm.tif"
        ],
            logpath=path_log)
        # move the matching annotation XML next to the product
        annotation_dir = os.path.join(tempdir, "annotation")
        annotation = os.path.join(annotation_dir, [
            x for x in os.listdir(annotation_dir)
            if polarization in os.path.basename(x)
        ][0])
        os.rename(annotation, outname + "_annotation.xml")

    ######################################################################
    print "cleaning up..."
    # copy, rename and edit quicklook kml and png
    shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                    outname_base + "_quicklook.kml")
    shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                    outname_base + "_quicklook.png")
    # point the kml to the renamed png
    with open(outname_base + "_quicklook.kml", "r") as infile:
        kml = infile.read().replace(
            "quick-look.png",
            os.path.basename(outname_base + "_quicklook.png"))
    with open(outname_base + "_quicklook.kml", "w") as outfile:
        outfile.write(kml)

    if not intermediates:
        shutil.rmtree(tempdir)

    if logfiles:
        os.rename(path_log, outname_base + "_log")

    print "...done:", asctime()
    print "##########################################"
Exemplo n.º 13
0
"""
input: SLC and RSLC file (to be passed by executing the script, i.e. python interferogram.py SLC RSLC
The following tasks are performed by executing this script:
-reading of a parameter file interferogram.par
--see object par for necessary values; file is automatically created by starting the script via the GUI
-if necessary, creation of output and logfile directories
-check whether corresponding coregistration offset file  exists
-interferogram generation
"""

import re
import os

from ancillary import finder, ReadPar, run

# read processing parameter textfile written by the GUI
par = ReadPar(os.path.join(os.getcwd(), "PAR/interferogram.par"), type="exe")

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        # makedirs (rather than mkdir) so that a missing parent
        # directory (LOG/) is created as well; this matches the other
        # processing scripts in this project
        os.makedirs(path)

# all previously generated coregistration offset files
offsets = finder(path_out, ["*off"])

if len(offsets) > 0:
    print "#############################################"
    print "interferogram generation started..."
    for name_off in offsets:
        name_int = name_off[:-3] + "int"
Exemplo n.º 14
0
# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating huynen decomposition..."
counter = 0
for scene in tuples:
    if len(
            union(["HH_slc", "VV_slc", "HV_slc", "t11", "t12", "t13"],
                  scene.__dict__.keys())) == 6:
        print scene.basename
        for i in range(1, 4):
            run([
                "HUYNEN_DEC", scene.HH_slc, scene.HV_slc, scene.VV_slc,
                scene.t11, scene.t12, scene.t13,
                ReadPar(scene.HH_slc + ".par").range_samples, scene.basename,
                str(i)
            ], os.path.dirname(scene.t11), path_log)
        counter += 1
if counter == 0:
    print "no appropriate scenes with existing coherence matrix found"
else:
    # rename files to consistent pattern
    for pattern in ["*.t*", "*.im", "*.re"]:
        for filename in finder(os.getcwd(), [pattern]):
            os.rename(
                filename,
                filename.replace(pattern.strip("*"),
                                 pattern.strip("*").replace(".", "_")))

print "...done"
Exemplo n.º 15
0
--crop these images to their common extent (using R package raster)
--swap bytes of files processed in R back to big endian for use in GAMMA
--write textfile table containing in one column the processing candidate and in the other the name of the filtered image
--perform multitemporal filtering (temp-filt) using the newly created textfile table
"""

import sys

import re
import os
import subprocess as sp

from ancillary import ReadPar, run, grouping, dissolve, HDRobject, hdr

# read processing parameter textfile
par = ReadPar(os.path.join(os.getcwd(), "PAR/tempfilter.par"), type="exe")

# use topographically normalized images?
tnorm = True if par.topographic_normalization == "True" else False

# define (and create) directory for logfile
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# create list of scene tuple objects
tuples = grouping()

counter = 0
for tag in [
        "HH", "VV", "HV", "VH", "pauli_alpha", "pauli_beta", "pauli_gamma",
Exemplo n.º 16
0
for scene in scenes:
    print "----------"
    # read leader file for meta information
    with open(os.path.join(scene, "LEA_01.001"), "r") as infile:
        text = [line for line in infile]
    text = "".join(text)

    # extract frame id
    frame_index = re.search("FRAME=", text).end()
    frame = text[frame_index:frame_index+4]

    # import the scene into GAMMA format under a temporary name
    tempname = os.path.join(os.getcwd(), "temp")
    print "importing..."
    run(["par_ESA_ERS", "LEA_01.001", tempname+".par", "DAT_01.001", tempname], scene, path_log, [""])
    par = ReadPar(tempname+".par")

    # assemble the final name <sensor>_<frame>_<timestamp>_VV_slc from metadata
    date = "".join([format(int(x), "02d") for x in par.date[0:3]])
    timestamp = date+"T"+time.strftime("%H%M%S", time.gmtime(round(float(par.center_time[0]))))
    outname = par.sensor+"_"+frame+"_"+timestamp+"_VV_slc"
    # scene directory: the output name without the trailing "_VV_slc"
    path_out = os.path.join(os.getcwd(), outname[:-7])
    if not os.path.exists(path_out):
        print outname
        os.makedirs(path_out)
        os.rename(tempname, os.path.join(path_out, outname))
        os.rename(tempname+".par", os.path.join(path_out, outname+".par"))
    else:
        # scene folder already present: discard the temporary import
        print "scene", outname, "already imported; removing temporary files"
        os.remove(tempname)
        os.remove(tempname+".par")
    outname = os.path.join(path_out, outname)
Exemplo n.º 17
0
# directory for logfiles of the polarimetry (LAT) module commands
path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating fd3c decomposition..."

for scene in tuples:
    # scene must provide the calibrated SLC channels, the t13 matrix
    # element and the calibrated HH MLI
    if len({"HH_slc_cal", "VV_slc_cal", "HV_slc_cal", "t13", "HH_slc_cal_mli"}
           & set(scene.__dict__.keys())) == 5:
        print scene.basename
        # NOTE(review): the condition above checks the *_cal attributes,
        # but the code below reads scene.HH_mli / scene.HH_slc etc.;
        # verify that the uncalibrated attributes are guaranteed to exist
        rlks = ReadPar(scene.HH_mli + ".par").range_looks
        azlks = ReadPar(scene.HH_mli + ".par").azimuth_looks
        run([
            "FD3C_DEC", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.t13,
            ReadPar(scene.HH_slc + ".par").range_samples, scene.basename, rlks,
            azlks
        ], os.path.dirname(scene.t13), path_log)
        # ENVI header files for the three fdd power components
        for tag in ["_fdd_pd", "_fdd_ps", "_fdd_pv"]:
            hdr(
                scene.HH_mli + ".par",
                os.path.join(os.path.dirname(scene.t13), scene.basename) +
                tag + ".hdr")
# rename files to consistent pattern
for pattern in ["*.fdd*"]:
    for filename in finder(os.getcwd(), [pattern]):
        os.rename(
Exemplo n.º 18
0
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles, intermediates,
         verbose):

    with tempfile.NamedTemporaryFile(delete=False, dir=outdir) as mainlog:

        # Definition geocode_back interpolation function
        # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
        # 0 - Nearest Neighbor
        # 1 - Bicubic Spline
        # 2 - Bicubic Spline-Log
        func_geoback = 2

        # function for interpolation of layover/shadow/foreshortening/DEM gaps
        # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
        func_interp = 0

        # Definition of the multi-look factor
        # Enter the number of looks you want to use in range and azimuth"
        # Try to achieve Level 1.5 azimuth pixel dimension and squarish pixels in GR image"
        # number of looks in range
        ml_rg = 4
        # number of looks in azimuth
        ml_az = 2

        # DEM oversampling factor"
        # for S1 GRDH: final resolution: 20m
        # 30m SRTM:
        # dem_ovs = 1.5
        # 90m SRTM:
        dem_ovs = 5

        message = "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
            zipfile[:-4], asctime())
        print message if verbose else mainlog.writelines(message)
        if not verbose:
            os.rename(mainlog.name, os.path.join(outdir, "main.log"))
        ######################################################################

        pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"

        # unzip the dataset
        try:
            with zf.ZipFile(zipfile, "r") as z:
                scene = sorted(z.namelist())[0].strip("/")
                match = re.match(pattern, scene)
                orbit = "D" if float(
                    re.findall("[0-9]{6}",
                               match.group("start"))[1]) < 120000 else "A"
                outname_base = "_".join([
                    os.path.join(outdir, match.group("sat")),
                    match.group("beam"),
                    match.group("start").replace("T", "_"), orbit
                ])

                if not os.path.exists(os.path.join(tempdir, scene)) and len(
                        finder(outdir, [os.path.basename(outname_base)],
                               regex=True)) == 0:
                    if not z.testzip():
                        if verbose:
                            print "unzipping data..."
                        # print z.testzip()
                        z.extractall(tempdir)
                        tempdir = os.path.join(tempdir, scene)
                    else:
                        print "Corrupt zip"
                        return
                else:
                    print "file already imported/processed"
                    return
        except ImportError:
            print "...skipped"
            return

        # create logfile folder if this option was selected
        if logfiles:
            path_log = outname_base + "_log"
            if os.path.exists(path_log):
                shutil.rmtree(path_log)
            os.makedirs(path_log)
        else:
            path_log = None

        ######################################################################
        print "converting to GAMMA format..."
        try:
            run([
                "/usr/local/bin/python2.7",
                os.path.join(os.getcwd(), "reader.py"), tempdir
            ],
                outdir=tempdir,
                logpath=path_log)
        except ImportError:
            print "...failed"
            return

        files_slc = finder(tempdir, ["*_slc"])
        if len(files_slc) > 0:
            if verbose:
                print "multilooking..."
            for item in files_slc:
                run([
                    "multi_look", item, item + ".par", item[:-3] + "mli",
                    item[:-3] + "mli.par", ml_rg, ml_az
                ],
                    logpath=path_log)

        files_mli = finder(tempdir, ["*_mli"])

        master = files_mli[0]

        base = master[:-3]
        dem_seg = base + "dem"
        lut = base + "lut"
        lut_fine = base + "lut_fine"
        sim_sar = base + "sim_sar"
        u = base + "u"
        v = base + "v"
        inc = base + "inc"
        psi = base + "psi"
        pix = base + "pix"
        ls_map = base + "ls_map"
        pixel_area = base + "pixel_area"
        pixel_area2 = base + "pixel_area2"
        offs = base + "offs"
        coffs = base + "coffs"
        coffsets = base + "coffsets"
        snr = base + "snr"
        ellipse_pixel_area = base + "ellipse_pixel_area"
        ratio_sigma0 = base + "ratio_sigma0"

        # read image parameter file for meta information

        par = ReadPar(master + ".par")

        incidence = str(int(float(par.incidence_angle)))

        outname_base = outname_base + "_" + incidence

        ######################################################################
        if verbose:
            print "mosaicing SRTM data..."

        name_srtm = os.path.join(tempdir, "srtm")

        # extract corner coordinates from gamma parameter files and concatenate names of required hgt files
        lat, lon = srtm.latlon([x + ".par" for x in files_mli])
        target_ids = srtm.hgt(lat, lon)

        # search for required tiles in the defined srtm directory
        targets = finder(srtmdir, target_ids)

        # copy hgt files to temporary directory
        if len(targets) > 0:
            for item in targets:
                shutil.copy(item, tempdir)
            targets = finder(tempdir, target_ids)
        else:
            print "...failed"
            return

        # create gamma parameter files for all DEMs
        srtm.dempar(targets)

        # mosaic hgt files
        srtm.mosaic(targets, name_srtm)

        # interpolate data gaps
        srtm.replace(name_srtm, name_srtm + "_fill", path_log)
        os.remove(name_srtm)
        os.remove(name_srtm + ".par")
        name_srtm += "_fill"

        # project DEM to UTM
        if transform:
            if verbose:
                print "reprojecting DEM..."
            srtm.transform(name_srtm, name_srtm + "_utm")
            name_srtm += "_utm"

        # remove hgt files from temporary directory
        for item in targets:
            os.remove(item)
            os.remove(item + ".par")
        ######################################################################
        # create DEM products; command is automatically chosen based on  SAR imagery parameter file entries (flawless functioning yet to be tested)
        if verbose:
            print "sar image simulation..."
        try:
            if ReadPar(master + ".par").image_geometry == "GROUND_RANGE":
                run([
                    "gc_map_grd", master + ".par", name_srtm + ".par",
                    name_srtm, dem_seg + ".par", dem_seg, lut, dem_ovs,
                    dem_ovs, sim_sar, u, v, inc, psi, pix, ls_map, 8,
                    func_interp
                ],
                    logpath=path_log)
            else:
                run([
                    "gc_map", master + ".par", "-", name_srtm + ".par",
                    name_srtm, dem_seg + ".par", dem_seg, lut, dem_ovs,
                    dem_ovs, sim_sar, u, v, inc, psi, pix, ls_map, 8,
                    func_interp
                ],
                    logpath=path_log)
        except IOError:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "initial pixel area estimation..."
        run([
            "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut,
            ls_map, inc, pixel_area
        ],
            logpath=path_log)

        ######################################################################
        if verbose:
            print "exact offset estimation..."
        try:
            inlist = ["", "0 0", "100 100", "128 128", "7.0"]
            run([
                "create_diff_par", master + ".par", "-", master + "_diff.par",
                1
            ],
                inlist=inlist,
                logpath=path_log)
            run([
                "offset_pwrm", master, pixel_area, master + "_diff.par", offs,
                snr, 128, 128, offs + ".txt", "-", 200, 200, 7.0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "computation of offset polynomials..."
        try:
            run([
                "offset_fitm", offs, snr, master + "_diff.par", coffs,
                coffsets, "-", 4, 0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "supplementing lookuptable with offset polynomials..."
        try:
            sim_width = ReadPar(dem_seg + ".par").width
            run([
                "gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine,
                0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "refined pixel area estimation..."
        try:
            run([
                "pixel_area", master + ".par", dem_seg + ".par", dem_seg,
                lut_fine, ls_map, inc, pixel_area2
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "radiometric calibration and normalization..."
        try:
            slc_width = ReadPar(master + ".par").range_samples
            run([
                "radcal_MLI", master, master + ".par", "-", master + "_cal",
                "-", 0, 0, 1, 0.0, "-", ellipse_pixel_area
            ],
                logpath=path_log)
            run([
                "ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0,
                slc_width, 1, 1
            ],
                logpath=path_log)
            for item in files_mli:
                run([
                    "product", item, ratio_sigma0, item + "_pixcal", slc_width,
                    1, 1
                ],
                    logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "backward geocoding, normalization and conversion to dB..."
        for item in files_mli:
            run([
                "geocode_back", item + "_pixcal", slc_width, lut_fine,
                item + "_geo", sim_width, 0, func_geoback
            ],
                logpath=path_log)

            run([
                "lin_comb", "1", item + "_geo", 0,
                math.cos(math.radians(float(par.incidence_angle))),
                item + "_geo_flat", sim_width
            ],
                logpath=path_log)
            run([
                "sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
                sim_width
            ],
                logpath=path_log)

        ######################################################################

        print "creating final tiff files..."
        for item in finder(tempdir, ["*_geo_norm"]):
            polarization = re.findall("[HV]{2}",
                                      os.path.basename(item))[0].lower()
            outname = outname_base + "_" + polarization
            run([
                "data2geotiff", dem_seg + ".par", item, 2,
                outname + "_geocoded_norm.tif"
            ],
                logpath=path_log)
            annotation_dir = os.path.join(tempdir, "annotation")
            annotation = os.path.join(annotation_dir, [
                x for x in os.listdir(annotation_dir)
                if polarization in os.path.basename(x)
            ][0])
            os.rename(annotation, outname + "_annotation.xml")

        ######################################################################
        if verbose:
            print "cleaning up..."
        # copy, rename and edit quicklook kml and png
        shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                        outname_base + "_quicklook.kml")
        shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                        outname_base + "_quicklook.png")
        with open(outname_base + "_quicklook.kml", "r") as infile:
            kml = infile.read().replace(
                "quick-look.png",
                os.path.basename(outname_base + "_quicklook.png"))
        with open(outname_base + "_quicklook.kml", "w") as outfile:
            outfile.write(kml)

        if not intermediates:
            shutil.rmtree(tempdir)

        if logfiles:
            os.rename(path_log, outname_base + "_log")

        if verbose:
            print "...done:", asctime()
            print "##########################################"
Exemplo n.º 19
0
input: SLC and RSLC file (to be passed by executing the script, i.e. python baseline.py SLC RSLC)
The following tasks are performed by executing this script:
-reading of a parameter file baseline.par
--see object par for necessary values; file is automatically created by starting the script via the GUI
-if necessary, creation of output and logfile directories
-check whether corresponding coregistration offset file and interferogram exist
-execution of initial baseline estimation and calculation of perpendicular and parallel components
"""

import re
import os

from ancillary import finder, ReadPar, run

# read parameter file (type="exe": the file was written by the GUI execution call)
par = ReadPar(os.path.join(os.getcwd(), "PAR/baseline.par"), type="exe")

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.mkdir(path)

# collect all previously generated interferograms from the output directory
interferograms = finder(path_out, ["*int"])

if len(interferograms) > 0:
    print "#############################################"
    print "baseline estimation started..."
    for name_int in interferograms:
        # derive the name of the corresponding coregistration offset file
        # by swapping the "int" suffix for "off"
        name_off = name_int[:-3] + "off"
Exemplo n.º 20
0
"""

import os
import re

from ancillary import finder, run, ReadPar
from isp_parameterfile import ISPPar

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.makedirs(path)

# read processing parameters written by the GUI (type="exe")
par = ReadPar(os.path.join(os.getcwd(), "PAR", "coherence_ad.par"), type="exe")

# retrieve additional arguments from script call
differential = True if par.differential == "True" else False

# find flattened interferograms; differentially flattened ones ("*_int_diff")
# take precedence if the differential option was selected
list_flt = finder(path_out, ["*_int_diff"]) if differential else finder(
    path_out, ["*_flt"])

if len(list_flt) > 0:
    print "#############################################"
    print "estimation started..."

    for name_flt in list_flt:
        # extract timestamps from flt name
        id_pwr = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)",
Exemplo n.º 21
0
import os

from ancillary import finder, ReadPar, run

# read parameter file
par = ReadPar(os.path.join(os.getcwd(), "PAR/cal_slc.par"))

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/GEO/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.makedirs(path)

list_K_dB = {"PSR1": "-115.0"}

list_slc = finder(os.getcwd(), ["*_slc"])

if len(list_slc) > 0:
    print "#############################################"
    print "calibration started..."

    for name_slc in list_slc:
        sensor = name_slc.split("_")[0]
        if sensor in list_K_dB:
            K_dB = list_K_dB[sensor]
        else:
            print "calibration for sensor " + sensor + "not implemented"

        name_cslc = name_slc[:-3] + "cslc"
Exemplo n.º 22
0
##############################################################
import sys

import os

from isp_parameterfile import ISPPar
from ancillary import run, hdr, Spacing, ReadPar

# receive input file
data = sys.argv[2]
meta = data + ".par"

print data

# read parameter file and compute multilooking parameters
par = ReadPar(meta)
mlk = Spacing(par, sys.argv[1])

# define (and create) directories for logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

print "slc range pixel spacing (slant, ground):", par.range_pixel_spacing, mlk.groundRangePS
print "slc azimuth pixel spacing:", par.azimuth_pixel_spacing
print "number of looks looks (range, azimuth):", mlk.rlks, mlk.azlks
print "mli range pixel spacing (slant, ground):", int(mlk.rlks) * float(
    par.range_pixel_spacing), int(mlk.rlks) * mlk.groundRangePS
print "mli azimuth pixel spacing:", int(mlk.azlks) * float(
    par.azimuth_pixel_spacing)
print "-----------"