Ejemplo n.º 1
0
def main():
    print "#############################################"
    print "preparing SRTM mosaic:"
    # read parameter textfile
    par = ReadPar(os.path.join(os.getcwd(), "PAR/srtm_preparation.par"))

    demdir = None
    if hasattr(par, "SRTM_archive"):
        if os.path.isdir(par.SRTM_archive):
            demdir = par.SRTM_archive

    parfiles = finder(os.getcwd(), ["*slc.par", "*mli.par", "*cal.par"])

    # define (and create) directories for processing results and logfile
    path_dem = os.path.join(os.getcwd(), "DEM/")
    path_log = os.path.join(os.getcwd(), "LOG/GEO/")
    for path in [path_log, path_dem]:
        if not os.path.exists(path):
            os.makedirs(path)

    # find SRTM tiles for mosaicing
    demlist = hgt_collect(parfiles, path_dem, demdir=demdir)

    # remove files created by this function
    for item in finder(path_dem, ["mosaic*", "dem*", "*.par"]):
        os.remove(item)

    if len(demlist) == 0:
        raise IOError("no hgt files found")

    # perform mosaicing if multiple files are found
    if len(demlist) > 1:
        print "mosaicing..."
        dem = os.path.join(path_dem, "mosaic")
        mosaic(demlist, dem)
    else:
        dem = demlist[0]
        dempar(dem)
    fill(dem, os.path.join(path_dem, "dem_final"), path_log)
    dem = os.path.join(path_dem, "dem_final")

    # transform DEM to UTM
    if par.utm == "True":
        print "transforming to UTM..."
        transform(dem, dem + "_utm", int(par.targetres))
        hdr(dem + "_utm.par")
    print "...done"
    print "#############################################"
Ejemplo n.º 2
0
import re
import os

from ancillary import finder, ReadPar, run

# read parameter file
par = ReadPar(os.path.join(os.getcwd(), "PAR/baseline.par"), type="exe")

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.mkdir(path)

# collect all previously generated interferograms
interferograms = finder(path_out, ["*int"])

if len(interferograms) > 0:
    print "#############################################"
    print "baseline estimation started..."
    for name_int in interferograms:
        # derived product names share the interferogram basename
        name_off = name_int[:-3] + "off"
        name_base = name_int[:-3] + "base_init"
        print os.path.basename(name_base)

        # locate the master SLC; the regex matches the project's
        # "<id><timestamp>_<polarization>_slc[_cal]" naming scheme
        # embedded in the interferogram name
        slc = finder(os.getcwd(), [
            re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?",
                       name_int)[0]
        ])[0]
        # rslc = finder(os.getcwd(), [re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)", name_int)[1]+"_reg"])[0]
        rslc = name_int[:-3] + "reg"
        # NOTE(review): the loop body continues beyond this excerpt
Ejemplo n.º 3
0
import subprocess as sp
import sys
from ancillary import finder

# batch-run the Sentinel-1 processing script on every GRDH zip archive
script = "/pvdata2/john/GAMMA/gammaGUI/S1_main.py"
zipdir = "/pvdata2/john/RADAR/Sentinel/originals"
tempdir = "/pvdata2/john/RADAR/Sentinel/test"
outdir = "/pvdata2/john/RADAR/Sentinel/test_out"
srtmdir = "/pvdata2/john/RADAR/Sentinel/srtm"

# gather all zipped scenes
files = finder(zipdir, ["*.zip"])

for scene in files:
    # only high-resolution ground range detected products are processed
    if "GRDH" not in scene:
        continue
    cmd = [sys.executable, script, "-l", "-i", scene, tempdir, outdir, srtmdir]
    sp.check_call(cmd)
Ejemplo n.º 4
0
from ancillary import grouping, run, finder, ReadPar

path_log = os.path.join(os.getcwd(), "LOG/LAT/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

par = ReadPar(os.path.join(os.getcwd(), "PAR/mat_cov.par"))

# create list of scene tuple objects
tuples = grouping()

print "#############################################"
print "creating covariance matrices..."

for scene in tuples:
    if len(set(["HH_slc", "VV_slc", "HV_slc", "HH_mli"]) & set(scene.__dict__.keys())) == 4:
        print scene.basename
        rlks = ReadPar(scene.HH_mli+".par").range_looks
        azlks = ReadPar(scene.HH_mli+".par").azimuth_looks
        path_out = os.path.join(os.path.dirname(scene.HH_slc), "POL/")
        if not os.path.exists(path_out):
            os.makedirs(path_out)
        run(["polcovar", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.HH_slc+".par", scene.HV_slc+".par", scene.VV_slc+".par", os.path.basename(scene.basename),
             os.path.basename(scene.basename)+"_mat_cov.par", rlks, azlks], path_out, path_log)

# rename files to consistent pattern
for filename in finder(os.getcwd(), ["*.c*"]):
    os.rename(filename, filename.replace(".c", "_c"))

print "...done"
print "#############################################"
Ejemplo n.º 5
0
def hgt_collect(parfiles, outdir, demdir=None):

    # concatenate required hgt tile names
    target_ids = hgt(parfiles)

    targets = []

    # define server and its subdirectories
    # tiff alternative (not implemented): ftp://srtm.csi.cgiar.org/SRTM_v41/SRTM_Data_GeoTIFF/
    server = "http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/"
    continents = [
        "Africa", "Australia", "Eurasia", "Islands", "North_America",
        "South_America"
    ]
    pattern = "[NS][0-9]{2}[EW][0-9]{3}"

    # if an additional dem directory has been defined, check this directory for required hgt tiles
    if demdir is not None:
        for item in finder(demdir, target_ids):
            targets.append(item)

    # check for additional potentially existing hgt tiles in the defined output directory
    for item in [
            os.path.join(outdir, x) for x in target_ids
            if os.path.isfile(os.path.join(outdir, x))
            and not re.search(x, "\n".join(targets))
    ]:
        targets.append(item)

    for item in targets:
        print item

    # search server for all required tiles, which were not found in the local directories
    if len(targets) < len(target_ids):
        print "searching for SRTM tiles on the server..."
        onlines = []
        for continent in continents:
            path = os.path.join(server, continent)
            response = urlopen(path).read()
            for item in re.findall(pattern + "[.]hgt.zip", response):
                outname = re.findall(pattern, item)[0] + ".hgt"
                if outname in target_ids and outname not in [
                        os.path.basename(x) for x in targets
                ]:
                    onlines.append(os.path.join(path, item))
        onlines = list(set(onlines))

        for item in onlines:
            print item

        # if additional tiles have been found online, download and unzip them to the local directory
        if len(onlines) > 0:
            print "downloading {0} SRTM tiles...".format(len(onlines))
            for candidate in onlines:
                localname = os.path.join(
                    outdir,
                    re.findall(pattern, candidate)[0] + ".hgt")
                infile = urlopen(candidate)
                with open(localname + ".zip", "wb") as outfile:
                    outfile.write(infile.read())
                infile.close()
                with zipfile.ZipFile(localname + ".zip", "r") as z:
                    z.extractall(outdir)
                os.remove(localname + ".zip")
                targets.append(localname)
    return targets
Ejemplo n.º 6
0
    the indicated time offset. (Gamma Remote Sensing (2003): ISP Reference Manual)
    """

import re
import os.path

from ancillary import finder, run

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.mkdir(path)

# collect all previously generated interferograms
list_int = finder(path_out, ["*_int"])

if len(list_int) > 0:
    print "#############################################"
    print "interferogram flattening started..."

    for name_int in list_int:
        # locate the master SLC parameter file; the regex matches the
        # project's "<id><timestamp>_<polarization>_slc[_cal]" naming
        # scheme embedded in the interferogram name
        slc_par = finder(os.getcwd(), [
            re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)",
                       name_int)[0]
        ])[0] + ".par"
        # the offset file shares the interferogram basename
        name_off = name_int[:-3] + "off"
        if not os.path.isfile(name_off):
            raise IOError("offset file missing")

        # NOTE(review): the branch body continues beyond this excerpt
        if os.path.isfile(name_int[:-3] + "base_refine"):
Ejemplo n.º 7
0
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles, intermediates,
         verbose):

    with tempfile.NamedTemporaryFile(delete=False, dir=outdir) as mainlog:

        # Definition geocode_back interpolation function
        # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
        # 0 - Nearest Neighbor
        # 1 - Bicubic Spline
        # 2 - Bicubic Spline-Log
        func_geoback = 2

        # function for interpolation of layover/shadow/foreshortening/DEM gaps
        # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
        func_interp = 0

        # Definition of the multi-look factor
        # Enter the number of looks you want to use in range and azimuth"
        # Try to achieve Level 1.5 azimuth pixel dimension and squarish pixels in GR image"
        # number of looks in range
        ml_rg = 4
        # number of looks in azimuth
        ml_az = 2

        # DEM oversampling factor"
        # for S1 GRDH: final resolution: 20m
        # 30m SRTM:
        # dem_ovs = 1.5
        # 90m SRTM:
        dem_ovs = 5

        message = "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
            zipfile[:-4], asctime())
        print message if verbose else mainlog.writelines(message)
        if not verbose:
            os.rename(mainlog.name, os.path.join(outdir, "main.log"))
        ######################################################################

        pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"

        # unzip the dataset
        try:
            with zf.ZipFile(zipfile, "r") as z:
                scene = sorted(z.namelist())[0].strip("/")
                match = re.match(pattern, scene)
                orbit = "D" if float(
                    re.findall("[0-9]{6}",
                               match.group("start"))[1]) < 120000 else "A"
                outname_base = "_".join([
                    os.path.join(outdir, match.group("sat")),
                    match.group("beam"),
                    match.group("start").replace("T", "_"), orbit
                ])

                if not os.path.exists(os.path.join(tempdir, scene)) and len(
                        finder(outdir, [os.path.basename(outname_base)],
                               regex=True)) == 0:
                    if not z.testzip():
                        if verbose:
                            print "unzipping data..."
                        # print z.testzip()
                        z.extractall(tempdir)
                        tempdir = os.path.join(tempdir, scene)
                    else:
                        print "Corrupt zip"
                        return
                else:
                    print "file already imported/processed"
                    return
        except ImportError:
            print "...skipped"
            return

        # create logfile folder if this option was selected
        if logfiles:
            path_log = outname_base + "_log"
            if os.path.exists(path_log):
                shutil.rmtree(path_log)
            os.makedirs(path_log)
        else:
            path_log = None

        ######################################################################
        print "converting to GAMMA format..."
        try:
            run([
                "/usr/local/bin/python2.7",
                os.path.join(os.getcwd(), "reader.py"), tempdir
            ],
                outdir=tempdir,
                logpath=path_log)
        except ImportError:
            print "...failed"
            return

        files_slc = finder(tempdir, ["*_slc"])
        if len(files_slc) > 0:
            if verbose:
                print "multilooking..."
            for item in files_slc:
                run([
                    "multi_look", item, item + ".par", item[:-3] + "mli",
                    item[:-3] + "mli.par", ml_rg, ml_az
                ],
                    logpath=path_log)

        files_mli = finder(tempdir, ["*_mli"])

        master = files_mli[0]

        base = master[:-3]
        dem_seg = base + "dem"
        lut = base + "lut"
        lut_fine = base + "lut_fine"
        sim_sar = base + "sim_sar"
        u = base + "u"
        v = base + "v"
        inc = base + "inc"
        psi = base + "psi"
        pix = base + "pix"
        ls_map = base + "ls_map"
        pixel_area = base + "pixel_area"
        pixel_area2 = base + "pixel_area2"
        offs = base + "offs"
        coffs = base + "coffs"
        coffsets = base + "coffsets"
        snr = base + "snr"
        ellipse_pixel_area = base + "ellipse_pixel_area"
        ratio_sigma0 = base + "ratio_sigma0"

        # read image parameter file for meta information

        par = ReadPar(master + ".par")

        incidence = str(int(float(par.incidence_angle)))

        outname_base = outname_base + "_" + incidence

        ######################################################################
        if verbose:
            print "mosaicing SRTM data..."

        name_srtm = os.path.join(tempdir, "srtm")

        # extract corner coordinates from gamma parameter files and concatenate names of required hgt files
        lat, lon = srtm.latlon([x + ".par" for x in files_mli])
        target_ids = srtm.hgt(lat, lon)

        # search for required tiles in the defined srtm directory
        targets = finder(srtmdir, target_ids)

        # copy hgt files to temporary directory
        if len(targets) > 0:
            for item in targets:
                shutil.copy(item, tempdir)
            targets = finder(tempdir, target_ids)
        else:
            print "...failed"
            return

        # create gamma parameter files for all DEMs
        srtm.dempar(targets)

        # mosaic hgt files
        srtm.mosaic(targets, name_srtm)

        # interpolate data gaps
        srtm.replace(name_srtm, name_srtm + "_fill", path_log)
        os.remove(name_srtm)
        os.remove(name_srtm + ".par")
        name_srtm += "_fill"

        # project DEM to UTM
        if transform:
            if verbose:
                print "reprojecting DEM..."
            srtm.transform(name_srtm, name_srtm + "_utm")
            name_srtm += "_utm"

        # remove hgt files from temporary directory
        for item in targets:
            os.remove(item)
            os.remove(item + ".par")
        ######################################################################
        # create DEM products; command is automatically chosen based on  SAR imagery parameter file entries (flawless functioning yet to be tested)
        if verbose:
            print "sar image simulation..."
        try:
            if ReadPar(master + ".par").image_geometry == "GROUND_RANGE":
                run([
                    "gc_map_grd", master + ".par", name_srtm + ".par",
                    name_srtm, dem_seg + ".par", dem_seg, lut, dem_ovs,
                    dem_ovs, sim_sar, u, v, inc, psi, pix, ls_map, 8,
                    func_interp
                ],
                    logpath=path_log)
            else:
                run([
                    "gc_map", master + ".par", "-", name_srtm + ".par",
                    name_srtm, dem_seg + ".par", dem_seg, lut, dem_ovs,
                    dem_ovs, sim_sar, u, v, inc, psi, pix, ls_map, 8,
                    func_interp
                ],
                    logpath=path_log)
        except IOError:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "initial pixel area estimation..."
        run([
            "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut,
            ls_map, inc, pixel_area
        ],
            logpath=path_log)

        ######################################################################
        if verbose:
            print "exact offset estimation..."
        try:
            inlist = ["", "0 0", "100 100", "128 128", "7.0"]
            run([
                "create_diff_par", master + ".par", "-", master + "_diff.par",
                1
            ],
                inlist=inlist,
                logpath=path_log)
            run([
                "offset_pwrm", master, pixel_area, master + "_diff.par", offs,
                snr, 128, 128, offs + ".txt", "-", 200, 200, 7.0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "computation of offset polynomials..."
        try:
            run([
                "offset_fitm", offs, snr, master + "_diff.par", coffs,
                coffsets, "-", 4, 0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "supplementing lookuptable with offset polynomials..."
        try:
            sim_width = ReadPar(dem_seg + ".par").width
            run([
                "gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine,
                0
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "refined pixel area estimation..."
        try:
            run([
                "pixel_area", master + ".par", dem_seg + ".par", dem_seg,
                lut_fine, ls_map, inc, pixel_area2
            ],
                logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "radiometric calibration and normalization..."
        try:
            slc_width = ReadPar(master + ".par").range_samples
            run([
                "radcal_MLI", master, master + ".par", "-", master + "_cal",
                "-", 0, 0, 1, 0.0, "-", ellipse_pixel_area
            ],
                logpath=path_log)
            run([
                "ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0,
                slc_width, 1, 1
            ],
                logpath=path_log)
            for item in files_mli:
                run([
                    "product", item, ratio_sigma0, item + "_pixcal", slc_width,
                    1, 1
                ],
                    logpath=path_log)
        except:
            print "...failed"
            return

        ######################################################################
        if verbose:
            print "backward geocoding, normalization and conversion to dB..."
        for item in files_mli:
            run([
                "geocode_back", item + "_pixcal", slc_width, lut_fine,
                item + "_geo", sim_width, 0, func_geoback
            ],
                logpath=path_log)

            run([
                "lin_comb", "1", item + "_geo", 0,
                math.cos(math.radians(float(par.incidence_angle))),
                item + "_geo_flat", sim_width
            ],
                logpath=path_log)
            run([
                "sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
                sim_width
            ],
                logpath=path_log)

        ######################################################################

        print "creating final tiff files..."
        for item in finder(tempdir, ["*_geo_norm"]):
            polarization = re.findall("[HV]{2}",
                                      os.path.basename(item))[0].lower()
            outname = outname_base + "_" + polarization
            run([
                "data2geotiff", dem_seg + ".par", item, 2,
                outname + "_geocoded_norm.tif"
            ],
                logpath=path_log)
            annotation_dir = os.path.join(tempdir, "annotation")
            annotation = os.path.join(annotation_dir, [
                x for x in os.listdir(annotation_dir)
                if polarization in os.path.basename(x)
            ][0])
            os.rename(annotation, outname + "_annotation.xml")

        ######################################################################
        if verbose:
            print "cleaning up..."
        # copy, rename and edit quicklook kml and png
        shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                        outname_base + "_quicklook.kml")
        shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                        outname_base + "_quicklook.png")
        with open(outname_base + "_quicklook.kml", "r") as infile:
            kml = infile.read().replace(
                "quick-look.png",
                os.path.basename(outname_base + "_quicklook.png"))
        with open(outname_base + "_quicklook.kml", "w") as outfile:
            outfile.write(kml)

        if not intermediates:
            shutil.rmtree(tempdir)

        if logfiles:
            os.rename(path_log, outname_base + "_log")

        if verbose:
            print "...done:", asctime()
            print "##########################################"
Ejemplo n.º 8
0
def main(zipfile, tempdir, outdir, srtmdir, transform, logfiles,
         intermediates):

    # Definition geocode_back interpolation function
    # method 1: negative values possible (e.g. in urban areas) - use method 2 to avoid this
    # 0 - Nearest Neighbor
    # 1 - Bicubic Spline
    # 2 - Bicubic Spline-Log
    func_geoback = 2

    # function for interpolation of layover/shadow/foreshortening/DEM gaps
    # 0: set to 0; 1: linear interpolation; 2: actual value; 3: nn-thinned
    func_interp = 0

    # define target resolution; multilooking factors will be computed automatically
    res_target = 20

    # set DEM resolution and compute oversampling factor
    res_dem = 100
    dem_ovs = res_dem // res_target

    print "##########################################\n%s\n-------------\nprocessing started: %s\n" % (
        zipfile[:-4], asctime())

    ######################################################################

    pattern = r"^(?P<sat>S1[AB])_(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_(?P<prod>SLC|GRD|OCN)(?:F|H|M|_)_(?:1|2)(?P<class>S|A)(?P<pols>SH|SV|DH|DV|HH|HV|VV|VH)_(?P<start>[0-9]{8}T[0-9]{6})_(?P<stop>[0-9]{8}T[0-9]{6})_(?:[0-9]{6})_(?:[0-9A-F]{6})_(?:[0-9A-F]{4})\.SAFE$"

    # unzip the dataset
    try:
        with zf.ZipFile(zipfile, "r") as z:
            scene = sorted(z.namelist())[0].strip("/")
            match = re.match(pattern, scene)
            orbit = "D" if float(
                re.findall("[0-9]{6}",
                           match.group("start"))[1]) < 120000 else "A"
            outname_base = "_".join([
                os.path.join(outdir, match.group("sat")),
                match.group("beam"),
                match.group("start").replace("T", "_"), orbit
            ])

            if not os.path.exists(os.path.join(tempdir, scene)) and len(
                    finder(outdir, [os.path.basename(outname_base)],
                           regex=True)) == 0:
                if not z.testzip():
                    print "unzipping data..."
                    # print z.testzip()
                    z.extractall(tempdir)
                else:
                    print "corrupt zip"
                    return
            else:
                print "file already imported/processed"
                return
            tempdir = os.path.join(tempdir, scene)
    except ImportError:
        print "...skipped"
        return

    # create logfile folder if this option was selected
    if logfiles:
        path_log = outname_base + "_log"
        if os.path.exists(path_log):
            shutil.rmtree(path_log)
        os.makedirs(path_log)
    else:
        path_log = None

    ######################################################################
    print "converting to GAMMA format..."
    try:
        run([sys.executable,
             os.path.join(os.getcwd(), "reader.py"), tempdir],
            outdir=tempdir,
            logpath=path_log)
    except ImportError:
        print "...failed"
        return

    # gather all imported files
    files_mli = finder(tempdir, ["*_mli"])

    # compute multilooking factors
    par = ReadPar(files_mli[0] + ".par")
    rlks = int(round(res_target / float(par.range_pixel_spacing)))
    azlks = int(round(res_target / float(par.azimuth_pixel_spacing)))

    # perform multilooking
    for item in files_mli:
        run([
            "multi_look_MLI", item, item + ".par", item[:-3] + "mli2",
            item[:-3] + "mli2.par", rlks, azlks
        ],
            logpath=path_log)

    # gather all newly created MLIs
    files_mli = finder(tempdir, ["*_mli2"])

    # select master image
    master = files_mli[0]

    base = "_".join(master.split("_")[:-1]) + "_"
    dem_seg = base + "dem"
    lut = base + "lut"
    lut_fine = base + "lut_fine"
    sim_sar = base + "sim_sar"
    u = base + "u"
    v = base + "v"
    inc = base + "inc"
    psi = base + "psi"
    pix = base + "pix"
    ls_map = base + "ls_map"
    pixel_area = base + "pixel_area"
    pixel_area2 = base + "pixel_area2"
    offs = base + "offs"
    coffs = base + "coffs"
    coffsets = base + "coffsets"
    snr = base + "snr"
    ellipse_pixel_area = base + "ellipse_pixel_area"
    ratio_sigma0 = base + "ratio_sigma0"

    # read image parameter file for meta information
    par = ReadPar(master + ".par")

    incidence = str(int(float(par.incidence_angle)))

    outname_base = outname_base + "_" + incidence

    ######################################################################
    # colelct srtm file sand mosaic them

    # define a name for the output mosaic
    name_srtm = os.path.join(tempdir, "srtm")

    # collect srtm tiles (if tiles are not found in the defined srtm directory, they are automatically downloaded to the temporary directory)
    targets = srtm.hgt_collect([x + ".par" for x in files_mli],
                               tempdir,
                               demdir=srtmdir)

    print "preparing SRTM data..."
    srtm.mosaic(targets, name_srtm)

    # interpolate data gaps
    srtm.fill(name_srtm, name_srtm + "_fill", path_log, replace=True)
    name_srtm += "_fill"

    # project DEM to UTM
    if transform:
        srtm.transform(name_srtm, name_srtm + "_utm")
        name_srtm += "_utm"
    ######################################################################
    # create DEM products
    print "sar image simulation..."
    try:
        run([
            "gc_map", master + ".par", "-", name_srtm + ".par", name_srtm,
            dem_seg + ".par", dem_seg, lut, dem_ovs, dem_ovs, sim_sar, u, v,
            inc, psi, pix, ls_map, 8, func_interp
        ],
            logpath=path_log)
    except IOError:
        print "...failed"
        return

    ######################################################################
    print "initial pixel area estimation..."
    run([
        "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut, ls_map,
        inc, pixel_area
    ],
        logpath=path_log)

    ######################################################################
    print "exact offset estimation..."
    try:
        inlist = ["", "0 0", "100 100", "128 128", 7.0]
        run(["create_diff_par", master + ".par", "-", master + "_diff.par", 1],
            inlist=inlist,
            logpath=path_log)
        run([
            "offset_pwrm", master, pixel_area, master + "_diff.par", offs, snr,
            128, 128, offs + ".txt", "-", 200, 200, 7.0
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "computation of offset polynomials..."
    try:
        run([
            "offset_fitm", offs, snr, master + "_diff.par", coffs, coffsets,
            "-", 4, 0
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "supplementing lookuptable with offset polynomials..."
    try:
        sim_width = ReadPar(dem_seg + ".par").width
        run(["gc_map_fine", lut, sim_width, master + "_diff.par", lut_fine, 0],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "refined pixel area estimation..."
    try:
        run([
            "pixel_area", master + ".par", dem_seg + ".par", dem_seg, lut_fine,
            ls_map, inc, pixel_area2
        ],
            logpath=path_log)
    except:
        print "...failed"
        return

    ######################################################################
    print "radiometric calibration and normalization..."
    try:
        slc_width = ReadPar(master + ".par").range_samples
        run([
            "radcal_MLI", master, master + ".par", "-", master + "_cal", "-",
            0, 0, 1, 0.0, "-", ellipse_pixel_area
        ],
            logpath=path_log)
        run([
            "ratio", ellipse_pixel_area, pixel_area2, ratio_sigma0, slc_width,
            1, 1
        ],
            logpath=path_log)
        for item in files_mli:
            run([
                "product", item, ratio_sigma0, item + "_pixcal", slc_width, 1,
                1
            ],
                logpath=path_log)
    except:
        print "...failed"
        return
    ######################################################################
    print "backward geocoding, normalization and conversion to dB..."
    for item in files_mli:
        run([
            "geocode_back", item + "_pixcal", slc_width, lut_fine,
            item + "_geo", sim_width, 0, func_geoback
        ],
            logpath=path_log)

        run([
            "lin_comb", "1", item + "_geo", 0,
            math.cos(math.radians(float(par.incidence_angle))),
            item + "_geo_flat", sim_width
        ],
            logpath=path_log)
        run([
            "sigma2gamma", item + "_geo_flat", inc, item + "_geo_norm",
            sim_width
        ],
            logpath=path_log)

    ######################################################################

    print "creating final tiff files..."
    for item in finder(tempdir, ["*_geo_norm"]):
        polarization = re.findall("[HV]{2}", os.path.basename(item))[0].lower()
        outname = outname_base + "_" + polarization
        run([
            "data2geotiff", dem_seg + ".par", item, 2,
            outname + "_geocoded_norm.tif"
        ],
            logpath=path_log)
        annotation_dir = os.path.join(tempdir, "annotation")
        annotation = os.path.join(annotation_dir, [
            x for x in os.listdir(annotation_dir)
            if polarization in os.path.basename(x)
        ][0])
        os.rename(annotation, outname + "_annotation.xml")

    ######################################################################
    print "cleaning up..."
    # copy, rename and edit quicklook kml and png
    shutil.copyfile(os.path.join(tempdir, "preview", "map-overlay.kml"),
                    outname_base + "_quicklook.kml")
    shutil.copyfile(os.path.join(tempdir, "preview", "quick-look.png"),
                    outname_base + "_quicklook.png")
    with open(outname_base + "_quicklook.kml", "r") as infile:
        kml = infile.read().replace(
            "quick-look.png",
            os.path.basename(outname_base + "_quicklook.png"))
    with open(outname_base + "_quicklook.kml", "w") as outfile:
        outfile.write(kml)

    if not intermediates:
        shutil.rmtree(tempdir)

    if logfiles:
        os.rename(path_log, outname_base + "_log")

    print "...done:", asctime()
    print "##########################################"
Ejemplo n.º 9
0
# extract the scene's corner coordinates from the metadata csv
# bugfix: "lat = lon = []" bound both names to the *same* list object;
# bind two independent lists instead
lat, lon = [], []
with open(csvname, "r") as csvfile:
    for row in csv.DictReader(csvfile):
        if re.search(os.path.basename(zipfile), row["zipFilename"]):
            lat = [row["upperLeftLat"], row["lowerRightLat"]]
            lon = [row["upperLeftLon"], row["lowerRightLon"]]
            break

# concatenate names of required srtm tiles with convention "[NS][0-9]{2}[EW][0-9]{3}.hgt"
if len(lat) > 0 and len(lon) > 0:
    target_ids = hgt(lat, lon)
else:
    raise IOError("meta data entry missing")

# search for required tiles in the defined srtm directory
targets = finder(srtmdir, target_ids)

# check whether all required tiles were found and copy them to the working directory
if len(targets) < len(target_ids):
    raise IOError("missing hgt files")
else:
    for item in targets:
        shutil.copy(item, outdir)
targets = [os.path.join(outdir, x) for x in target_ids]

# create gamma parameter files for all DEMs
dempar(targets)

# perform mosaicing if necessary
if len(targets) > 1:
    mosaic(targets, name_dem)
Ejemplo n.º 10
0
from ancillary import finder, run, ReadPar

# whether to apply orbit correction; taken from the last CLI argument.
# the comparison already yields a bool — the original
# 'True if ... else False' conditional was redundant.
orbit_correct = sys.argv[-1] == "True"

# path to delft orbit files
path_delft = "/pvdata2/john/ancillary/ERS/ORBIT/delft"

# path to antenna correction files
path_cal = "/pvdata2/john/ancillary/ERS/CAL/ERS_antenna"

# define (and create) directory for logfile
path_log = os.path.join(os.getcwd(), "LOG/IMP/")
if not os.path.exists(path_log):
    os.makedirs(path_log)

# collect the directories of all scenes containing an ERS CEOS leader file
scenes = [os.path.dirname(x) for x in finder(sys.argv[1], ["*LEA_01.001"])]

if len(scenes) == 0:
    raise IOError("no appropriate file found")

for scene in scenes:
    print "----------"
    # read leader file for meta information
    with open(os.path.join(scene, "LEA_01.001"), "r") as infile:
        text = [line for line in infile]
    text = "".join(text)

    # extract frame id
    # the leader file contains a 'FRAME=' tag followed by a four-character id
    frame_index = re.search("FRAME=", text).end()
    frame = text[frame_index:frame_index+4]
Ejemplo n.º 11
0
            # validate that both resolution components are integer-like
            int(targetres[0])
            int(targetres[1])
            targetres = ["-tr", targetres]
        # NOTE(review): int() raises ValueError on bad input, not IOError —
        # this handler most likely never fires; verify intended exception type
        except IOError:
            print "invalid resolution statement"
    else:
        raise IOError("invalid resolution statement")
else:
    # no target resolution requested; pass no -tr option to gdalwarp
    targetres = []

nodata = ["-dstnodata", nodata]
format = ["-of", "ENVI"]
resampling = ["-r", resampling]

# find all files matching the defined pattern(s)
items = finder(path_in, pattern.split(", "), regex=True if reg == "True" else False)

if len(items) > 0:
    path_out = os.path.dirname(file_out)
    if not os.path.exists(path_out):
        os.makedirs(path_out)
    print "the following files will be stacked to file {0}:".format(file_out)
    for item in items:
        print item
    decision = raw_input("proceed (y/n)?: ")
    if decision == "y":
        vrt = file_out+".vrt"
        sp.check_call(dissolve(["gdalbuildvrt", "-q", "-overwrite", "-separate", vrt, items]))
        sp.check_call(dissolve(["gdalwarp", "-q", "-overwrite", resampling, format, nodata, extent, targetres, vrt, file_out]))
        os.remove(vrt)
Ejemplo n.º 12
0
import sys

import os
import subprocess as sp

from ancillary import finder

with open(sys.argv[1], "r") as inlist:
    processlist = [
        line.split() for line in inlist
        if not line.startswith("#") and not line.strip() == ""
    ]

for couple in processlist:
    slc1 = finder(os.getcwd(), [couple[0] + "$"], regex=True)
    slc2 = finder(os.getcwd(), [couple[1] + "$"], regex=True)

    if len(slc1) != 1:
        print "descriptor", slc1, "ambiguous or file not existing"
        continue
    elif len(slc2) != 1:
        print "descriptor", slc2, "ambiguous or file not existing"
        continue
    else:
        sp.check_call([
            "python",
            os.path.join(os.path.dirname(sys.argv[0]), "coreg.py"), slc1[0],
            slc2[0]
        ],
                      cwd=os.getcwd())
Ejemplo n.º 13
0
import os
import re

from ancillary import finder, ReadPar, run

# read parameter file
# read parameter file
par = ReadPar(os.path.join(os.getcwd(), "PAR/baseline.par"))

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        # makedirs (not mkdir) so intermediate directories like LOG/ are
        # created as well; os.mkdir fails if the parent does not yet exist,
        # and the sibling scripts in this project all use makedirs
        os.makedirs(path)

# collect all interferogram products
interferograms = finder(path_out, ["*int"])

if len(interferograms) > 0:
    print "#############################################"
    print "interferogram flattening started..."
    for name_int in interferograms:
        print os.path.basename(name_int)

        # retrieve full name of primary and secondary SLC files
        # scene ids follow '<10-char tag><15-char timestamp>_<pol>_slc[_cal]'
        scenes = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)?",
                            name_int)
        primary = finder(os.getcwd(), [scenes[0] + "$"], regex=True)[0]
        # secondary = finder(os.getcwd(), [scenes[1]+"_reg"], regex=True)[0]
        # the co-registered secondary shares the interferogram basename,
        # with the 'int' suffix replaced by 'reg'
        secondary = name_int[:-3] + "reg"

        # collect geocoding lookup tables
Ejemplo n.º 14
0
        counter += 1
        print scene.basename
        # multilook factors taken from the HH MLI parameter file
        rlks = ReadPar(scene.HH_mli + ".par").range_looks
        azlks = ReadPar(scene.HH_mli + ".par").azimuth_looks
        # run the Cloude target decomposition on the polarimetric channels
        run([
            "CLOUDE_DEC", scene.HH_slc, scene.HV_slc, scene.VV_slc, scene.t12,
            scene.t13,
            ReadPar(scene.HH_slc + ".par").range_samples, scene.basename, rlks,
            azlks
        ], os.path.dirname(scene.t12), path_log)
        # create envi header files (note: number of lines must be reduced by 1 on import into envi)
        for i in range(1, 4):
            hdr(
                scene.HH_mli + ".par",
                os.path.join(os.path.dirname(scene.t12), scene.basename) +
                "_ctd_" + str(i) + "_mag.hdr")

if counter == 0:
    print "no scenes with required scattering and coherency matrix elements found"

# rename files to consistent pattern
for pattern in ["*.ctd*", "*.mag", "*.pha"]:
    for filename in finder(os.getcwd(), [pattern]):
        os.rename(
            filename,
            filename.replace(pattern.strip("*"),
                             pattern.strip("*").replace(".", "_")))

print "...done"
print "#############################################"
Ejemplo n.º 15
0
from isp_parameterfile import ISPPar

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/ISP/")
path_out = os.path.join(os.getcwd(), "ISP/")
for path in [path_log, path_out]:
    if not os.path.exists(path):
        os.makedirs(path)

par = ReadPar(os.path.join(os.getcwd(), "PAR", "coherence_ad.par"), type="exe")

# retrieve additional arguments from script call
# the comparison already yields a bool; the original
# 'True if ... else False' conditional was redundant
differential = par.differential == "True"

# find flattened interferograms
# differential processing works on '*_int_diff' products, otherwise on '*_flt'
list_flt = finder(path_out, ["*_int_diff"]) if differential else finder(
    path_out, ["*_flt"])

if len(list_flt) > 0:
    print "#############################################"
    print "estimation started..."

    for name_flt in list_flt:
        # extract timestamps from flt name
        id_pwr = re.findall("[A-Z0-9_]{10}[0-9T]{15}_[HV]{2}_slc(?:_cal)",
                            name_flt)
        # find mli/rmli images matching the extracted timestamps
        try:
            name_mli = finder(os.getcwd(), ["*" + id_pwr[0] + "_mli"])[0]
            # name_rmli = finder(os.getcwd(), ["*"+id_pwr[1]+"_reg_mli"])[0]
            name_rmli = name_flt[:
Ejemplo n.º 16
0
from ancillary import finder, ReadPar, run

# read parameter file
# read the SLC calibration parameter file
par = ReadPar(os.path.join(os.getcwd(), "PAR/cal_slc.par"))

# define (and create) directories for processing results and logfile
path_log = os.path.join(os.getcwd(), "LOG/GEO/")
path_out = os.path.join(os.getcwd(), "ISP/")
for directory in [path_log, path_out]:
    if not os.path.exists(directory):
        os.makedirs(directory)

# sensor-specific calibration factors (K_dB) keyed by sensor id
list_K_dB = {"PSR1": "-115.0"}

# collect all SLC images below the working directory
list_slc = finder(os.getcwd(), ["*_slc"])

if len(list_slc) > 0:
    print "#############################################"
    print "calibration started..."

    for name_slc in list_slc:
        sensor = name_slc.split("_")[0]
        if sensor in list_K_dB:
            K_dB = list_K_dB[sensor]
        else:
            print "calibration for sensor " + sensor + "not implemented"

        name_cslc = name_slc[:-3] + "cslc"

        run([
Ejemplo n.º 17
0
##############################################################
# delete/remove files following defined patterns in the current directory and its subdirectories
# module of software gammaGUI
# John Truckenbrodt 2015
##############################################################

import sys

import os
import shutil

from ancillary import finder

# find all files matching the defined pattern(s)
# find all files matching the defined pattern(s)
# sys.argv[1]: comma-separated patterns; sys.argv[3]: regex flag ('True'/'False');
# the comparison already yields a bool — the original
# 'True if ... else False' conditional was redundant
items = finder(os.getcwd(),
               sys.argv[1].split(", "),
               regex=(sys.argv[3] == "True"))

# exclude files in the export directory
items = [x for x in items if "/EXP/" not in x]

if len(items) > 0:
    # the export mode moves files into a dedicated EXP/ directory
    path_exp = os.path.join(os.getcwd(), "EXP/")
    if sys.argv[2] == "export" and not os.path.exists(path_exp):
        os.makedirs(path_exp)
    # sys.argv[2] is either 'export' or 'delete'; announce the action
    print "the following files will be", {
        "export": "exported",
        "delete": "deleted"
    }[sys.argv[2]] + ":"
    for item in items:
        print item