Example #1
0
def main():
    """
    legacy compatibility with the gammaGUI interface; will be removed in the future
    """
    print('#############################################')
    print('preparing SRTM mosaic:')
    # read parameter textfile
    par = ReadPar(os.path.join(os.getcwd(), 'PAR/srtm.par'))

    demdir = None
    if hasattr(par, 'SRTM_archive'):
        if os.path.isdir(par.SRTM_archive):
            demdir = par.SRTM_archive

    parfiles = finder(os.getcwd(), ['*slc.par', '*mli.par', '*cal.par'])

    # define (and create) directories for processing results and logfile
    path_dem = os.path.join(os.getcwd(), 'DEM/')
    path_log = os.path.join(os.getcwd(), 'LOG/GEO/')
    for path in [path_log, path_dem]:
        if not os.path.exists(path):
            os.makedirs(path)

    # find SRTM tiles for mosaicing
    demlist = hgt_collect(parfiles,
                          path_dem,
                          demdir=demdir,
                          arcsec=int(par.arcsec))

    # remove files created by this function
    for item in finder(path_dem, ['mosaic*', 'dem*', '*.par']):
        os.remove(item)

    if len(demlist) == 0:
        raise IOError('no hgt files found')

    # perform mosaicing if multiple files are found
    if len(demlist) > 1:
        print('mosaicing...')
        dem = os.path.join(path_dem, 'mosaic')
        mosaic(demlist, dem)
    else:
        dem = demlist[0]
        dempar(dem)
    fill(dem, os.path.join(path_dem, 'dem_final'), path_log)
    dem = os.path.join(path_dem, 'dem_final')

    # transform DEM to UTM
    if par.utm == 'True':
        print('transforming to UTM...')
        transform(dem, dem + '_utm', int(par.targetres))
        hdr(dem + '_utm.par')
    print('...done')
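A hedged sketch of the PAR/srtm.par parameter text file read by ReadPar above; the parameter names follow from the attribute accesses in main(), while the key-value layout and the example values are assumptions for illustration only:

SRTM_archive: /path/to/local/SRTM/archive
arcsec: 3
utm: True
targetres: 20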
Example #2
0
def gpt(xmlfile):
    """
    wrapper for the ESA SNAP Graph Processing Tool (GPT)
    input is a readily formatted workflow XML file as created by function geocode in module snap.util
    """
    try:
        gpt_exec = ExamineSnap().gpt
    except AttributeError:
        raise RuntimeError('could not find SNAP GPT executable')
    
    with open(xmlfile, 'r') as infile:
        workflow = ET.fromstring(infile.read())
    write = workflow.find('.//node[@id="Write"]')
    outname = write.find('.//parameters/file').text
    outdir = os.path.dirname(outname)
    format = write.find('.//parameters/formatName').text
    infile = workflow.find('.//node[@id="Read"]/parameters/file').text
    
    if format == 'GeoTiff-BigTIFF':
        cmd = [gpt_exec,
               # '-Dsnap.dataio.reader.tileWidth=*',
               # '-Dsnap.dataio.reader.tileHeight=1',
               '-Dsnap.dataio.bigtiff.tiling.width=256',
               '-Dsnap.dataio.bigtiff.tiling.height=256',
               # '-Dsnap.dataio.bigtiff.compression.type=LZW',
               # '-Dsnap.dataio.bigtiff.compression.quality=0.75',
               xmlfile]
    else:
        cmd = [gpt_exec, xmlfile]
    
    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = proc.communicate()
    out = out.decode('utf-8') if isinstance(out, bytes) else out
    err = err.decode('utf-8') if isinstance(err, bytes) else err
    if proc.returncode != 0:
        if os.path.isfile(outname + '.tif'):
            os.remove(outname + '.tif')
        elif os.path.isdir(outname):
            shutil.rmtree(outname)
        print(out + err)
        print('failed: {}'.format(os.path.basename(infile)))
        err_match = re.search('Error: (.*)\n', out + err)
        errmessage = err_match.group(1) if err_match else err
        raise RuntimeError(errmessage)
    
    if format == 'ENVI':
        suffix = parse_suffix(workflow)
        translateoptions = {'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
                            'format': 'GTiff',
                            'noData': 0}
        for item in finder(outname, ['*.img']):
            pol = re.search('[HV]{2}', item).group()
            name_new = outname.replace(suffix, '{0}_{1}.tif'.format(pol, suffix))
            gdal_translate(item, name_new, translateoptions)
        shutil.rmtree(outname)
    elif format == 'GeoTiff-BigTIFF':
        ras = gdal.Open(outname + '.tif', GA_Update)
        for i in range(1, ras.RasterCount + 1):
            ras.GetRasterBand(i).SetNoDataValue(0)
        ras = None
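A minimal usage sketch for the gpt() wrapper above, assuming a workflow XML file has already been written (e.g. by function geocode in module snap.util, as the docstring states); the file path is a placeholder:

# hypothetical path to a readily formatted SNAP workflow file
xml = '/path/to/workflow.xml'
try:
    gpt(xml)
except RuntimeError as err:
    # on failure the wrapper has already removed incomplete output files before raising
    print('GPT processing failed: {}'.format(err))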
Example #3
0
def gpt(xmlfile):
    """
    wrapper for the ESA SNAP Graph Processing Tool (GPT)
    input is a readily formatted workflow XML file as created by function geocode in module snap.util
    """
    try:
        snap_exec = ExamineSnap().path
    except AttributeError:
        raise RuntimeError('could not find SNAP executable')
    gpt_exec = os.path.join(os.path.dirname(snap_exec), 'gpt')

    with open(xmlfile, 'r') as infile:
        workflow = ET.fromstring(infile.read())
    write = workflow.find('.//node[@id="Write"]')
    outname = write.find('.//parameters/file').text
    outdir = os.path.dirname(outname)
    format = write.find('.//parameters/formatName').text
    infile = workflow.find('.//node[@id="Read"]/parameters/file').text

    if format == 'GeoTiff-BigTIFF':
        cmd = [gpt_exec,
               # '-Dsnap.dataio.reader.tileWidth=*',
               # '-Dsnap.dataio.reader.tileHeight=1',
               '-Dsnap.dataio.bigtiff.tiling.width=256',
               '-Dsnap.dataio.bigtiff.tiling.height=256',
               # '-Dsnap.dataio.bigtiff.compression.type=LZW',
               # '-Dsnap.dataio.bigtiff.compression.quality=0.75',
               xmlfile]
    else:
        cmd = [gpt_exec, xmlfile]

    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = proc.communicate()
    # decode the streams for Python 3, where communicate() returns bytes
    out = out.decode('utf-8') if isinstance(out, bytes) else out
    err = err.decode('utf-8') if isinstance(err, bytes) else err
    if proc.returncode != 0:
        if os.path.isfile(outname + '.tif'):
            os.remove(outname + '.tif')
        elif os.path.isdir(outname):
            shutil.rmtree(outname)
        print(out + err)
        print('failed: {}'.format(os.path.basename(infile)))
        err_match = re.search('Error: (.*)\n', out + err)
        errmessage = err_match.group(1) if err_match else err
        raise RuntimeError(errmessage)

    if format == 'ENVI':
        id = pyroSAR.identify(infile)
        suffix = parse_suffix(workflow)
        for item in finder(outname, ['*.img']):
            pol = re.search('[HV]{2}', item).group()
            name_new = os.path.join(outdir, '{}_{}_{}.tif'.format(id.outname_base(), pol, suffix))
            translateoptions = {'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'], 'format': 'GTiff'}
            gdal_translate(item, name_new, translateoptions)
        shutil.rmtree(outname)
Example #4
0
def __identify_snap(self):
    """
    do a comprehensive search for an ESA SNAP installation

    Returns
    -------
    bool
        has the SNAP properties file been changed?
    """
    # create a list of possible SNAP executables
    defaults = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
    paths = os.environ['PATH'].split(os.path.pathsep)
    options = [os.path.join(path, option) for path in paths for option in defaults]
    options = [x for x in options if os.path.isfile(x)]

    if not hasattr(self, 'path') or not os.path.isfile(self.path):
        executables = options
    else:
        executables = [self.path] + options

    # for each possible SNAP executable, check whether additional files and directories exist relative to it
    # to confirm whether it actually is an ESA SNAP installation or something else like e.g. the Ubuntu App Manager
    for path in executables:
        if os.path.islink(path):
            path = os.path.realpath(path)

        # check whether a directory etc exists relative to the SNAP executable
        etc = os.path.join(os.path.dirname(os.path.dirname(path)), 'etc')
        if not os.path.isdir(etc):
            continue

        # check the content of the etc directory
        auxdata = os.listdir(etc)
        if 'snap.auxdata.properties' not in auxdata:
            continue
        else:
            auxdata_properties = os.path.join(etc, 'snap.auxdata.properties')

        # identify the gpt executable
        gpt_candidates = finder(os.path.dirname(path), ['gpt', 'gpt.exe'])
        if len(gpt_candidates) == 0:
            continue
        else:
            gpt = gpt_candidates[0]

        self.path = path
        self.etc = etc
        self.gpt = gpt
        self.auxdata = auxdata
        self.properties = auxdata_properties
        return

    warnings.warn('SNAP could not be identified')
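A short sketch of how the attributes set by __identify_snap are consumed, mirroring the gpt wrappers in Examples #2 and #3; the class name ExamineSnap and its attributes are taken from those examples, the rest is illustrative:

try:
    config = ExamineSnap()
    print('SNAP executable: {}'.format(config.path))
    print('GPT executable:  {}'.format(config.gpt))
    print('etc directory:   {}'.format(config.etc))
except AttributeError:
    # if the search failed, only a warning was issued and the attributes were never set
    raise RuntimeError('could not find SNAP installation')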
Example #5
0
def test_finder(tmpdir):
    dir = str(tmpdir)
    dir_sub1 = os.path.join(dir, 'testdir1')
    dir_sub2 = os.path.join(dir, 'testdir2')
    os.makedirs(dir_sub1)
    os.makedirs(dir_sub2)
    with open(os.path.join(dir_sub1, 'testfile1.txt'), 'w') as t1:
        t1.write('test')
    with open(os.path.join(dir_sub2, 'testfile2.txt'), 'w') as t2:
        t2.write('test')
    assert len(anc.finder(dir, ['test*'], foldermode=0)) == 2
    assert len(anc.finder(dir, ['test*'], foldermode=0, recursive=False)) == 0
    assert len(anc.finder(dir, ['test*'], foldermode=1)) == 4
    assert len(anc.finder(dir, ['test*'], foldermode=2)) == 2
    assert len(anc.finder([dir_sub1, dir_sub2], ['test*'])) == 2
    with pytest.raises(TypeError):
        anc.finder(1, [])
Example #6
0
def makeSRTM(scenes, srtmdir, outname):
    """
    Create a DEM from SRTM tiles
    Input is a list of pyroSAR.ID objects from which coordinates are read to determine the required DEM extent
    Mosaics SRTM DEM tiles, converts them to Gamma format and subtracts offset to WGS84 ellipsoid
    for DEMs downloaded from USGS http://gdex.cr.usgs.gov or CGIAR http://srtm.csi.cgiar.org
    """

    tempdir = outname + '___temp'
    os.makedirs(tempdir)

    hgt_options = hgt(scenes)

    hgt_files = finder(srtmdir, hgt_options)

    # todo: check if really needed
    nodatas = [str(int(raster.Raster(x).nodata)) for x in hgt_files]

    srtm_vrt = os.path.join(tempdir, 'srtm.vrt')
    srtm_temp = srtm_vrt.replace('.vrt', '_tmp')
    srtm_final = srtm_vrt.replace('.vrt', '')

    run([
        'gdalbuildvrt', '-overwrite', '-srcnodata', ' '.join(nodatas),
        srtm_vrt, hgt_files
    ])

    run([
        'gdal_translate', '-of', 'ENVI', '-a_nodata', -32768, srtm_vrt,
        srtm_temp
    ])

    process(['srtm2dem', srtm_temp, srtm_final, srtm_final + '.par', 2, '-'],
            outdir=tempdir)

    shutil.move(srtm_final, outname)
    shutil.move(srtm_final + '.par', outname + '.par')
    hdr(outname + '.par')

    shutil.rmtree(tempdir)
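A hedged usage sketch for makeSRTM; the scene archives and SRTM directory are placeholders, and pyroSAR.identify (as used in Example #3) is assumed to return the pyroSAR.ID objects the docstring requires:

from pyroSAR import identify

# hypothetical input scenes and local SRTM tile archive
scenes = [identify(x) for x in ['/path/to/scene1.zip', '/path/to/scene2.zip']]
makeSRTM(scenes=scenes,
         srtmdir='/path/to/srtm/archive',
         outname='/path/to/DEM/dem_final')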
Example #7
0
def main():
    # define the input directory containing the files to be stacked
    dir_in = '/...'

    # define output file name
    dstfile = '/.../x'

    # shapefile (for stack boundaries)
    shp = '/../x.shp'

    # store results in separate files or one single stack file? If separate then dstfile is used as a directory.
    sep = True

    # list the files to be resampled; those not overlapping with the shapefile geometry will be excluded by function stack
    srcfiles = finder(dir_in, ['S1*_VV_*norm_db.tif'])

    # check whether dstfile is already a file
    if os.path.isfile(dstfile):
        raise IOError('dstfile already exists')

    # create groups of similar time stamps for mosaicking.
    # All images with a time stamp of less than 30s difference will be grouped
    groups = groupbyTime(srcfiles, seconds, 30)

    # final function call
    # groups will be mosaicked first
    # the resulting images will all have the same extent
    stack(srcfiles=groups,
          dstfile=dstfile,
          resampling='bilinear',
          targetres=[20, 20],
          srcnodata=-99,
          dstnodata=-99,
          shapefile=shp,
          sortfun=seconds,
          separate=sep,
          overwrite=False)
Example #8
0
def hgt_collect(parfiles, outdir, demdir=None, arcsec=3):
    """
    automatic downloading and unpacking of srtm tiles
    base directory must contain SLC files in GAMMA format including their parameter files for reading coordinates
    additional dem directory may locally contain srtm files. This directory is searched for locally existing files, which are then copied to the current working directory
    """

    # concatenate required hgt tile names
    target_ids = hgt(parfiles)

    targets = []

    pattern = '[NS][0-9]{2}[EW][0-9]{3}'

    # if an additional dem directory has been defined, check this directory for required hgt tiles
    if demdir is not None:
        targets.extend(finder(demdir, target_ids))

    # check for additional potentially existing hgt tiles in the defined output directory
    extras = [
        os.path.join(outdir, x) for x in target_ids
        if os.path.isfile(os.path.join(outdir, x))
        and not re.search(x, '\n'.join(targets))
    ]
    targets.extend(extras)

    print('found {} relevant SRTM tiles...'.format(len(targets)))

    # search server for all required tiles, which were not found in the local directories
    if len(targets) < len(target_ids):
        print('searching for additional SRTM tiles on the server...')
        onlines = []

        if arcsec == 1:
            remotes = [
                'http://e4ftl01.cr.usgs.gov/SRTM/SRTMGL1.003/2000.02.11/'
            ]
            remotepattern = pattern + '.SRTMGL1.hgt.zip'
        elif arcsec == 3:
            server = 'http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'
            remotes = [
                os.path.join(server, x) for x in [
                    'Africa', 'Australia', 'Eurasia', 'Islands',
                    'North_America', 'South_America'
                ]
            ]
            remotepattern = pattern + '[.]hgt.zip'
        else:
            raise ValueError('argument arcsec must be of value 1 or 3')

        for remote in remotes:
            # read and decode the directory listing so the regex below can be applied
            response = urlopen(remote).read().decode('utf-8')
            items = sorted(set(re.findall(remotepattern, response)))
            for item in items:
                outname = re.findall(pattern, item)[0] + '.hgt'
                if outname in target_ids and outname not in [
                        os.path.basename(x) for x in targets
                ]:
                    onlines.append(os.path.join(remote, item))

        # if additional tiles have been found online, download and unzip them to the local directory
        if len(onlines) > 0:
            print('downloading {} SRTM tiles...'.format(len(onlines)))
            for candidate in onlines:
                localname = os.path.join(
                    outdir,
                    re.findall(pattern, candidate)[0] + '.hgt')
                infile = urlopen(candidate)
                with open(localname + '.zip', 'wb') as outfile:
                    outfile.write(infile.read())
                infile.close()
                with zf.ZipFile(localname + '.zip', 'r') as z:
                    z.extractall(outdir)
                os.remove(localname + '.zip')
                targets.append(localname)
    return targets
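A minimal usage sketch for hgt_collect, mirroring the call in Example #1; the scene and output directories are placeholders:

# GAMMA parameter files from which the required tile names are derived
parfiles = finder('/path/to/scenes', ['*slc.par', '*mli.par', '*cal.par'])
tiles = hgt_collect(parfiles, outdir='/path/to/DEM', demdir=None, arcsec=3)
if len(tiles) == 0:
    raise IOError('no hgt files found')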
Example #9
0
def test_Raster():
    with pytest.raises(OSError):
        ras = Raster('foobar')
    ras = Raster(
        'pyroSAR/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'
    )
    assert ras.bands == 1
    assert ras.proj4 == '+proj=utm +zone=31 +datum=WGS84 +units=m +no_defs '
    assert ras.cols == 268
    assert ras.rows == 217
    assert ras.dim == [217, 268, 1]
    assert ras.dtype == 'Float32'
    assert ras.dtypes(ras.dtype) == 6
    assert ras.epsg == 32631
    assert ras.format == 'GTiff'
    assert ras.geo == {
        'ymax': 4830114.70107,
        'rotation_y': 0.0,
        'rotation_x': 0.0,
        'xmax': 625408.241204,
        'xres': 20.0,
        'xmin': 620048.241204,
        'ymin': 4825774.70107,
        'yres': -20.0
    }
    assert ras.typemap() == {
        'int32': 5,
        'int16': 3,
        'float64': 7,
        'complex128': 11,
        'uint8': 1,
        'uint16': 2,
        'complex64': 10,
        'uint32': 4,
        'int8': 1,
        'float32': 6
    }
    assert ras.geogcs == 'WGS 84'
    assert ras.is_valid() is True
    assert ras.proj4args == {
        'units': 'm',
        'no_defs': None,
        'datum': 'WGS84',
        'proj': 'utm',
        'zone': '31'
    }
    assert ras.allstats == [[
        -26.65471076965332, 1.4325850009918213, -12.124929534450377,
        4.738273594738293
    ]]
    assert ras.bbox().getArea() == 23262400.0
    assert len(ras.layers()) == 1
    ras.load()
    mat = ras.matrix()
    assert isinstance(mat, np.ndarray)
    ras.assign(mat)
    # ras.reduce()
    ras.rescale(lambda x: 10 * x)
    ras.write(
        'pyroSAR/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db_new.tif'
    )
    for item in finder('pyroSAR/tests/data', ['S1A*_new*']):
        os.remove(item)
Example #10
0
def test_stack():
    name = 'pyroSAR/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'
    outname = 'pyroSAR/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db_sub'
    tr = (30, 30)
    with pytest.raises(IOError):
        stack(srcfiles=[],
              resampling='near',
              targetres=tr,
              srcnodata=-99,
              dstnodata=-99,
              dstfile=outname)

    with pytest.raises(IOError):
        stack(srcfiles=[name, name],
              resampling='near',
              targetres=tr,
              srcnodata=-99,
              dstnodata=-99,
              dstfile=outname,
              layernames=['a'])

    with pytest.raises(RuntimeError):
        stack(srcfiles=[name, name],
              resampling='near',
              targetres=30,
              srcnodata=-99,
              dstnodata=-99,
              dstfile=outname)

    with pytest.raises(RuntimeError):
        stack(srcfiles=[name, name],
              resampling='near',
              targetres=(30, 30, 30),
              srcnodata=-99,
              dstnodata=-99,
              dstfile=outname)

    with pytest.raises(IOError):
        stack(srcfiles=[name, name],
              resampling='foobar',
              targetres=tr,
              srcnodata=-99,
              dstnodata=-99,
              dstfile=outname)

    with pytest.raises(OSError):
        stack(srcfiles=[name, 'foobar'],
              resampling='near',
              targetres=tr,
              srcnodata=-99,
              dstnodata=-99,
              dstfile=outname)

    stack(srcfiles=[name, name],
          resampling='near',
          targetres=tr,
          overwrite=True,
          srcnodata=-99,
          dstnodata=-99,
          dstfile=outname)
    for item in finder('pyroSAR/tests/data', ['S1A*_sub*']):
        os.remove(item)
Example #11
0
                                        product='GRD',
                                        acquisition_mode='IW',
                                        vv=1)

    print('{0}: {1} scenes found for site {2}'.format(socket.gethostname(),
                                                      len(selection_proc),
                                                      sitename))
    #######################################################################################
    # call to processing utility
    if len(selection_proc) > 1:
        print('start processing')

        for scene in selection_proc:
            geocode(infile=scene, outdir=outdir, tr=resolution, scaling='db')
    return len(selection_proc)


if __name__ == '__main__':
    #######################################################################################
    # update Sentinel-1 GRD scene archive database

    # define a directory containing zipped scene archives and list all files starting with 'S1A' or 'S1B'
    archive_s1 = '/.../sentinel1/GRD'
    scenes_s1 = finder(archive_s1, ['^S1[AB]'], regex=True, recursive=False)

    with Archive(dbfile) as archive:
        archive.insert(scenes_s1)
    #######################################################################################
    # start the processing
    results = list(futures.map(worker, sites))