Code Example #1
File: example_stacking.py  Project: ywg0212/pyroSAR
# assumed imports, not shown in the original excerpt; module paths may differ between versions
import os
from spatialist.ancillary import finder
from spatialist.raster import stack
from pyroSAR.ancillary import groupbyTime, seconds


def main():
    # define input directory containing files to be stacked
    dir_in = '/...'
    
    # define output file name
    dstfile = '/.../x'
    
    # shapefile (for stack boundaries)
    shp = '/../x.shp'
    
    # store results in separate files or one single stack file? If separate then dstfile is used as a directory.
    sep = True
    
    # list files to be resampled; those not overlapping with the shapefile geometry will be excluded by function stack
    srcfiles = finder(dir_in, ['S1*_VV_*norm_db.tif'])
    
    # check whether dstfile is already a file
    if os.path.isfile(dstfile):
        raise IOError('dstfile already exists')
    
    # create groups of similar time stamps for mosaicking.
    # All images with a time stamp of less than 30s difference will be grouped
    groups = groupbyTime(srcfiles, seconds, 30)
    
    # final function call
    # groups will be mosaicked first
    # the resulting images will all have the same extent
    stack(srcfiles=groups, dstfile=dstfile, resampling='bilinear',
          targetres=[20, 20], srcnodata=-99, dstnodata=-99,
          shapefile=shp, sortfun=seconds, separate=sep, overwrite=False)
Code Example #2
File: examine.py  Project: xingyaozhang1/pyroSAR
    def __identify_snap(self):
        """
        do a comprehensive search for an ESA SNAP installation
        
        Returns
        -------
        None
            on success, the attributes `path`, `etc`, `gpt`, `auxdata` and `properties` are set
        """
        # create a list of possible SNAP executables
        defaults = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
        paths = os.environ['PATH'].split(os.path.pathsep)
        options = [
            os.path.join(path, option) for path in paths for option in defaults
        ]
        options = [x for x in options if os.path.isfile(x)]

        if not hasattr(self, 'path') or not os.path.isfile(self.path):
            executables = options
        else:
            executables = [self.path] + options

        # for each possible SNAP executable, check whether additional files and directories exist relative to it
        # to confirm that it actually is an ESA SNAP installation and not something else, e.g. the Ubuntu App Manager
        for path in executables:
            if os.path.islink(path):
                path = os.path.realpath(path)

            # check whether a directory named 'etc' exists relative to the SNAP executable
            etc = os.path.join(os.path.dirname(os.path.dirname(path)), 'etc')
            if not os.path.isdir(etc):
                continue

            # check the content of the etc directory
            auxdata = os.listdir(etc)
            if 'snap.auxdata.properties' not in auxdata:
                continue
            else:
                auxdata_properties = os.path.join(etc,
                                                  'snap.auxdata.properties')

            # identify the gpt executable
            gpt_candidates = finder(os.path.dirname(path), ['gpt', 'gpt.exe'])
            if len(gpt_candidates) == 0:
                continue
            else:
                gpt = gpt_candidates[0]

            self.path = path
            self.etc = etc
            self.gpt = gpt
            self.auxdata = auxdata
            self.properties = auxdata_properties
            return

        warnings.warn(
            'SNAP could not be identified. If you have installed it please add the path to the SNAP '
            'executables (bin subdirectory) to the PATH environment. '
            'E.g. in the Linux .bashrc file add the following line:\nexport PATH=$PATH:path/to/snap/bin'
        )
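A minimal usage sketch, assuming this method belongs to pyroSAR's ExamineSnap class (in pyroSAR.examine, as the file attribution suggests) and is invoked during initialization; the attributes are only available if the search succeeded:

from pyroSAR.examine import ExamineSnap

config = ExamineSnap()  # triggers the SNAP search internally
print(config.gpt)       # path of the gpt executable, if SNAP was found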
Code Example #3
    def _reorganize(self):
        """
        compress and move EOF files into subdirectories

        Returns
        -------

        """
        message = True
        for subdir in [self.outdir_poe, self.outdir_res]:
            if not os.path.isdir(subdir):
                continue
            files = finder(subdir, [self.pattern], recursive=False, regex=True)
            for eof in files:
                base = os.path.basename(eof)
                target = os.path.join(self._subdir(eof), base + '.zip')
                os.makedirs(os.path.dirname(target), exist_ok=True)
                if not os.path.isfile(target):
                    if message:
                        print('compressing and reorganizing EOF files')
                        message = False
                    with zf.ZipFile(file=target,
                                    mode='w',
                                    compression=zf.ZIP_DEFLATED) as archive:
                        archive.write(filename=eof, arcname=base)
                os.remove(eof)
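The core pattern used above, compressing a single file into a zip archive and then removing the original, can be sketched standalone (the file name is hypothetical):

import os
import zipfile as zf

eof = 'S1A_OPER_AUX_POEORB_OPOD.EOF'  # hypothetical orbit file
target = eof + '.zip'
with zf.ZipFile(file=target, mode='w', compression=zf.ZIP_DEFLATED) as archive:
    archive.write(filename=eof, arcname=os.path.basename(eof))
os.remove(eof)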
Code Example #4
File: dem.py  Project: lidahuilidahui/pyroSAR
def makeSRTM(scenes, srtmdir, outname):
    """
    Create a DEM in Gamma format from SRTM tiles

    - coordinates are read to determine the required DEM extent and select the necessary hgt tiles
    - mosaics SRTM DEM tiles, converts them to Gamma format and subtracts offset to WGS84 ellipsoid

    intended for SRTM products downloaded from:

    - USGS: https://gdex.cr.usgs.gov/gdex/
    - CGIAR: http://srtm.csi.cgiar.org

    Parameters
    ----------
    scenes: list of str or pyroSAR.ID
        a list of Gamma parameter files or pyroSAR ID objects to read the DEM extent from
    srtmdir: str
        a directory containing the SRTM hgt tiles
    outname: str
        the name of the final DEM file

    Returns
    -------

    """
    
    tempdir = outname + '___temp'
    os.makedirs(tempdir)
    
    hgt_options = hgt(scenes)
    
    hgt_files = finder(srtmdir, hgt_options)
    
    nodatas = list(set([raster.Raster(x).nodata for x in hgt_files]))
    if len(nodatas) == 1:
        nodata = nodatas[0]
    else:
        raise RuntimeError('different nodata values are not permitted')
    
    srtm_vrt = os.path.join(tempdir, 'srtm.vrt')
    srtm_temp = srtm_vrt.replace('.vrt', '_tmp')
    srtm_final = srtm_vrt.replace('.vrt', '')
    
    gdalbuildvrt(hgt_files, srtm_vrt, {'srcNodata': nodata, 'options': ['-overwrite']})
    
    gdal_translate(srtm_vrt, srtm_temp, {'format': 'ENVI', 'noData': nodata})
    
    diff.srtm2dem(SRTM_DEM=srtm_temp,
                  DEM=srtm_final,
                  DEM_par=srtm_final + '.par',
                  gflg=2,
                  geoid='-',
                  outdir=tempdir)
    
    shutil.move(srtm_final, outname)
    shutil.move(srtm_final + '.par', outname + '.par')
    par2hdr(outname + '.par', outname + '.hdr')
    
    shutil.rmtree(tempdir)
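A hedged usage sketch; the import path is assumed from the file attribution (dem.py in pyroSAR's gamma subpackage) and all paths are hypothetical:

from pyroSAR.gamma.dem import makeSRTM  # assumed module path

makeSRTM(scenes=['/path/to/scene1.par', '/path/to/scene2.par'],
         srtmdir='/path/to/hgt_tiles',
         outname='/path/to/srtm_dem')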
Code Example #5
def test_geocode(tmpdir, testdata):
    scene = testdata['s1']
    geocode(scene, str(tmpdir), test=True)
    xmlfile = finder(str(tmpdir), ['*.xml'])[0]
    tree = Workflow(xmlfile)
    nodes = tree.nodes()
    assert is_consistent(nodes) is True
    groups = groupbyWorkers(xmlfile, 2)
    split(xmlfile, groups)
    id = identify(scene)
    basename = '{}_{}'.format(id.outname_base(), tree.suffix)
    procdir = os.path.join(str(tmpdir), basename)
    assert os.path.isdir(procdir)
    tempdir = os.path.join(procdir, 'temp')
    assert os.path.isdir(tempdir)
    parts = finder(tempdir, ['*.xml'])
    assert len(parts) == 4
Code Example #6
File: collect_suffices.py  Project: ywg0212/pyroSAR
# assumed imports, not shown in the original excerpt; module paths may differ between versions
import os
import re
import subprocess as sp
from spatialist.ancillary import finder


def main():
    # some arbitrary directory for the source code
    workdir = os.path.join(os.path.expanduser('~'), '.pyrosar', 'snap_code')
    os.makedirs(workdir, exist_ok=True)

    # the name of the Java properties file containing the operator-suffix lookup
    outfile = 'snap.suffices.properties'

    # clone all relevant toolboxes
    for tbx in ['snap-engine', 'snap-desktop', 's1tbx']:
        print(tbx)
        target = os.path.join(workdir, tbx)
        if not os.path.isdir(target):
            url = 'https://github.com/senbox-org/{}'.format(tbx)
            sp.check_call(['git', 'clone', url], cwd=workdir)
        else:
            sp.check_call(['git', 'pull'], cwd=target)

    # search patterns for relevant files
    # Usually files containing operator classes are named <operator>Op.java but without dashes
    # e.g. TerrainFlatteningOp.java for the Terrain-Flattening operator
    # One exception is Calibration for which there is a sub-class for each SAR sensor
    operators = finder(workdir, ['*Op.java', 'BaseCalibrator.java'])

    # a list for collecting the suffixes
    collect = []

    for op in operators:
        with open(op) as infile:
            content = infile.read()

        # the suffix is defined as a class attribute PRODUCT_SUFFIX
        pattern = 'String PRODUCT_SUFFIX = \"_([a-zA-Z]*)\"'
        match = re.search(pattern, content)
        if match:
            suffix = match.groups()[0]
        else:
            suffix = ''

        # the name of the operator as available in the UI
        pattern = 'alias = \"([a-zA-Z-]*)\"'
        match = re.search(pattern, content)
        if match:
            alias = match.groups()[0]
        else:
            alias = None

        # only collect operators for which an alias exists, i.e. which are exposed in the UI,
        # and for which a suffix is defined. In the UI, all operators for which no suffix exists
        # will just get no suffix in any written file.
        if alias is not None and suffix != '':
            print(alias, suffix)
            collect.append('{0}={1}'.format(alias, suffix))

    print('found {} matching operators'.format(len(collect)))

    with open(outfile, 'w') as out:
        out.write('\n'.join(sorted(collect, key=str.lower)))
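The two regular expressions driving this script can be tested on a minimal, invented Java snippet:

import re

content = '''
@OperatorMetadata(alias = "Terrain-Flattening")
public class TerrainFlatteningOp extends Operator {
    public static final String PRODUCT_SUFFIX = "_TF";
}
'''
suffix = re.search('String PRODUCT_SUFFIX = \"_([a-zA-Z]*)\"', content).groups()[0]
alias = re.search('alias = \"([a-zA-Z-]*)\"', content).groups()[0]
print('{0}={1}'.format(alias, suffix))  # Terrain-Flattening=TF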
Code Example #7
File: parser.py  Project: trevorskaggs/pyroSAR
def parse_module(bindir, outfile):
    """
    parse all Gamma commands of a module to functions and save them to a Python script.

    Parameters
    ----------
    bindir: str
        the `bin` directory of a module containing the commands
    outfile: str
        the name of the Python file to write

    Returns
    -------
    
    Examples
    --------
    >>> import os
    >>> from pyroSAR.gamma.parser import parse_module
    >>> outname = os.path.join(os.environ['HOME'], 'isp.py')
    >>> parse_module('/cluster/GAMMA_SOFTWARE-20161207/ISP/bin', outname)
    """
    
    if not os.path.isdir(bindir):
        raise OSError('directory does not exist: {}'.format(bindir))
    
    excludes = ['coord_trans',  # doesn't take any parameters and is interactive
                'RSAT2_SLC_preproc',  # takes option flags
                'mk_ASF_CEOS_list',  # "cannot create : Directory nonexistent"
                '2PASS_UNW',  # parameter name inconsistencies
                'mk_diff_2d',  # takes option flags
                'gamma_doc'  # opens the Gamma documentation
                ]
    failed = []
    outstring = ''
    for cmd in sorted(finder(bindir, [r'^\w+$'], regex=True), key=lambda s: s.lower()):
        basename = os.path.basename(cmd)
        if basename not in excludes:
            # print(basename)
            try:
                fun = parse_command(cmd)
            except RuntimeError as e:
                failed.append('{0}: {1}'.format(basename, str(e)))
                continue
            except DeprecationWarning:
                continue
            except Exception:
                failed.append('{0}: {1}'.format(basename, 'error yet to be assessed'))
                continue
            outstring += fun + '\n\n'
    if len(outstring) > 0:
        if not os.path.isfile(outfile):
            with open(outfile, 'w') as out:
                out.write('from pyroSAR.gamma.auxil import process\n\n\n')
        with open(outfile, 'a') as out:
            out.write(outstring)
    if len(failed) > 0:
        print('the following functions could not be parsed:\n{0}\n({1} total)'.format('\n'.join(failed), len(failed)))
Code Example #8
def uzh_prepare(reference, outdir, source):
    """
    create a UZH incident angle subset resampled to a reference image.
    
    Parameters
    ----------
    reference: str
        the reference file with the target extent
    outdir: str
        the directory to write the new file to;
        new files are named uzh_{epsg}_{index}.tif, e.g. uzh_4326_1.tif.
    source: str
        the original product to be subsetted

    Returns
    -------
    str
        the name of the file written to `outdir`
    """
    with Raster(reference) as ras:
        xRes, yRes = ras.res
        epsg = ras.epsg
        ext = ras.extent
    
    warp_opts = {'options': ['-q'], 'format': 'GTiff', 'multithread': True,
                 'dstNodata': -99, 'resampleAlg': 'bilinear'}
    
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    
    # find existing files
    uzh_subs = finder(outdir, ['uzh_[0-9]{4,5}_[0-9].tif'], regex=True)
    
    # check if any of the existing files matches the extent of the reference
    match = False
    if len(uzh_subs) > 0:
        for j, sub in enumerate(uzh_subs):
            with Raster(sub) as ras:
                if ras.extent == ext:
                    uzh_sub = sub
                    match = True
    if not match:
        with Raster(source) as ras:
            if ras.epsg != epsg:
                raise RuntimeError('CRS mismatch')
        
        basename = 'uzh_{}_{}.tif'.format(epsg, len(uzh_subs))
        uzh_sub = os.path.join(outdir, basename)
        print('creating', uzh_sub)
        warp_opts['dstSRS'] = 'EPSG:{}'.format(epsg)
        warp_opts['xRes'] = xRes
        warp_opts['yRes'] = yRes
        warp_opts['outputBounds'] = (ext['xmin'], ext['ymin'],
                                     ext['xmax'], ext['ymax'])
        gdalwarp(src=source, dst=uzh_sub, options=warp_opts)
    return uzh_sub
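A hedged call sketch with hypothetical file names; the reference and the resulting subset share extent, resolution and CRS:

subset = uzh_prepare(reference='S1A_site_vv_geo.tif',
                     outdir='/path/to/uzh_subsets',
                     source='uzh_incidence_angles.tif')
print(subset)  # e.g. /path/to/uzh_subsets/uzh_32735_0.tif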
Code Example #9
File: util.py  Project: sumesh1/S1_ARD
def clc_prepare(reference, outdir, source):
    """
    create a CLC subset resampled to a reference image.
    
    Parameters
    ----------
    reference: str
        the reference file with the target CRS and extent
    outdir: str
        the directory to write the new file to;
        new files are named clc{index}.tif, e.g. clc1.tif.
    source: str
        the original product to be subsetted

    Returns
    -------
    str
        the name of the file written to `outdir`
    """
    with Raster(reference) as ras:
        xRes, yRes = ras.res
        epsg = ras.epsg
        ext = ras.extent

    #########################################################################
    warp_opts = {
        'options': ['-q'],
        'format': 'GTiff',
        'multithread': True,
        'dstNodata': -99,
        'resampleAlg': 'mode'
    }

    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    clc_subs = finder(outdir, ['clc[0-9].tif'], regex=True)

    match = False
    if len(clc_subs) > 0:
        for j, sub in enumerate(clc_subs):
            with Raster(sub) as ras:
                if ras.extent == ext:
                    clc_sub = sub
                    match = True
    if not match:
        clc_sub = os.path.join(outdir, 'clc{}.tif'.format(len(clc_subs)))
        print('creating', clc_sub)
        warp_opts['dstSRS'] = 'EPSG:{}'.format(epsg)
        warp_opts['xRes'] = xRes
        warp_opts['yRes'] = yRes
        warp_opts['outputBounds'] = (ext['xmin'], ext['ymin'], ext['xmax'],
                                     ext['ymax'])
        gdalwarp(src=source, dst=clc_sub, options=warp_opts)
    return clc_sub
Code Example #10
def autoparse():
    """
    automatic parsing of Gamma commands.
    This function will detect the Gamma installation via the environment variable `GAMMA_HOME`, detect all available
    modules (e.g. ISP, DIFF) and parse each module's commands via function :func:`parse_module`.
    A new Python module called `gammaparse` will be created and stored under `$HOME/.pyrosar`.
    Upon importing the `pyroSAR.gamma` submodule, this function is run automatically and module `gammaparse`
    is imported as `api`.
    
    Returns
    -------

    Examples
    --------
    >>> from pyroSAR.gamma.api import diff
    >>> print('create_dem_par' in dir(diff))
    True
    """
    home = ExamineGamma().home
    target = os.path.join(os.path.expanduser('~'), '.pyrosar', 'gammaparse')
    if not os.path.isdir(target):
        os.makedirs(target)
    for module in finder(home, ['[A-Z]*'], foldermode=2):
        outfile = os.path.join(target,
                               os.path.basename(module).lower() + '.py')
        if not os.path.isfile(outfile):
            print('parsing module {} to {}'.format(os.path.basename(module),
                                                   outfile))
            for submodule in ['bin', 'scripts']:
                print('-' * 10 + '\n{}'.format(submodule))
                try:
                    parse_module(os.path.join(module, submodule), outfile)
                except OSError:
                    print('..does not exist')
            print('=' * 20)
    modules = [
        re.sub(r'\.py', '', os.path.basename(x))
        for x in finder(target, [r'[a-z]+\.py$'], regex=True)
    ]
    if len(modules) > 0:
        with open(os.path.join(target, '__init__.py'), 'w') as init:
            init.write('from . import {}'.format(', '.join(modules)))
Code Example #11
File: auxdata.py  Project: elomacorps/pyroSAR
def __buildvrt(archives, vrtfile, pattern, vsi, extent, nodata):
    local_files = [
        vsi + x for x in dissolve([finder(x, [pattern]) for x in archives])
    ]
    gdalbuildvrt(src=local_files,
                 dst=vrtfile,
                 options={'outputBounds': (extent['xmin'], extent['ymin'],
                                           extent['xmax'], extent['ymax']),
                          'srcNodata': nodata})
Code Example #12
def __buildvrt(archives, vrtfile, pattern, vsi, extent, nodata=None, srs=None):
    local_files = [vsi + x for x in dissolve([finder(x, [pattern]) for x in archives])]
    if nodata is None:
        with Raster(local_files[0]) as ras:
            nodata = ras.nodata
    opts = {'outputBounds': (extent['xmin'], extent['ymin'],
                             extent['xmax'], extent['ymax']),
            'srcNodata': nodata}
    if srs is not None:
        opts['outputSRS'] = crsConvert(srs, 'wkt')
    gdalbuildvrt(src=local_files, dst=vrtfile, options=opts)
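A hedged call sketch (all arguments hypothetical); vsi is a GDAL virtual file system prefix such as /vsizip/ so that files inside the archives can be read directly:

extent = {'xmin': 10.0, 'ymin': 50.0, 'xmax': 11.0, 'ymax': 51.0}
__buildvrt(archives=['tiles_1.zip', 'tiles_2.zip'],
           vrtfile='mosaic.vrt',
           pattern='*.tif',
           vsi='/vsizip/',
           extent=extent,
           srs=4326)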
Code Example #13
# assumed imports, not shown in the original excerpt; module paths may differ between versions
import os
from spatialist.ancillary import finder
from spatialist.raster import stack
from pyroSAR.ancillary import groupbyTime, seconds


def main():

    dirs = [
        "stack_savi"
    ]  #, "stack_ndvi", "stack_msavi", "stack_reip", "stack_rvi", "stack_dvi"]

    resolution = [30, 30]

    # shapefile (for stack boundaries)
    shp = 'F:/geodata/geo402/02_features/LADYBRAND_final_enlarged_study_area.shp'

    # store results in separate files or one single stack file? If separate then dstfile is used as a directory.
    sep = True

    for dir in dirs:

        # define input directory containing files to be stacked
        dir_in = 'F:/geodata/geo402/S2/xx_S2_indices/' + dir
        print(dir_in)
        os.makedirs(dir_in, exist_ok=True)

        # define output file name
        dstfile = 'F:/geodata/geo402/S2/xx_S2_indices/mosaics/' + dir
        print(dstfile)

        # list files to be resampled; those not overlapping with the shapefile geometry will be excluded by function stack
        srcfiles = finder(dir_in, ['*'])

        # check whether dstfile is already a file
        if os.path.isfile(dstfile):
            raise IOError('dstfile already exists')

        # create groups of similar time stamps for mosaicking.
        # All images with a time stamp of less than 30s difference will be grouped
        groups = groupbyTime(srcfiles, seconds, 30)

        # final function call
        # groups will be mosaicked first
        # the resulting images will all have the same extent
        stack(srcfiles=groups,
              dstfile=dstfile,
              resampling='bilinear',
              targetres=resolution,
              srcnodata=-9999,
              dstnodata=-9999,
              shapefile=shp,
              sortfun=seconds,
              separate=sep,
              overwrite=True)
Code Example #14
def find_datasets(directory, recursive=False, **kwargs):
    """
    find pyroSAR datasets in a directory based on their metadata
    
    Parameters
    ----------
    directory: str
        the name of the directory to be searched
    recursive: bool
        search the directory recursively into subdirectories?
    kwargs:
        Metadata attributes for filtering the scene list supplied as `key=value`. e.g. `sensor='S1A'`.
        Multiple allowed options can be provided in tuples, e.g. `sensor=('S1A', 'S1B')`.
        Any types other than tuples require an exact match, e.g. `proc_steps=['grd', 'mli', 'geo', 'norm', 'db']`
        will be matched only if these processing steps are contained in the product name in this exact order.
        The special attributes `start` and `stop` can be used for time filtering where `start<=value<=stop`.
        See function :func:`parse_datasetname` for further options.
    
    Returns
    -------
    list of str
        the file names found in the directory and filtered by metadata attributes
    
    Examples
    --------
    >>> selection = find_datasets('path/to/files', sensor=('S1A', 'S1B'), polarization='VV')
    """
    files = finder(directory, [product_pattern],
                   regex=True,
                   recursive=recursive)
    selection = []
    for file in files:
        meta = parse_datasetname(file)
        matches = []
        for key, val in kwargs.items():
            if key == 'start':
                match = val <= meta['start']
            elif key == 'stop':
                # only the start time stamp is contained in the filename
                match = val >= meta['start']
            elif isinstance(val, tuple):
                match = meta[key] in val
            else:
                match = meta[key] == val
            matches.append(match)
        if all(matches):
            selection.append(file)
    return selection
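The special start/stop attributes described in the docstring enable time filtering alongside other metadata filters; a hedged sketch with a hypothetical directory:

selection = find_datasets('path/to/files',
                          sensor=('S1A', 'S1B'),
                          start='20180101T000000',
                          stop='20181231T235959')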
Code Example #15
    def getLocals(self, osvtype='POE'):
        """
        get a list of local files of specific type

        Parameters
        ----------
        osvtype: {'POE', 'RES'}
            the type of orbit files required

        Returns
        -------
        list
            a selection of local OSV files
        """
        directory = self._typeEvaluate(osvtype)
        return finder(directory, [self.pattern], regex=True)
Code Example #16
# assumed imports, not shown in the original excerpt; module paths may differ between versions
import os
from spatialist.ancillary import finder
from spatialist.raster import stack
from pyroSAR.ancillary import groupbyTime, seconds


def main():
    '''
    both polarisations are included in the script but are stored in different folders
    '''

    resolution = [30, 30]

    # shapefile (for stack boundaries)
    shp = 'F:/geodata/geo402/##study_area/LADYBRAND_final_enlarged_study_area.shp'

    # store results in separate files or one single stack file? If separate then dstfile is used as a directory.
    sep = False

    # define input directory containing files to be stacked
    dir_in = 'F:/geodata/geo402/S1_GRD/xx_new/GRD_VH_vrts/'
    print(dir_in)
    # os.makedirs(dir_in, exist_ok=True)

    # define output file name
    dstfile = 'F:/geodata/geo402/S1_GRD/xx_new/S1A_IW_GRD_VH_stack'
    print(dstfile)

    # list files to be resampled; those not overlapping with the shapefile geometry will be excluded by function stack
    srcfiles = finder(dir_in, ['*'])

    # check whether dstfile is already a file
    if os.path.isfile(dstfile):
        raise IOError('dstfile already exists')

    # create groups of similar time stamps for mosaicking.
    # All images with a time stamp of less than 30s difference will be grouped
    groups = groupbyTime(srcfiles, seconds, 30)

    # final function call
    # groups will be mosaicked first
    # the resulting images will all have the same extent
    stack(srcfiles=groups,
          dstfile=dstfile,
          resampling='bilinear',
          targetres=resolution,
          srcnodata=-99,
          dstnodata=-99,
          shapefile=shp,
          sortfun=seconds,
          separate=sep,
          overwrite=True,
          cores=7)
Code Example #17
File: test_ancillary.py  Project: ibaris/spatialist
def test_finder(tmpdir):
    dir = str(tmpdir)
    dir_sub1 = os.path.join(dir, 'testdir1')
    dir_sub2 = os.path.join(dir, 'testdir2')
    os.makedirs(dir_sub1)
    os.makedirs(dir_sub2)
    with open(os.path.join(dir_sub1, 'testfile1.txt'), 'w') as t1:
        t1.write('test')
    with open(os.path.join(dir_sub2, 'testfile2.txt'), 'w') as t2:
        t2.write('test')
    assert len(anc.finder(dir, ['test*'], foldermode=0)) == 2
    assert len(anc.finder(dir, ['test*'], foldermode=0, recursive=False)) == 0
    assert len(anc.finder(dir, ['test*'], foldermode=1)) == 4
    assert len(anc.finder(dir, ['test*'], foldermode=2)) == 2
    assert len(anc.finder([dir_sub1, dir_sub2], ['test*'])) == 2
    with pytest.raises(TypeError):
        anc.finder(1, [])
Code Example #18
    def mindate(self, osvtype='POE', datetype='start'):
        """
        return the earliest date of locally existing POE/RES files

        Parameters
        ----------
        osvtype: {'POE', 'RES'}
            the type of orbit files required
        datetype: {'publish', 'start', 'stop'}
            one of three possible date types contained in the OSV filename

        Returns
        -------
        str
            a timestamp in format YYYYmmddTHHMMSS
        """
        address, directory = self._typeEvaluate(osvtype)
        files = finder(directory, [self.pattern], regex=True)
        return min([self.date(x, datetype) for x in files]) if len(files) > 0 else None
Code Example #19
def parse_module(bindir, outfile):
    """
    parse all Gamma commands of a module to functions and save them to a Python script.

    Parameters
    ----------
    bindir: str
        the `bin` directory of a module containing the commands
    outfile: str
        the name of the Python file to write

    Returns
    -------
    
    Examples
    --------
    >>> import os
    >>> from pyroSAR.gamma.parser import parse_module
    >>> outname = os.path.join(os.environ['HOME'], 'isp.py')
    >>> parse_module('/cluster/GAMMA_SOFTWARE-20161207/ISP/bin', outname)
    """
    excludes = [
        'coord_trans', 'mosaic', 'lin_comb', 'lin_comb_cpx', 'validate'
    ]
    failed = []
    outstring = 'from pyroSAR.gamma.auxil import process\n\n\n'
    for cmd in sorted(finder(bindir, ['*']), key=lambda s: s.lower()):
        basename = os.path.basename(cmd)
        if basename not in excludes:
            # print(basename)
            try:
                fun = parse_command(cmd)
            except RuntimeError as e:
                failed.append('{0}: {1}'.format(basename, str(e)))
                continue
            outstring += fun + '\n\n'
    with open(outfile, 'w') as out:
        out.write(outstring)
    if len(failed) > 0:
        print('the following functions could not be parsed:\n{0}\n({1} total)'.
              format('\n'.join(failed), len(failed)))
Code Example #20
                                        product='GRD',
                                        acquisition_mode='IW',
                                        vv=1)

    print('{0}: {1} scenes found for site {2}'.format(socket.gethostname(),
                                                      len(selection_proc),
                                                      sitename))
    #######################################################################################
    # call to processing utility
    if len(selection_proc) > 1:
        print('start processing')

        for scene in selection_proc:
            geocode(infile=scene, outdir=outdir, tr=resolution, scaling='db')
    return len(selection_proc)


if __name__ == '__main__':
    #######################################################################################
    # update Sentinel-1 GRD scene archive database

    # define a directory containing zipped scene archives and list all files starting with 'S1A' or 'S1B'
    archive_s1 = '/.../sentinel1/GRD'
    scenes_s1 = finder(archive_s1, ['^S1[AB]'], regex=True, recursive=False)

    with Archive(dbfile) as archive:
        archive.insert(scenes_s1)
    #######################################################################################
    # start the processing
    results = list(futures.map(worker, sites))
Code Example #21
File: auxil.py  Project: elomacorps/pyroSAR
def gpt(xmlfile):
    """
    wrapper for the ESA SNAP Graph Processing Tool (GPT).
    Input is a readily formatted workflow XML file as created by function geocode in module snap.util
    """
    try:
        gpt_exec = ExamineSnap().gpt
    except AttributeError:
        raise RuntimeError('could not find SNAP GPT executable')

    with open(xmlfile, 'r') as infile:
        workflow = ET.fromstring(infile.read())
    write = workflow.find('.//node[@id="Write"]')
    outname = write.find('.//parameters/file').text
    outdir = os.path.dirname(outname)
    format = write.find('.//parameters/formatName').text
    infile = workflow.find('.//node[@id="Read"]/parameters/file').text

    if format == 'GeoTiff-BigTIFF':
        cmd = [
            gpt_exec,
            # '-Dsnap.dataio.reader.tileWidth=*',
            # '-Dsnap.dataio.reader.tileHeight=1',
            '-Dsnap.dataio.bigtiff.tiling.width=256',
            '-Dsnap.dataio.bigtiff.tiling.height=256',
            # '-Dsnap.dataio.bigtiff.compression.type=LZW',
            # '-Dsnap.dataio.bigtiff.compression.quality=0.75',
            xmlfile
        ]
    else:
        cmd = [gpt_exec, xmlfile]
    # print('- processing workflow {}'.format(os.path.basename(xmlfile)))
    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = proc.communicate()
    out = out.decode('utf-8') if isinstance(out, bytes) else out
    err = err.decode('utf-8') if isinstance(err, bytes) else err
    if proc.returncode != 0:
        if os.path.isfile(outname + '.tif'):
            os.remove(outname + '.tif')
        elif os.path.isdir(outname):
            shutil.rmtree(outname)
        print(out + err)
        print('failed: {}'.format(os.path.basename(infile)))
        err_match = re.search('Error: (.*)\n', out + err)
        errmessage = err_match.group(1) if err_match else err
        raise RuntimeError(errmessage)
    if format == 'ENVI':
        # print('- converting to GTiff')
        suffix = parse_suffix(workflow)
        translateoptions = {
            'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
            'format': 'GTiff',
            'noData': 0
        }
        for item in finder(outname, ['*.img']):
            pol = re.search('[HV]{2}', item).group()
            name_new = outname.replace(suffix,
                                       '{0}_{1}.tif'.format(pol, suffix))
            gdal_translate(item, name_new, translateoptions)
        shutil.rmtree(outname)
    elif format == 'GeoTiff-BigTIFF':
        ras = gdal.Open(outname + '.tif', GA_Update)
        for i in range(1, ras.RasterCount + 1):
            ras.GetRasterBand(i).SetNoDataValue(0)
        ras = None
Code Example #22
File: util.py  Project: spatialtrail/pyroSAR
def convert2gamma(id,
                  directory,
                  S1_noiseremoval=True,
                  logpath=None,
                  outdir=None,
                  shellscript=None):
    """
    general function for converting SAR images to GAMMA format

    Parameters
    ----------
    id: ~pyroSAR.drivers.ID
        a SAR scene object of type pyroSAR.ID or any subclass
    directory: str
        the output directory for the converted images
    S1_noiseremoval: bool
        only Sentinel-1: should noise removal be applied to the image?
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format

    Returns
    -------

    """

    if not isinstance(id, ID):
        raise IOError('id must be of type pyroSAR.ID')

    if id.compression is not None:
        raise RuntimeError('scene is not yet unpacked')

    if not os.path.isdir(directory):
        os.makedirs(directory)

    if isinstance(id, CEOS_ERS):
        if id.sensor in ['ERS1', 'ERS2']:
            if id.product == 'SLC' and id.meta['proc_system'] in [
                    'PGS-ERS', 'VMP-ERS', 'SPF-ERS'
            ]:
                basename = '{}_{}_{}'.format(id.outname_base(),
                                             id.polarizations[0],
                                             id.product.lower())
                outname = os.path.join(directory, basename)
                if not os.path.isfile(outname):
                    lea = id.findfiles('LEA_01.001')[0]
                    dat = id.findfiles('DAT_01.001')[0]
                    title = re.sub(r'\.PS$', '', os.path.basename(id.file))

                    isp.par_ESA_ERS(CEOS_SAR_leader=lea,
                                    SLC_par=outname + '.par',
                                    CEOS_DAT=dat,
                                    SLC=outname,
                                    inlist=[title],
                                    logpath=logpath,
                                    outdir=outdir,
                                    shellscript=shellscript)
                else:
                    print('scene already converted')
            else:
                raise NotImplementedError(
                    'ERS {} product of {} processor in CEOS format not implemented yet'
                    .format(id.product, id.meta['proc_system']))
        else:
            raise NotImplementedError(
                'sensor {} in CEOS format not implemented yet'.format(
                    id.sensor))

    elif isinstance(id, CEOS_PSR):
        images = id.findfiles('^IMG-')
        if id.product == '1.0':
            raise RuntimeError('PALSAR level 1.0 products are not supported')
        for image in images:
            polarization = re.search('[HV]{2}',
                                     os.path.basename(image)).group(0)
            if id.product == '1.1':
                outname_base = '{}_{}_slc'.format(id.outname_base(),
                                                  polarization)
                outname = os.path.join(directory, outname_base)

                isp.par_EORC_PALSAR(CEOS_leader=id.file,
                                    SLC_par=outname + '.par',
                                    CEOS_data=image,
                                    SLC=outname,
                                    logpath=logpath,
                                    outdir=outdir,
                                    shellscript=shellscript)
            else:
                outname_base = '{}_{}_mli_geo'.format(id.outname_base(),
                                                      polarization)
                outname = os.path.join(directory, outname_base)

                diff.par_EORC_PALSAR_geo(CEOS_leader=id.file,
                                         MLI_par=outname + '.par',
                                         DEM_par=outname + '_dem.par',
                                         CEOS_data=image,
                                         MLI=outname,
                                         logpath=logpath,
                                         outdir=outdir,
                                         shellscript=shellscript)
            par2hdr(outname + '.par', outname + '.hdr')

    elif isinstance(id, ESA):
        """
        the command par_ASAR also accepts a K_dB argument for calibration in which case the resulting image names will carry the suffix GRD;
        this is not implemented here but instead in function calibrate
        """
        outname = os.path.join(directory, id.outname_base())
        if not id.is_processed(directory):

            isp.par_ASAR(ASAR_ERS_file=os.path.basename(id.file),
                         output_name=outname,
                         outdir=os.path.dirname(id.file),
                         logpath=logpath,
                         shellscript=shellscript)

            os.remove(outname + '.hdr')
            for item in finder(directory, [os.path.basename(outname)],
                               regex=True):
                ext = '.par' if item.endswith('.par') else ''
                base = os.path.basename(item)
                if ext:
                    # str.strip would remove matching characters, not the suffix; cut it off explicitly
                    base = base[:-len(ext)]
                base = base.replace('.', '_')
                base = base.replace('PRI', 'pri')
                base = base.replace('SLC', 'slc')
                newname = os.path.join(directory, base + ext)
                os.rename(item, newname)
                if newname.endswith('.par'):
                    par2hdr(newname, newname.replace('.par', '.hdr'))
        else:
            raise IOError('scene already processed')

    elif isinstance(id, SAFE):
        if id.product == 'OCN':
            raise IOError('Sentinel-1 OCN products are not supported')
        if id.meta['category'] == 'A':
            raise IOError(
                'Sentinel-1 annotation-only products are not supported')

        for xml_ann in finder(os.path.join(id.scene, 'annotation'),
                              [id.pattern_ds],
                              regex=True):
            base = os.path.basename(xml_ann)
            match = re.compile(id.pattern_ds).match(base)

            tiff = os.path.join(id.scene, 'measurement',
                                base.replace('.xml', '.tiff'))
            xml_cal = os.path.join(id.scene, 'annotation', 'calibration',
                                   'calibration-' + base)

            product = match.group('product')

            # specify noise calibration file
            # L1 GRD product: thermal noise already subtracted, specify xml_noise to add back thermal noise
            # SLC products: specify noise file to remove noise
            # xml_noise = '-': noise file not specified
            if (S1_noiseremoval
                    and product == 'slc') or (not S1_noiseremoval
                                              and product == 'grd'):
                xml_noise = os.path.join(id.scene, 'annotation', 'calibration',
                                         'noise-' + base)
            else:
                xml_noise = '-'

            fields = (id.outname_base(), match.group('pol').upper(), product)
            name = os.path.join(directory, '_'.join(fields))

            pars = {
                'GeoTIFF': tiff,
                'annotation_XML': xml_ann,
                'calibration_XML': xml_cal,
                'noise_XML': xml_noise,
                'logpath': logpath,
                'shellscript': shellscript,
                'outdir': outdir
            }

            if product == 'slc':
                swath = match.group('swath').upper()
                name = name.replace(
                    '{:_<{length}}'.format(id.acquisition_mode,
                                           length=len(swath)), swath)
                pars['SLC'] = name
                pars['SLC_par'] = name + '.par'
                pars['TOPS_par'] = name + '.tops_par'
                isp.par_S1_SLC(**pars)
            else:
                pars['MLI'] = name
                pars['MLI_par'] = name + '.par'
                isp.par_S1_GRD(**pars)

            par2hdr(name + '.par', name + '.hdr')

    elif isinstance(id, TSX):
        images = id.findfiles(id.pattern_ds)
        pattern = re.compile(id.pattern_ds)
        for image in images:
            pol = pattern.match(os.path.basename(image)).group('pol')
            outname = os.path.join(directory, id.outname_base() + '_' + pol)

            pars = {
                'annotation_XML': id.file,
                'pol': pol,
                'logpath': logpath,
                'shellscript': shellscript,
                'outdir': outdir
            }

            if id.product == 'SSC':
                outname += '_slc'
                pars['COSAR'] = image
                pars['SLC_par'] = outname + '.par'
                pars['SLC'] = outname
                isp.par_TX_SLC(**pars)

            elif id.product == 'MGD':
                outname += '_mli'
                pars['GeoTIFF'] = image
                pars['GRD_par'] = outname + '.par'
                pars['GRD'] = outname
                isp.par_TX_GRD(**pars)

            elif id.product in ['GEC', 'EEC']:
                outname += '_mli_geo'
                pars['GeoTIFF'] = image
                pars['MLI_par'] = outname + '.par'
                pars['DEM_par'] = outname + '_dem.par'
                pars['GEO'] = outname
                diff.par_TX_geo(**pars)
            else:
                raise RuntimeError('unknown product: {}'.format(id.product))

            par2hdr(outname + '.par', outname + '.hdr')
    else:
        raise NotImplementedError(
            'conversion for class {} is not implemented yet'.format(
                type(id).__name__))
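A hedged usage sketch; the import path is assumed from the file attribution (util.py in pyroSAR's gamma subpackage) and the scene archive is hypothetical. The scene must be unpacked first, since the function rejects compressed scenes:

from pyroSAR import identify
from pyroSAR.gamma.util import convert2gamma  # assumed module path

scene = identify('/path/to/S1A_IW_GRDH.zip')  # hypothetical scene archive
scene.unpack('/path/to/unpacked')
convert2gamma(scene, directory='/path/to/gamma_images')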
Code Example #23
def gpt(xmlfile, groups=None, cleanup=True):
    """
    wrapper for ESA SNAP's Graph Processing Tool GPT.
    Input is a readily formatted workflow XML file as
    created by function :func:`~pyroSAR.snap.util.geocode`.
    In addition to calling GPT, this function will
    
     * execute the workflow in groups as defined by `groups`
     * encode a nodata value into the output file if the format is GeoTiff-BigTIFF
     * convert output files to GeoTiff if the output format is ENVI
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    groups: list
        a list of lists each containing IDs for individual nodes
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    
    Returns
    -------

    """
    
    workflow = Workflow(xmlfile)
    write = workflow['Write']
    outname = write.parameters['file']
    format = write.parameters['formatName']
    dem_name = workflow.tree.find('.//demName').text
    if dem_name == 'External DEM':
        dem_nodata = float(workflow.tree.find('.//externalDEMNoDataValue').text)
    else:
        dem_nodata = 0
    print('executing node sequence{}..'.format('s' if groups is not None else ''))
    if groups is not None:
        subs = split(xmlfile, groups)
        for sub in subs:
            execute(sub, cleanup=cleanup)
    else:
        execute(xmlfile, cleanup=cleanup)
    
    if format == 'ENVI':
        print('converting to GTiff')
        suffix = workflow.suffix
        translateoptions = {'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
                            'format': 'GTiff'}
        for item in finder(outname, ['*.img'], recursive=False):
            if re.search('[HV]{2}', item):
                pol = re.search('[HV]{2}', item).group()
                name_new = outname.replace(suffix, '{0}_{1}.tif'.format(pol, suffix))
            else:
                base = os.path.splitext(os.path.basename(item))[0] \
                    .replace('elevation', 'DEM')
                name_new = outname.replace(suffix, '{0}.tif'.format(base))
            nodata = dem_nodata if re.search('elevation', item) else 0
            translateoptions['noData'] = nodata
            gdal_translate(item, name_new, translateoptions)
        if cleanup:
            shutil.rmtree(outname)
    # by default the nodata value is not registered in the GTiff metadata
    elif format == 'GeoTiff-BigTIFF':
        ras = gdal.Open(outname + '.tif', GA_Update)
        for i in range(1, ras.RasterCount + 1):
            ras.GetRasterBand(i).SetNoDataValue(0)
        ras = None
    print('done')
Code Example #24
File: auxil.py  Project: waynechao128/pyroSAR
def gpt(xmlfile,
        groups=None,
        cleanup=True,
        gpt_exceptions=None,
        removeS1BorderNoiseMethod='pyroSAR'):
    """
    wrapper for ESA SNAP's Graph Processing Tool GPT.
    Input is a readily formatted workflow XML file as
    created by function :func:`~pyroSAR.snap.util.geocode`.
    In addition to calling GPT, this function will
    
     * execute the workflow in groups as defined by `groups`
     * encode a nodata value into the output file if the format is GeoTiff-BigTIFF
     * convert output files to GeoTiff if the output format is ENVI
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    groups: list
        a list of lists each containing IDs for individual nodes
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    gpt_exceptions: dict
        a dictionary to override the configured GPT executable for certain operators;
        each (sub-)workflow containing this operator will be executed with the defined executable;
        
         - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``
    removeS1BorderNoiseMethod: str
        the border noise removal method to be applied; see :func:`pyroSAR.S1.removeGRDBorderNoise` for details; one of the following:
         - 'ESA': the pure implementation as described by ESA
         - 'pyroSAR': the ESA method plus the custom pyroSAR refinement
    
    Returns
    -------

    """

    workflow = Workflow(xmlfile)
    write = workflow['Write']
    outname = write.parameters['file']
    suffix = workflow.suffix
    format = write.parameters['formatName']
    dem_name = workflow.tree.find('.//demName').text
    if dem_name == 'External DEM':
        dem_nodata = float(
            workflow.tree.find('.//externalDEMNoDataValue').text)
    else:
        dem_nodata = 0

    if 'Remove-GRD-Border-Noise' in workflow.ids and removeS1BorderNoiseMethod == 'pyroSAR':
        if 'SliceAssembly' in workflow.operators:
            raise RuntimeError(
                "pyroSAR's custom border noise removal is not yet implemented for multiple scene inputs"
            )
        xmlfile = os.path.join(outname,
                               os.path.basename(xmlfile.replace('_bnr', '')))
        if not os.path.isdir(outname):
            os.makedirs(outname)
        # border noise removal is done outside of SNAP and the node is thus removed from the workflow
        del workflow['Remove-GRD-Border-Noise']
        # remove the node name from the groups
        i = 0
        while i < len(groups) - 1:
            if 'Remove-GRD-Border-Noise' in groups[i]:
                del groups[i][groups[i].index('Remove-GRD-Border-Noise')]
            if len(groups[i]) == 0:
                del groups[i]
            else:
                i += 1
        # identify the input scene, unpack it and perform the custom border noise removal
        read = workflow['Read']
        scene = identify(read.parameters['file'])
        print('unpacking scene')
        scene.unpack(outname)
        print('removing border noise..')
        scene.removeGRDBorderNoise(method=removeS1BorderNoiseMethod)
        # change the name of the input file to that of the unpacked archive
        read.parameters['file'] = scene.scene
        # write a new workflow file
        workflow.write(xmlfile)

    print('executing node sequence{}..'.format(
        's' if groups is not None else ''))
    if groups is not None:
        subs = split(xmlfile, groups)
        for sub in subs:
            execute(sub, cleanup=cleanup, gpt_exceptions=gpt_exceptions)
    else:
        execute(xmlfile, cleanup=cleanup, gpt_exceptions=gpt_exceptions)

    if format == 'ENVI':
        print('converting to GTiff')
        translateoptions = {
            'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
            'format': 'GTiff'
        }
        for item in finder(outname, ['*.img'], recursive=False):
            if re.search('[HV]{2}', item):
                pol = re.search('[HV]{2}', item).group()
                name_new = outname.replace(suffix,
                                           '{0}_{1}.tif'.format(pol, suffix))
            else:
                base = os.path.splitext(os.path.basename(item))[0] \
                    .replace('elevation', 'DEM')
                name_new = outname.replace(suffix, '{0}.tif'.format(base))
            nodata = dem_nodata if re.search('elevation', item) else 0
            translateoptions['noData'] = nodata
            gdal_translate(item, name_new, translateoptions)
    # by default the nodata value is not registered in the GTiff metadata
    elif format == 'GeoTiff-BigTIFF':
        ras = gdal.Open(outname + '.tif', GA_Update)
        for i in range(1, ras.RasterCount + 1):
            ras.GetRasterBand(i).SetNoDataValue(0)
        ras = None
    if cleanup:
        shutil.rmtree(outname)
    ###########################################################################
    # write the Sentinel-1 manifest.safe file as addition to the actual product
    attrs = parse_datasetname(outname)
    ext = '' if attrs['extensions'] is None else attrs['extensions']
    readers = workflow['operator=Read']
    for reader in readers:
        infile = reader.parameters['file']
        try:
            id = identify(infile)
            if id.sensor in ['S1A', 'S1B']:
                manifest = id.getFileObj(id.findfiles('manifest.safe')[0])
                basename = '_'.join([id.outname_base() + ext, 'manifest.safe'])
                outdir = os.path.dirname(outname)
                outname_manifest = os.path.join(outdir, basename)
                with open(outname_manifest, 'wb') as out:
                    out.write(manifest.read())
        except RuntimeError:
            continue
    ###########################################################################
    print('done')
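A hedged call sketch for this variant; the workflow file is hypothetical and groupbyWorkers (used in code example #5 above) is assumed to build the node groups:

from pyroSAR.snap.auxil import gpt, groupbyWorkers  # assumed module path

xml = 'S1A_geocode_workflow.xml'  # hypothetical workflow created by geocode
groups = groupbyWorkers(xml, 2)
gpt(xml, groups=groups, cleanup=True,
    removeS1BorderNoiseMethod='pyroSAR')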
Code Example #25
def test_finder(tmpdir, testdata):
    dir = str(tmpdir)
    dir_sub1 = os.path.join(dir, 'testdir1')
    dir_sub2 = os.path.join(dir, 'testdir2')
    os.makedirs(dir_sub1)
    os.makedirs(dir_sub2)
    with open(os.path.join(dir_sub1, 'testfile1.txt'), 'w') as t1:
        t1.write('test')
    with open(os.path.join(dir_sub2, 'testfile2.txt'), 'w') as t2:
        t2.write('test')
    assert len(anc.finder(dir, ['test*'], foldermode=0)) == 2
    assert len(anc.finder(dir, ['test*'], foldermode=0, recursive=False)) == 0
    assert len(anc.finder(dir, ['test*'], foldermode=1)) == 4
    assert len(anc.finder(dir, ['test*'], foldermode=2)) == 2
    assert len(anc.finder([dir_sub1, dir_sub2], ['test*'])) == 2

    assert len(anc.finder(testdata['zip'], ['file*'])) == 3
    assert len(anc.finder(testdata['zip'], ['*'], foldermode=1)) == 5
    assert len(anc.finder(testdata['zip'], ['[a-z]{1}'], foldermode=2, regex=True)) == 2
    
    assert len(anc.finder(testdata['tar'], ['file*'])) == 3
    assert len(anc.finder(testdata['tar'], ['*'], foldermode=1)) == 5
    assert len(anc.finder(testdata['tar'], ['[a-z]{1}'], foldermode=2, regex=True)) == 2
    
    with pytest.raises(TypeError):
        anc.finder(1, [])
    
    with pytest.raises(ValueError):
        anc.finder(dir, ['test*'], foldermode=3)
    
    with pytest.raises(RuntimeError):
        anc.finder('foobar', ['test*'], foldermode=2)
    
    with pytest.raises(RuntimeError):
        anc.finder(testdata['tif'], ['test*'], foldermode=2)
Code Example #26
def gpt(xmlfile, groups=None, cleanup=True, gpt_exceptions=None):
    """
    wrapper for ESA SNAP's Graph Processing Tool GPT.
    Input is a readily formatted workflow XML file as
    created by function :func:`~pyroSAR.snap.util.geocode`.
    In addition to calling GPT, this function will
    
     * execute the workflow in groups as defined by `groups`
     * encode a nodata value into the output file if the format is GeoTiff-BigTIFF
     * convert output files to GeoTiff if the output format is ENVI
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    groups: list
        a list of lists each containing IDs for individual nodes
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    gpt_exceptions: dict
        a dictionary to override the configured GPT executable for certain operators;
        each (sub-)workflow containing this operator will be executed with the defined executable;
        
         - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``
    
    Returns
    -------

    """

    workflow = Workflow(xmlfile)
    write = workflow['Write']
    outname = write.parameters['file']
    suffix = workflow.suffix
    format = write.parameters['formatName']
    dem_name = workflow.tree.find('.//demName').text
    if dem_name == 'External DEM':
        dem_nodata = float(
            workflow.tree.find('.//externalDEMNoDataValue').text)
    else:
        dem_nodata = 0

    if 'Remove-GRD-Border-Noise' in workflow.ids:
        xmlfile = os.path.join(outname,
                               os.path.basename(xmlfile.replace('_bnr', '')))
        if not os.path.isdir(outname):
            os.makedirs(outname)
        # border noise removal is done outside of SNAP and the node is thus removed from the workflow
        del workflow['Remove-GRD-Border-Noise']
        # remove the node name from the groups
        i = 0
        while i < len(groups) - 1:
            if 'Remove-GRD-Border-Noise' in groups[i]:
                del groups[i][groups[i].index('Remove-GRD-Border-Noise')]
            if len(groups[i]) == 0:
                del groups[i]
            else:
                i += 1
        # identify the input scene, unpack it and perform the custom border noise removal
        read = workflow['Read']
        scene = identify(read.parameters['file'])
        print('unpacking scene')
        scene.unpack(outname)
        print('removing border noise..')
        scene.removeGRDBorderNoise()
        # change the name of the input file to that of the unpacked archive
        read.parameters['file'] = scene.scene
        # write a new workflow file
        workflow.write(xmlfile)

    print('executing node sequence{}..'.format(
        's' if groups is not None else ''))
    if groups is not None:
        subs = split(xmlfile, groups)
        for sub in subs:
            execute(sub, cleanup=cleanup, gpt_exceptions=gpt_exceptions)
    else:
        execute(xmlfile, cleanup=cleanup, gpt_exceptions=gpt_exceptions)

    if format == 'ENVI':
        print('converting to GTiff')
        translateoptions = {
            'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
            'format': 'GTiff'
        }
        for item in finder(outname, ['*.img'], recursive=False):
            if re.search('[HV]{2}', item):
                pol = re.search('[HV]{2}', item).group()
                name_new = outname.replace(suffix,
                                           '{0}_{1}.tif'.format(pol, suffix))
            else:
                base = os.path.splitext(os.path.basename(item))[0] \
                    .replace('elevation', 'DEM')
                name_new = outname.replace(suffix, '{0}.tif'.format(base))
            nodata = dem_nodata if re.search('elevation', item) else 0
            translateoptions['noData'] = nodata
            gdal_translate(item, name_new, translateoptions)
    # by default the nodata value is not registered in the GTiff metadata
    elif format == 'GeoTiff-BigTIFF':
        ras = gdal.Open(outname + '.tif', GA_Update)
        for i in range(1, ras.RasterCount + 1):
            ras.GetRasterBand(i).SetNoDataValue(0)
        ras = None
    if cleanup:
        shutil.rmtree(outname)
    print('done')
Code Example #27
File: dem.py  Project: lidahuilidahui/pyroSAR
def hgt_collect(parfiles, outdir, demdir=None, arcsec=3):
    """
    automatic downloading and unpacking of SRTM tiles

    Parameters
    ----------
    parfiles: list of str or pyroSAR.ID
        a list of Gamma parameter files or pyroSAR ID objects
    outdir: str
        a target directory to download the tiles to
    demdir: str or None
        an additional directory already containing hgt tiles
    arcsec: {1, 3}
        the spatial resolution to be used

    Returns
    -------
    list
        the names of all local hgt tiles overlapping with the parfiles
    """
    
    # concatenate required hgt tile names
    target_ids = hgt(parfiles)
    
    targets = []
    
    pattern = '[NS][0-9]{2}[EW][0-9]{3}'
    
    # if an additional dem directory has been defined, check this directory for required hgt tiles
    if demdir is not None:
        targets.extend(finder(demdir, target_ids))
    
    # check for additional potentially existing hgt tiles in the defined output directory
    extras = [os.path.join(outdir, x) for x in target_ids if
              os.path.isfile(os.path.join(outdir, x)) and not re.search(x, '\n'.join(targets))]
    targets.extend(extras)
    
    print('found {} relevant SRTM tiles...'.format(len(targets)))
    
    # search server for all required tiles, which were not found in the local directories
    if len(targets) < len(target_ids):
        print('searching for additional SRTM tiles on the server...')
        onlines = []
        
        if arcsec == 1:
            remotes = ['http://e4ftl01.cr.usgs.gov/SRTM/SRTMGL1.003/2000.02.11/']
            remotepattern = pattern + '.SRTMGL1.hgt.zip'
        elif arcsec == 3:
            server = 'https://dds.cr.usgs.gov/srtm/version2_1/SRTM3/'
            # plain concatenation instead of os.path.join, which would insert '\' on Windows
            remotes = [server + x for x in
                       ['Africa', 'Australia', 'Eurasia', 'Islands', 'North_America', 'South_America']]
            remotepattern = pattern + '[.]hgt.zip'
        else:
            raise ValueError('argument arcsec must be of value 1 or 3')
        
        for remote in remotes:
            # urlopen returns bytes in Python 3; decode before regex matching
            response = urlopen(remote).read().decode('utf-8')
            items = sorted(set(re.findall(remotepattern, response)))
            for item in items:
                outname = re.findall(pattern, item)[0] + '.hgt'
                if outname in target_ids and outname not in [os.path.basename(x) for x in targets]:
                    onlines.append('{0}/{1}'.format(remote.rstrip('/'), item))
        
        # if additional tiles have been found online, download and unzip them to the local directory
        if len(onlines) > 0:
            print('downloading {} SRTM tiles...'.format(len(onlines)))
            for candidate in onlines:
                localname = os.path.join(outdir, re.findall(pattern, candidate)[0] + '.hgt')
                infile = urlopen(candidate)
                with open(localname + '.zip', 'wb') as outfile:
                    outfile.write(infile.read())
                infile.close()
                with zf.ZipFile(localname + '.zip', 'r') as z:
                    z.extractall(outdir)
                os.remove(localname + '.zip')
                targets.append(localname)
    return targets
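
A hypothetical usage sketch for hgt_collect; the scene path and output directory are placeholders, and identify is assumed to be importable from the pyroSAR top-level package:

# hedged sketch: '/path/to/...' names are placeholders
from pyroSAR import identify

scene = identify('/path/to/S1A_scene.zip')
tiles = hgt_collect(parfiles=[scene], outdir='/path/to/hgt', demdir=None, arcsec=3)
# tiles is then a list of local file names such as '/path/to/hgt/N50E011.hgt'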
Code example #28
def gpt(xmlfile,
        outdir,
        groups=None,
        cleanup=True,
        gpt_exceptions=None,
        gpt_args=None,
        removeS1BorderNoiseMethod='pyroSAR',
        basename_extensions=None,
        multisource=False):
    """
    Wrapper for ESA SNAP's Graph Processing Tool (GPT).
    Input is a readily formatted workflow XML file as
    created by function :func:`~pyroSAR.snap.util.geocode`.
    In addition to calling GPT, this function will
    
     * execute the workflow in groups as defined by `groups`
     * encode a nodata value into the output file if the format is GeoTiff-BigTIFF
     * convert output files to GeoTiff if the output format is ENVI
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    outdir: str
        the directory into which to write the final files
    groups: list or None
        a list of lists, each containing the IDs of workflow nodes to be executed together as a sub-workflow
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    gpt_exceptions: dict
        a dictionary to override the configured GPT executable for certain operators;
        each (sub-)workflow containing this operator will be executed with the defined executable;
        
         - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``
    gpt_args: list or None
        a list of additional arguments to be passed to the gpt call
        
        - e.g. ``['-x', '-c', '2048M']`` for increased tile cache size and intermediate clearing
    removeS1BorderNoiseMethod: str
        the border noise removal method to be applied; see :func:`pyroSAR.S1.removeGRDBorderNoise` for details. One of the following:
         - 'ESA': the pure implementation as described by ESA
         - 'pyroSAR': the ESA method plus the custom pyroSAR refinement
    basename_extensions: list of str
        names of additional parameters to append to the basename, e.g. ['orbitNumber_rel']
    multisource: bool
        does the workflow read multiple scenes via a ProductSet-Reader node instead of a single Read node?
    
    Returns
    -------
    
    Raises
    ------
    RuntimeError
    """

    workflow = Workflow(xmlfile)

    if multisource:
        read = workflow['ProductSet-Reader']
        # scene = identify_many(read.parameters['fileList'].split(",")) # not working
    else:
        read = workflow['Read']
        scene = identify(read.parameters['file'])

    write = workflow['Write']
    tmpname = write.parameters['file']
    suffix = workflow.suffix()
    format = write.parameters['formatName']
    dem_name = workflow.tree.find('.//demName')
    # default to 0 so that dem_nodata is defined even if the workflow contains no demName node
    dem_nodata = 0
    if dem_name is not None and dem_name.text == 'External DEM':
        dem_nodata = float(
            workflow.tree.find('.//externalDEMNoDataValue').text)

    if 'Remove-GRD-Border-Noise' in workflow.ids \
            and removeS1BorderNoiseMethod == 'pyroSAR' \
            and scene.meta['IPF_version'] < 2.9:
        if 'SliceAssembly' in workflow.operators:
            raise RuntimeError(
                "pyroSAR's custom border noise removal is not yet implemented for multiple scene inputs"
            )
        xmlfile = os.path.join(tmpname,
                               os.path.basename(xmlfile.replace('_bnr', '')))
        os.makedirs(tmpname, exist_ok=True)
        # border noise removal is done outside of SNAP and the node is thus removed from the workflow
        del workflow['Remove-GRD-Border-Noise']
        # remove the node name from the groups and drop any group that becomes empty
        i = 0
        # iterate over all groups (not len(groups) - 1, which would skip the last one)
        while i < len(groups):
            if 'Remove-GRD-Border-Noise' in groups[i]:
                del groups[i][groups[i].index('Remove-GRD-Border-Noise')]
            if len(groups[i]) == 0:
                del groups[i]
            else:
                i += 1
        # unpack the scene if necessary and perform the custom border noise removal
        if scene.compression is not None:
            print('unpacking scene')
            scene.unpack(tmpname)
        print('removing border noise..')
        scene.removeGRDBorderNoise(method=removeS1BorderNoiseMethod)
        # change the name of the input file to that of the unpacked archive
        read.parameters['file'] = scene.scene
        # write a new workflow file
        workflow.write(xmlfile)

    print('executing node sequence{}..'.format(
        's' if groups is not None else ''))
    try:
        if groups is not None:
            tmpdir = os.path.join(tmpname, 'tmp')
            subs = split(xmlfile, groups, tmpdir)
            for sub in subs:
                execute(sub,
                        cleanup=cleanup,
                        gpt_exceptions=gpt_exceptions,
                        gpt_args=gpt_args)
        else:
            execute(xmlfile,
                    cleanup=cleanup,
                    gpt_exceptions=gpt_exceptions,
                    gpt_args=gpt_args)
    except RuntimeError as e:
        if cleanup and os.path.exists(tmpname):
            shutil.rmtree(tmpname, onerror=windows_fileprefix)
        raise RuntimeError(str(e) + '\nfailed: {}'.format(xmlfile))

    outname = os.path.join(outdir, os.path.basename(tmpname))

    if format == 'ENVI':
        print('converting to GTiff')
        translateoptions = {
            'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'],
            'format': 'GTiff'
        }
        for item in finder(tmpname, ['*.img'], recursive=False):
            if re.search('ma0_[HV]{2}', item):
                pol = re.search('[HV]{2}', item).group()
                name_new = outname.replace(suffix,
                                           '{0}_{1}.tif'.format(pol, suffix))
                if 'Sigma0' in item:
                    name_new = name_new.replace('TF_', '')
            else:
                base = os.path.splitext(os.path.basename(item))[0] \
                    .replace('elevation', 'DEM')
                if re.search('layover_shadow_mask', base):
                    base = re.sub('layover_shadow_mask_[HV]{2}',
                                  'layoverShadowMask', base)
                if re.search('scatteringArea', base):
                    base = re.sub('scatteringArea_[HV]{2}', 'scatteringArea',
                                  base)
                name_new = outname.replace(suffix, '{0}.tif'.format(base))
            nodata = dem_nodata if re.search('elevation', item) else 0
            translateoptions['noData'] = nodata
            gdal_translate(item, name_new, translateoptions)
    # by default the nodata value is not registered in the GTiff metadata
    elif format == 'GeoTiff-BigTIFF':
        ras = gdal.Open(outname + '.tif', GA_Update)
        for i in range(1, ras.RasterCount + 1):
            ras.GetRasterBand(i).SetNoDataValue(0)
        ras = None
    ###########################################################################
    # write the Sentinel-1 manifest.safe file as addition to the actual product
    if not multisource:
        readers = workflow['operator=Read']
        for reader in readers:
            infile = reader.parameters['file']
            try:
                # 'sid' instead of 'id', which would shadow the Python builtin
                sid = identify(infile)
                if sid.sensor in ['S1A', 'S1B']:
                    manifest = sid.getFileObj(sid.findfiles('manifest.safe')[0])
                    basename = sid.outname_base(basename_extensions)
                    basename = '{0}_manifest.safe'.format(basename)
                    outname_manifest = os.path.join(outdir, basename)
                    with open(outname_manifest, 'wb') as out:
                        out.write(manifest.read())
            except RuntimeError:
                continue
    ###########################################################################
    if cleanup and os.path.exists(tmpname):
        shutil.rmtree(tmpname, onerror=windows_fileprefix)
    print('done')
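
A hypothetical usage sketch for gpt; the paths and node IDs are placeholders, and the import assumes the function lives in pyroSAR.snap.auxil as in current pyroSAR:

# hedged sketch: workflow file, output directory and group contents are placeholders
from pyroSAR.snap.auxil import gpt

gpt(xmlfile='/path/to/workflow.xml',
    outdir='/path/to/out',
    groups=[['Read', 'Apply-Orbit-File', 'Calibration'],
            ['Terrain-Flattening', 'Terrain-Correction', 'Write']],
    gpt_args=['-x', '-c', '2048M'])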