Example #1
 def suffix(self, stop=None):
     """
     Get the SNAP operator suffix sequence
     
     Parameters
     ----------
     stop: str
         the ID of the last workflow node
     
     Returns
     -------
     str
         a file suffix created from the order in which the nodes will be executed
     """
     nodes = self.tree.findall('node')
     # strip numbering suffixes like ' (2)' from node IDs
     names = [re.sub(r'[ ]*\([0-9]+\)', '', y.attrib['id']) for y in nodes]
     names_unique = []
     for name in names:
         if name not in names_unique:
             names_unique.append(name)
         if name == stop:
             break
     config = ExamineSnap()
     suffix = '_'.join(
         filter(None, [config.get_suffix(x) for x in names_unique]))
     return suffix
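A minimal usage sketch for this method (assuming it belongs to pyroSAR's snap Workflow class, which is constructed from a workflow XML file as in Example #8; the file name and node ID are illustrative):

from pyroSAR.snap.auxil import Workflow

flow = Workflow('workflow.xml')
print(flow.suffix())                    # suffix over all nodes, e.g. 'NR_Cal_TC'
print(flow.suffix(stop='Calibration'))  # suffix truncated at the Calibration node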
Example #2
 def suffix(self):
     """
     
     Returns
     -------
     str
         a file suffix created from the order in which the nodes will be executed
     """
     nodes = self.tree.findall('node')
     # strip numbering suffixes like ' (2)' from node IDs
     names = [re.sub(r'[ ]*\([0-9]+\)', '', y.attrib['id']) for y in nodes]
     names_unique = []
     for name in names:
         if name not in names_unique:
             names_unique.append(name)
     config = ExamineSnap()
     suffix = '_'.join(
         filter(None, [config.get_suffix(x) for x in names_unique]))
     return suffix
Example #3
 def __init__(self, geometries):
     if not isinstance(geometries, list):
         raise RuntimeError('geometries must be of type list')
     
     for geometry in geometries:
         if geometry.getProjection('epsg') != 4326:
             raise RuntimeError('input geometry CRS must be WGS84 LatLon (EPSG 4326)')
     
     self.geometries = geometries
     try:
         self.auxdatapath = ExamineSnap().auxdatapath
     except AttributeError:
         self.auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
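A usage sketch; the class name DEMHandler is hypothetical here, and spatialist's Vector is assumed for the geometry objects:

from spatialist import Vector

# the input must be in WGS84 lat/lon (EPSG:4326), otherwise a RuntimeError is raised
with Vector('aoi.shp') as site:
    handler = DEMHandler(geometries=[site])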
Example #4
 def __init__(self, osvdir=None, timeout=20):
     self.timeout = timeout
     if osvdir is None:
         try:
             auxdatapath = ExamineSnap().auxdatapath
         except AttributeError:
             auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
         osvdir = os.path.join(auxdatapath, 'Orbits', 'Sentinel-1')
     self.outdir_poe = os.path.join(osvdir, 'POEORB')
     self.outdir_res = os.path.join(osvdir, 'RESORB')
     self.pattern = r'S1[AB]_OPER_AUX_(?:POE|RES)ORB_OPOD_[0-9TV_]{48}\.EOF'
     self.pattern_fine = r'(?P<sensor>S1[AB])_OPER_AUX_' \
                         r'(?P<type>(?:POE|RES)ORB)_OPOD_' \
                         r'(?P<publish>[0-9]{8}T[0-9]{6})_V' \
                         r'(?P<start>[0-9]{8}T[0-9]{6})_' \
                         r'(?P<stop>[0-9]{8}T[0-9]{6})\.EOF'
     self._init_dir()
     self._reorganize()
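A short sketch of typical use, assuming the class is exposed as pyroSAR.S1.OSV; with osvdir=None the directory defaults to Orbits/Sentinel-1 inside the SNAP auxdata path. The timestamp is illustrative:

from pyroSAR.S1 import OSV

with OSV(osvdir=None, timeout=60) as osv:
    # match() is also used in Example #11 below to select a file by acquisition time
    osvfile = osv.match(sensor='S1A', timestamp='20150222T170750', osvtype='POE')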
Example #5
def get_egm96_lookup():
    """
    If not found, download SNAP's lookup table for converting EGM96 geoid heights to WGS84 ellipsoid heights
    
    Returns
    -------

    """
    try:
        auxdatapath = ExamineSnap().auxdatapath
    except AttributeError:
        auxdatapath = os.path.join(os.path.expanduser('~'), '.snap', 'auxdata')
    local = os.path.join(auxdatapath, 'dem', 'egm96', 'ww15mgh_b.zip')
    os.makedirs(os.path.dirname(local), exist_ok=True)
    if not os.path.isfile(local):
        remote = 'http://step.esa.int/auxdata/dem/egm96/ww15mgh_b.zip'
        print('{} <<-- {}'.format(local, remote))
        r = requests.get(remote)
        with open(local, 'wb') as out:
            out.write(r.content)
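Since the function only downloads when the target file is missing, repeated calls are cheap:

get_egm96_lookup()  # downloads ww15mgh_b.zip on first call
get_egm96_lookup()  # no-op, the local file now exists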
Example #6
def test_installation():
    reg = ExamineSnap()
    assert os.path.isfile(reg.gpt)
Example #7
def parse_node(name, use_existing=True):
    """
    parse an XML node recipe. The XML representation and parameter default values are read from the docstring of an
    individual node by calling `gpt <node> -h`. The result is then written to an XML text file under
    `$HOME/.pyrosar/snap/nodes` which is subsequently read for parsing instead of calling `gpt` again.
    
    Parameters
    ----------
    name: str
        the name of the processing node, e.g. Terrain-Correction
    use_existing: bool
        use an existing XML text file if one is present? If False, the gpt help text is re-parsed and the XML file overwritten.

    Returns
    -------
    Node
        the parsed node
    
    Examples
    --------
    >>> tnr = parse_node('ThermalNoiseRemoval')
    >>> print(tnr.parameters)
    {'selectedPolarisations': None, 'removeThermalNoise': 'true', 'reIntroduceThermalNoise': 'false'}
    """
    name = name if name.endswith('.xml') else name + '.xml'
    operator = os.path.splitext(name)[0]
    abspath = os.path.join(os.path.expanduser('~'), '.pyrosar', 'snap',
                           'nodes')
    os.makedirs(abspath, exist_ok=True)
    absname = os.path.join(abspath, name)

    if not os.path.isfile(absname) or not use_existing:
        gpt = ExamineSnap().gpt

        cmd = [gpt, operator, '-h']

        out, err = run(cmd=cmd, void=False)

        graph = re.search('<graph id.*', out, flags=re.DOTALL).group()
        graph = re.sub(r'>\${.*', '/>',
                       graph)  # remove placeholder values like ${value}
        graph = re.sub(r'<\.\.\./>.*', '', graph)  # remove <.../> placeholders
        if operator == 'BandMaths':
            graph = graph.replace('sourceProducts', 'sourceProduct')
        tree = ET.fromstring(graph)
        for elt in tree.iter():
            if elt.text in ['string', 'double', 'integer', 'float']:
                elt.text = None
        node = tree.find('node')
        node.attrib['id'] = operator
        # add a second source product entry for multi-source nodes
        # multi-source nodes are those with an entry 'sourceProducts' instead of 'sourceProduct'
        # exceptions are registered in this list:
        multisource = ['Back-Geocoding']
        if operator not in ('Read', 'ProductSet-Reader'):
            source = node.find('.//sources')
            child = source[0]
            if child.tag == 'sourceProducts' or operator in multisource:
                # SubElement attaches the extra source entry to the tree in place
                ET.SubElement(source, 'sourceProduct.1', {'refid': 'Read (2)'})
            child.tag = 'sourceProduct'
            child.attrib['refid'] = 'Read'
            child.text = None
        if operator == 'BandMaths':
            tree.find('.//parameters').set(
                'class', 'com.bc.ceres.binding.dom.XppDomElement')
            tband = tree.find('.//targetBand')
            for item in [
                    'spectralWavelength', 'spectralBandwidth', 'scalingOffset',
                    'scalingFactor', 'validExpression', 'spectralBandIndex'
            ]:
                el = tband.find('.//{}'.format(item))
                tband.remove(el)

        node = Node(node)

        # read the default values from the parameter documentation
        parameters = node.parameters.keys()
        # append a sentinel so the regex below also matches the last parameter
        out += '-P'
        for parameter in parameters:
            p1 = r'-P{}.*?-P'.format(parameter)
            p2 = r"Default\ value\ is '([a-zA-Z0-9 ._\(\)]+)'"
            r1 = re.search(p1, out, re.S)
            if r1:
                sub = r1.group()
                r2 = re.search(p2, sub)
                if r2:
                    value = r2.groups()[0]
                    node.parameters[parameter] = value
                    continue
            node.parameters[parameter] = None

        # fill in some additional defaults
        if operator == 'BandMerge':
            node.parameters['geographicError'] = '1.0E-5'

        with open(absname, 'w') as xml:
            xml.write(str(node))

        return node

    else:
        with open(absname, 'r') as workflow:
            element = ET.fromstring(workflow.read())
        return Node(element)
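A short sketch of the caching behaviour described in the docstring; use_existing=False forces a fresh `gpt <node> -h` call and overwrites the cached XML:

tc = parse_node('Terrain-Correction')                      # reuses the cached XML if present
tc = parse_node('Terrain-Correction', use_existing=False)  # re-parses gpt help, overwrites the cache
print(sorted(tc.parameters.keys()))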
Example #8
def execute(xmlfile,
            cleanup=True,
            gpt_exceptions=None,
            gpt_args=None,
            verbose=True):
    """
    execute SNAP workflows via the Graph Processing Tool gpt.
    This function merely calls gpt with some additional command
    line arguments and raises a RuntimeError on fail. This
    function is used internally by function :func:`gpt`, which
    should be used instead.
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    gpt_exceptions: dict
        a dictionary to override the configured GPT executable for certain operators;
        each (sub-)workflow containing this operator will be executed with the defined executable;
        
        - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``
    gpt_args: list or None
        a list of additional arguments to be passed to the gpt call
        
        - e.g. ``['-x', '-c', '2048M']`` for increased tile cache size and intermediate clearing
    verbose: bool
        print out status messages?
    
    Returns
    -------
    
    Raises
    ------
    RuntimeError
    """
    # read the file and extract some information
    workflow = Workflow(xmlfile)
    write = workflow['Write']
    outname = write.parameters['file']
    workers = [x.id for x in workflow if x.operator not in ['Read', 'Write']]
    message = ' -> '.join(workers)
    gpt_exec = None
    if gpt_exceptions is not None:
        for item, executable in gpt_exceptions.items():
            if item in workers:
                gpt_exec = executable
                message += ' (using {})'.format(executable)
                break
    if verbose:
        print(message)
    # try to find the GPT executable
    if gpt_exec is None:
        try:
            gpt_exec = ExamineSnap().gpt
        except AttributeError:
            raise RuntimeError('could not find SNAP GPT executable')
    # create the list of arguments to be passed to the subprocess module calling GPT
    cmd = [gpt_exec, '-e']
    if isinstance(gpt_args, list):
        cmd.extend(gpt_args)
    # read the output format from the Write node to enable custom BigTIFF tiling
    if write.parameters['formatName'] == 'GeoTiff-BigTIFF':
        cmd.extend([
            # '-Dsnap.dataio.reader.tileWidth=*',
            # '-Dsnap.dataio.reader.tileHeight=1',
            '-Dsnap.dataio.bigtiff.tiling.width=256',
            '-Dsnap.dataio.bigtiff.tiling.height=256',
            # '-Dsnap.dataio.bigtiff.compression.type=LZW',
            # '-Dsnap.dataio.bigtiff.compression.quality=0.75'
        ])
    cmd.append(xmlfile)
    # execute the workflow
    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = proc.communicate()
    out = out.decode('utf-8') if isinstance(out, bytes) else out
    err = err.decode('utf-8') if isinstance(err, bytes) else err

    # check for a message indicating an unknown parameter,
    # which can easily be removed from the workflow
    pattern = r"Error: \[NodeId: (?P<id>[a-zA-Z0-9-_]*)\] " \
              r"Operator \'[a-zA-Z0-9-_]*\': " \
              r"Unknown element \'(?P<par>[a-zA-Z]*)\'"
    match = re.search(pattern, err)

    if proc.returncode == 0:
        pattern = r'(?P<level>WARNING: )([a-zA-Z.]*: )(?P<message>No intersection.*)'
        match = re.search(pattern, err)
        if match is not None:
            raise RuntimeError(re.search(pattern, err).group('message'))
        return

    # delete unknown parameters and run the modified workflow
    elif proc.returncode == 1 and match is not None:
        replace = match.groupdict()
        with Workflow(xmlfile) as flow:
            print(
                '  removing parameter {id}:{par} and executing modified workflow'
                .format(**replace))
            node = flow[replace['id']]
            del node.parameters[replace['par']]
            flow.write(xmlfile)
        execute(xmlfile,
                cleanup=cleanup,
                gpt_exceptions=gpt_exceptions,
                gpt_args=gpt_args,
                verbose=verbose)

    # append additional information to the error message and raise an error
    else:
        if proc.returncode == -9:
            submessage = '[{}] the process was killed by SNAP (process return code -9). ' \
                         'One possible cause is a lack of memory.'.format(os.path.basename(xmlfile))
        else:
            submessage = '{}{}\n[{}] failed with return code {}'
        if cleanup:
            if os.path.isfile(outname + '.tif'):
                os.remove(outname + '.tif')
            elif os.path.isdir(outname):
                shutil.rmtree(outname, onerror=windows_fileprefix)
            elif outname.endswith('.dim'):
                os.remove(outname)
                shutil.rmtree(outname.replace('.dim', '.data'),
                              onerror=windows_fileprefix)
        raise RuntimeError(
            submessage.format(out, err, os.path.basename(xmlfile),
                              proc.returncode))
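A hedged usage sketch combining the options documented above; the workflow file and gpt path are illustrative:

execute('workflow.xml',
        gpt_args=['-x', '-c', '2048M'],  # clear intermediate tiles, 2 GB tile cache
        gpt_exceptions={'Terrain-Flattening': '/home/user/snap/bin/gpt'})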
Example #9
def parse_node(name, use_existing=True):
    """
    parse an XML node recipe. The XML representation and parameter default values are read from the docstring of an
    individual node by calling `gpt <node> -h`. The result is then written to an XML text file under
    `$HOME/.pyrosar/snap/nodes` which is subsequently read for parsing instead of calling `gpt` again.
    
    Parameters
    ----------
    name: str
        the name of the processing node, e.g. Terrain-Correction
    use_existing: bool
        use an existing XML text file if one is present? If False, the gpt help text is re-parsed and the XML file overwritten.

    Returns
    -------
    Node
        the parsed node
    
    Examples
    --------
    >>> tnr = parse_node('ThermalNoiseRemoval')
    >>> print(tnr.parameters)
    {'selectedPolarisations': None, 'removeThermalNoise': 'true', 'reIntroduceThermalNoise': 'false'}
    """
    name = name if name.endswith('.xml') else name + '.xml'
    operator = os.path.splitext(name)[0]
    abspath = os.path.join(os.path.expanduser('~'), '.pyrosar', 'snap',
                           'nodes')
    os.makedirs(abspath, exist_ok=True)
    absname = os.path.join(abspath, name)

    if not os.path.isfile(absname) or not use_existing:
        gpt = ExamineSnap().gpt

        cmd = [gpt, operator, '-h']

        out, err = run(cmd=cmd, void=False)

        graph = re.search('<graph id.*', out, flags=re.DOTALL).group()
        tree = ET.fromstring(graph)
        node = tree.find('node')
        node.attrib['id'] = operator
        # add a second source product entry for multi-source nodes
        # multi-source nodes are those with an entry 'sourceProducts' instead of 'sourceProduct'
        # exceptions are registered in this list:
        multisource = ['Back-Geocoding']
        if operator != 'Read':
            source = node.find('.//sources')
            child = source[0]
            if child.tag == 'sourceProducts' or operator in multisource:
                # SubElement attaches the extra source entry to the tree in place
                ET.SubElement(source, 'sourceProduct.1', {'refid': 'Read (2)'})
            child.tag = 'sourceProduct'
            child.attrib['refid'] = 'Read'
            child.text = None

        node = Node(node)

        # read the default values from the parameter documentation
        parameters = node.parameters.keys()
        # append a sentinel so the regex below also matches the last parameter
        out += '-P'
        for parameter in parameters:
            p1 = r'-P{}.*?-P'.format(parameter)
            p2 = r"Default\ value\ is '([a-zA-Z0-9 ._\(\)]+)'"
            r1 = re.search(p1, out, re.S)
            if r1:
                sub = r1.group()
                r2 = re.search(p2, sub)
                if r2:
                    value = r2.groups()[0]
                    node.parameters[parameter] = value
                    continue
            node.parameters[parameter] = None

        with open(absname, 'w') as xml:
            xml.write(str(node))

        return node

    else:
        with open(absname, 'r') as workflow:
            element = ET.fromstring(workflow.read())
        return Node(element)
Example #10
def execute(xmlfile, cleanup=True, gpt_exceptions=None, verbose=True):
    """
    execute SNAP workflows via the Graph Processing Tool gpt.
    This function merely calls gpt with some additional command
    line arguments and raises a RuntimeError on fail. This
    function is used internally by function :func:`gpt`, which
    should be used instead.
    
    Parameters
    ----------
    xmlfile: str
        the name of the workflow XML file
    cleanup: bool
        should all files written to the temporary directory during function execution be deleted after processing?
    gpt_exceptions: dict
        a dictionary to override the configured GPT executable for certain operators;
        each (sub-)workflow containing this operator will be executed with the defined executable;
        
        - e.g. ``{'Terrain-Flattening': '/home/user/snap/bin/gpt'}``
    verbose: bool
        print out status messages?
    
    Returns
    -------
    
    Raises
    ------
    RuntimeError
    """
    # read the file and extract some information
    workflow = Workflow(xmlfile)
    write = workflow['Write']
    outname = write.parameters['file']
    infile = workflow['Read'].parameters['file']
    nodes = workflow.nodes()
    workers = [x.id for x in nodes if x.operator not in ['Read', 'Write']]
    message = ' -> '.join(workers)
    gpt_exec = None
    if gpt_exceptions is not None:
        for item, executable in gpt_exceptions.items():
            if item in workers:
                gpt_exec = executable
                message += ' (using {})'.format(executable)
                break
    if verbose:
        print(message)
    # try to find the GPT executable
    if gpt_exec is None:
        try:
            gpt_exec = ExamineSnap().gpt
        except AttributeError:
            raise RuntimeError('could not find SNAP GPT executable')
    # create the list of arguments to be passed to the subprocess module calling GPT
    # read the output format from the Write node to enable custom BigTIFF tiling
    if write.parameters['formatName'] == 'GeoTiff-BigTIFF':
        cmd = [
            gpt_exec,
            # '-Dsnap.dataio.reader.tileWidth=*',
            # '-Dsnap.dataio.reader.tileHeight=1',
            '-Dsnap.dataio.bigtiff.tiling.width=256',
            '-Dsnap.dataio.bigtiff.tiling.height=256',
            # '-Dsnap.dataio.bigtiff.compression.type=LZW',
            # '-Dsnap.dataio.bigtiff.compression.quality=0.75',
            xmlfile
        ]
    else:
        cmd = [gpt_exec, xmlfile]
    # execute the workflow
    proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = proc.communicate()
    out = out.decode('utf-8') if isinstance(out, bytes) else out
    err = err.decode('utf-8') if isinstance(err, bytes) else err
    # delete intermediate files if an error occurred
    if proc.returncode == 1:
        pattern = r"Error: \[NodeId: (?P<id>[a-zA-Z0-9-_]*)\] " \
                  r"Operator \'[a-zA-Z0-9-_]*\': " \
                  r"Unknown element \'(?P<par>[a-zA-Z]*)\'"
        match = re.search(pattern, err)
        if match is not None:
            replace = match.groupdict()
            with Workflow(xmlfile) as flow:
                print(
                    '  removing parameter {id}:{par} and executing modified workflow'
                    .format(**replace))
                node = flow[replace['id']]
                del node.parameters[replace['par']]
                flow.write(xmlfile)
            execute(xmlfile,
                    cleanup=cleanup,
                    gpt_exceptions=gpt_exceptions,
                    verbose=verbose)
        else:
            if cleanup:
                if os.path.isfile(outname + '.tif'):
                    os.remove(outname + '.tif')
                elif os.path.isdir(outname):
                    shutil.rmtree(outname)
            print(out + err)
            print('failed: {}'.format(os.path.basename(infile)))
            err_match = re.search('Error: (.*)\n', out + err)
            errmessage = err_match.group(1) if err_match else err
            raise RuntimeError(errmessage)
    elif proc.returncode == -9:
        if cleanup:
            if os.path.isfile(outname + '.tif'):
                os.remove(outname + '.tif')
            elif os.path.isdir(outname):
                shutil.rmtree(outname)
        print(
            'the process was killed by SNAP. One possible cause is a lack of memory.'
        )
    else:
        print('process return code: {}'.format(proc.returncode))
Example #11
def correctOSV(id,
               osvdir=None,
               osvType='POE',
               logpath=None,
               outdir=None,
               shellscript=None):
    """
    correct GAMMA parameter files with orbit state vector information from dedicated OSV files;
    OSV files are downloaded automatically to either the defined `osvdir` or a sub-directory `osv` of the scene directory
    
    Parameters
    ----------
    id: ~pyroSAR.drivers.ID
        the scene to be corrected
    osvdir: str
        the directory of OSV files; subdirectories POEORB and RESORB are created automatically
    osvType: {'POE', 'RES'}
        the OSV type to be used
    logpath: str or None
        a directory to write command logfiles to
    outdir: str or None
        the directory to execute the command in
    shellscript: str or None
        a file to write the Gamma commands to in shell format

    Returns
    -------
    
    Examples
    --------
    
    >>> from pyroSAR import identify
    >>> from pyroSAR.gamma import correctOSV, convert2gamma
    >>> filename = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'
    # identify the SAR scene
    >>> scene = identify(filename)
    # unpack the zipped scene to an arbitrary directory
    >>> scene.unpack('/home/test')
    >>> print(scene.scene)
    /home/test/S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.SAFE
    # convert the unpacked scene to GAMMA format
    >>> convert2gamma(id=scene, directory=scene.scene)
    # correct the OSV information of the converted GAMMA images
    >>> correctOSV(id=scene, osvdir='/home/test/osv')
    
    See Also
    --------
    :meth:`pyroSAR.drivers.SAFE.getOSV`
    """

    if not isinstance(id, ID):
        raise IOError('id must be of type pyroSAR.ID')

    if id.sensor not in ['S1A', 'S1B']:
        raise IOError(
            'this method is currently only available for Sentinel-1. Please stay tuned...'
        )

    # logpath may be None, in which case no log directory is created
    if logpath is not None and not os.path.isdir(logpath):
        os.makedirs(logpath)

    if osvdir is None:
        try:
            auxdatapath = ExamineSnap().auxdatapath
        except AttributeError:
            auxdatapath = os.path.join(os.path.expanduser('~'), '.snap',
                                       'auxdata')
        osvdir = os.path.join(auxdatapath, 'Orbits', 'Sentinel-1')
    try:
        id.getOSV(osvdir, osvType)
    except URLError:
        print('..no internet access')

    images = id.getGammaImages(id.scene)
    # read parameter file entries into object
    with ISPPar(images[0] + '.par') as par:
        # extract acquisition time stamp
        timestamp = datetime.strptime(
            par.date, '%Y-%m-%dT%H:%M:%S.%f').strftime('%Y%m%dT%H%M%S')

    # find an OSV file matching the time stamp and defined OSV type(s)
    with OSV(osvdir) as osv:
        osvfile = osv.match(sensor=id.sensor,
                            timestamp=timestamp,
                            osvtype=osvType)
    if not osvfile:
        raise RuntimeError('no Orbit State Vector file found')

    if osvfile.endswith('.zip'):
        osvdir = os.path.join(id.scene, 'osv')
        with zf.ZipFile(osvfile) as archive:
            archive.extractall(path=osvdir)
        osvfile = os.path.join(osvdir,
                               os.path.basename(osvfile).replace('.zip', ''))

    # update the GAMMA parameter file with the selected orbit state vectors
    print('correcting state vectors with file {}'.format(osvfile))
    for image in images:
        isp.S1_OPOD_vec(SLC_par=image + '.par',
                        OPOD=osvfile,
                        logpath=logpath,
                        outdir=outdir,
                        shellscript=shellscript)