Example #1
def update_object(outFile, inObj, box, updateMode=True):
    """Do not write h5 file if: 1) h5 exists and readable,
                                2) it contains all date12 from ifgramStackDict,
                                            or all datasets from geometryDict"""
    write_flag = True
    if updateMode and ut.run_or_skip(outFile, check_readable=True) == 'skip':
        if inObj.name == 'ifgramStack':
            in_size = inObj.get_size(box=box)[1:]
            in_date12_list = inObj.get_date12_list()

            outObj = ifgramStack(outFile)
            out_size = outObj.get_size()[1:]
            out_date12_list = outObj.get_date12_list(dropIfgram=False)

            if out_size == in_size and set(in_date12_list).issubset(
                    set(out_date12_list)):
                print('All date12 exist in file {} with the same size as'
                      ' required, no need to re-load.'.format(outFile))
                write_flag = False

        elif inObj.name == 'geometry':
            outObj = geometry(outFile)
            outObj.open(print_msg=False)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and all(i in outObj.datasetNames
                            for i in inObj.get_dataset_list())):
                print('All datasets exist in file {} with the same size as'
                      ' required, no need to re-load.'.format(outFile))
                write_flag = False
    return write_flag
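
A minimal usage sketch for update_object(), with hypothetical names: stack_dict stands in for an ifgramStackDict instance built by the loader, and its write2hdf5() method signature is an assumption, not taken from the example above.

# Hedged usage sketch: only (re-)write ifgramStack.h5 when needed.
# `stack_dict`, `length` and `width` are assumed to come from the loader.
box = (0, 0, width, length)   # pixel box (x0, y0, x1, y1) over the full extent
if update_object('ifgramStack.h5', stack_dict, box, updateMode=True):
    stack_dict.write2hdf5(outputFile='ifgramStack.h5', box=box)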
Example #2
def update_object(outFile, inObj, box, updateMode=True):
    """Do not write h5 file if: 1) h5 exists and readable,
                                2) it contains all date12 from ifgramStackDict,
                                            or all datasets from geometryDict"""
    updateFile = True
    if updateMode and not ut.update_file(outFile, check_readable=True):
        if inObj.name == 'ifgramStack':
            outObj = ifgramStack(outFile)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and sorted(outObj.get_date12_list(dropIfgram=False))
                    == sorted(inObj.get_date12_list())):
                print('All date12 exist in file {} with the same size as'
                      ' required, no need to re-load.'.format(outFile))
                updateFile = False

        elif inObj.name == 'geometry':
            outObj = geometry(outFile)
            outObj.open(print_msg=False)
            if (outObj.get_size() == inObj.get_size(box=box)
                    and all(i in outObj.datasetNames
                            for i in inObj.get_dataset_list())):
                print('All datasets exist in file {} with the same size as'
                      ' required, no need to re-load.'.format(outFile))
                updateFile = False
    return updateFile
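
Note that this variant differs from Example #1 in two ways: it relies on the older ut.update_file() helper instead of ut.run_or_skip(), and it requires the existing file's date12 list to match the input exactly (sorted equality), whereas Example #1 only requires the input date12 to be a subset of what the file already contains.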
Example #3
def read_geometry(inps):
    ts_obj = timeseries(inps.timeseries_file)
    ts_obj.open(print_msg=False)
    # 2D / 3D geometry
    if inps.geom_file:
        geom_obj = geometry(inps.geom_file)
        geom_obj.open()
        print(('read 2D incidenceAngle,slantRangeDistance from {} file:'
               ' {}').format(geom_obj.name, os.path.basename(geom_obj.file)))
        inps.incAngle = geom_obj.read(datasetName='incidenceAngle', print_msg=False).flatten()
        inps.rangeDist = geom_obj.read(datasetName='slantRangeDistance', print_msg=False).flatten()
        if 'bperp' in geom_obj.datasetNames:
            print('read 3D bperp from {} file: {} ...'.format(geom_obj.name, os.path.basename(geom_obj.file)))
            inps.pbase = geom_obj.read(datasetName='bperp', print_msg=False).reshape((geom_obj.numDate, -1))
            inps.pbase -= inps.pbase[ts_obj.refIndex]
        else:
            print('read mean bperp from {} file'.format(ts_obj.name))
            inps.pbase = ts_obj.pbase.reshape((-1, 1))

    # 0D geometry
    else:
        print('read mean incidenceAngle,slantRangeDistance,bperp value from {} file'.format(ts_obj.name))
        inps.incAngle = ut.incidence_angle(ts_obj.metadata, dimension=0)
        inps.rangeDist = ut.range_distance(ts_obj.metadata, dimension=0)
        inps.pbase = ts_obj.pbase.reshape((-1, 1))

    inps.sinIncAngle = np.sin(inps.incAngle * np.pi / 180.)  # incidence angle: degrees -> sine
    return inps
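
A minimal usage sketch, assuming a plain namespace carrying the two attributes the function reads; the file names are hypothetical.

import argparse

# Hedged sketch: populate geometry attributes on a namespace. Leaving
# geom_file empty/None would take the 0D (mean-value) branch instead.
inps = argparse.Namespace(timeseries_file='timeseries.h5',
                          geom_file='INPUTS/geometryRadar.h5')
inps = read_geometry(inps)
print(inps.incAngle.shape, inps.rangeDist.shape, inps.pbase.shape)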
Example #4
def check_loaded_dataset(work_dir='./', print_msg=True):
    """Check the result of loading data for the following two rules:
        1. file existence
        2. file attribute readability

    Parameters: work_dir  : string, PySAR working directory
                print_msg : bool, print out message
    Returns:    load_complete : bool, True if all required files and datasets
                    exist; an exception is raised otherwise.
                    If True, the PROCESS / SLC folders could be removed.
                stack_file  : string, path of the interferogram stack file
                geom_file   : string, path of the geometry file
                lookup_file : string, path of the lookup table file
    Example:    work_dir = os.path.expandvars('$SCRATCHDIR/SinabungT495F50AlosA/PYSAR')
                ut.check_loaded_dataset(work_dir)
    """
    load_complete = True

    if not work_dir:
        work_dir = os.getcwd()
    work_dir = os.path.abspath(work_dir)

    # 1. interferograms stack file: unwrapPhase, coherence
    flist = [os.path.join(work_dir, 'INPUTS/ifgramStack.h5')]
    stack_file = is_file_exist(flist, abspath=True)
    if stack_file is not None:
        obj = ifgramStack(stack_file)
        obj.open(print_msg=False)
        for dname in ['unwrapPhase', 'coherence']:
            if dname not in obj.datasetNames:
                raise ValueError(
                    'required dataset "{}" is missing in file {}'.format(
                        dname, stack_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './INPUTS/ifgramStack.h5')

    atr = readfile.read_attribute(stack_file)

    # 2. geom_file: height
    if 'X_FIRST' in atr.keys():
        flist = [os.path.join(work_dir, 'INPUTS/geometryGeo.h5')]
    else:
        flist = [os.path.join(work_dir, 'INPUTS/geometryRadar.h5')]
    geom_file = is_file_exist(flist, abspath=True)
    if geom_file is not None:
        obj = geometry(geom_file)
        obj.open(print_msg=False)
        dname = geometryDatasetNames[0]
        if dname not in obj.datasetNames:
            raise ValueError(
                'required dataset "{}" is missing in file {}'.format(
                    dname, geom_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './INPUTS/geometry*.h5')

    # 3. lookup_file: latitude,longitude or rangeCoord,azimuthCoord
    # could be different from the geometry file in the case of roipac and gamma
    flist = [os.path.join(work_dir, 'INPUTS/geometry*.h5')]
    lookup_file = get_lookup_file(flist, abspath=True, print_msg=print_msg)
    if lookup_file is not None:
        obj = geometry(lookup_file)
        obj.open(print_msg=False)

        if atr['PROCESSOR'] in ['isce', 'doris']:
            dnames = [geometryDatasetNames[1], geometryDatasetNames[2]]
        elif atr['PROCESSOR'] in ['gamma', 'roipac']:
            dnames = [geometryDatasetNames[3], geometryDatasetNames[4]]
        else:
            raise AttributeError('unrecognized InSAR processor: {}'.format(
                atr['PROCESSOR']))

        for dname in dnames:
            if dname not in obj.datasetNames:
                load_complete = False
                raise Exception(
                    'required dataset "{}" is missing in file {}'.format(
                        dname, lookup_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './INPUTS/geometry*.h5')

    # print message
    if print_msg:
        print('Loaded datasets are processed by InSAR software: {}'.format(
            atr['PROCESSOR']))
        if 'X_FIRST' in atr.keys():
            print('Loaded dataset is in GEO coordinates')
        else:
            print('Loaded dataset is in RADAR coordinates')
        print('Interferograms Stack: {}'.format(stack_file))
        print('Geometry File       : {}'.format(geom_file))
        print('Lookup Table File   : {}'.format(lookup_file))
        if load_complete:
            print('-' * 50)
            print(
                'All data needed found/loaded/copied. Processed 2-pass InSAR data can be removed.'
            )
        print('-' * 50)
    return load_complete, stack_file, geom_file, lookup_file
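
Spelling out the docstring's example with the returned values (the path is environment-specific):

# Usage sketch mirroring the docstring example:
work_dir = os.path.expandvars('$SCRATCHDIR/SinabungT495F50AlosA/PYSAR')
load_complete, stack_file, geom_file, lookup_file = check_loaded_dataset(work_dir)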
Example #5
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display)"""
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    atr = read_attribute(fname)
    k = atr['FILE_TYPE']

    global slice_list
    # HDF5 Files
    if fext in ['.h5', '.he5']:
        with h5py.File(fname, 'r') as f:
            d1_list = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]
        if k == 'timeseries' and k in d1_list:
            obj = timeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['geometry'] and k not in d1_list:
            obj = geometry(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['ifgramStack']:
            obj = ifgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['HDFEOS']:
            obj = HDFEOS(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantTimeseries']:
            obj = giantTimeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantIfgramStack']:
            obj = giantIfgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        else:
            ## Find slice by walking through the file structure
            length, width = int(atr['LENGTH']), int(atr['WIDTH'])

            def get_hdf5_2d_dataset(name, obj):
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length,
                                                                        width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn(
                            'file has undefined {}D dataset: {}'.format(
                                obj.ndim, name))

            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(get_hdf5_2d_dataset)

    # Binary Files
    else:
        if fext in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif fbase.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif atr.get('number_bands', '1') == '2' and 'unw' not in k:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
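
A quick sketch of how the returned list might be used; the file name and the slice-name format in the comment are illustrative assumptions.

# Hedged sketch: enumerate the 2D slices available for display.
for name in get_slice_list('timeseries.h5'):
    print(name)   # e.g. per-date slice names such as 'timeseries-20161020'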
Example #6
def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # check 1 - existing tropo delay file
    if (ut.run_or_skip(out_file=inps.trop_file,
                       in_file=inps.grib_file_list,
                       print_msg=False) == 'skip'
            and get_dataset_size(inps.trop_file) == get_dataset_size(
                inps.geom_file)):
        print(
            '{} file exists and is newer than all GRIB files, skip updating.'.
            format(inps.trop_file))
        return inps.trop_file

    # check 2 - geometry file
    if any(i is None for i in [inps.geom_file, inps.ref_yx]):
        print(
            'No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.'
        )
        if not os.path.isfile(inps.trop_file):
            inps.trop_file = None
        return inps.trop_file

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    if 'latitude' in geom_obj.datasetNames:
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    else:
        inps.lat, inps.lon = get_lat_lon(geom_obj.metadata)

    # calculate phase delay
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_file_list)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
    trop_data = np.zeros((num_date, length, width), np.float32)

    print('calculating delay for each date using PyAPS '
          '(Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    if verbose:
        prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_file_list[i]
        trop_data[i] = get_delay(grib_file, inps)
        if verbose:
            prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
    if verbose:
        prog_bar.close()

    # Convert to relative phase delay with respect to the reference date
    inps.ref_date = atr.get('REF_DATE', date_list[0])
    print('convert to relative phase delay with reference date: ' +
          inps.ref_date)
    inps.ref_idx = date_list.index(inps.ref_date)
    trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

    # Write tropospheric delay to HDF5
    atr['REF_Y'] = inps.ref_yx[0]
    atr['REF_X'] = inps.ref_yx[1]
    ts_obj = timeseries(inps.trop_file)
    ts_obj.write2hdf5(data=trop_data,
                      dates=date_list,
                      metadata=atr,
                      refFile=inps.timeseries_file)
    return inps.trop_file
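
The reference-date conversion above is a plain broadcast subtraction; here is a standalone sketch of the same step on a toy array.

import numpy as np

# Toy stack: 3 dates of 2x2 delay maps; make delays relative to date 0.
trop = np.arange(12, dtype=np.float32).reshape(3, 2, 2)
ref_idx = 0
trop -= np.tile(trop[ref_idx, :, :], (3, 1, 1))   # same as: trop -= trop[ref_idx]
assert np.all(trop[ref_idx] == 0)                 # reference date is now zero delay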
Example #7
def write2hdf5(out_file, ts_file, coh_file, mask_file, geom_file, metadata):
    """Write HDF5 file in HDF-EOS5 format"""
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    dateList = ts_obj.dateList

    # Open HDF5 File
    f = h5py.File(out_file, 'w')
    print('create HDF5 file: {} with w mode'.format(out_file))
    maxDigit = 20

    # Write Observation - Displacement
    gName = 'HDFEOS/GRIDS/timeseries/observation'
    print('create group   /{}'.format(gName))
    group = f.create_group(gName)

    dsName = 'displacement'
    data = ts_obj.read(print_msg=False)
    print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
           ' with compression={c}').format(g=gName,
                                           d=dsName,
                                           w=maxDigit,
                                           t=str(data.dtype),
                                           s=data.shape,
                                           c=compression))
    dset = group.create_dataset(dsName,
                                data=data,
                                dtype=np.float32,
                                chunks=True,
                                compression=compression)
    dset.attrs['Title'] = dsName
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['_FillValue'] = FLOAT_ZERO
    dset.attrs['Units'] = 'meters'

    dsName = 'date'
    data = np.array(dateList, dtype=np.string_)
    group.create_dataset(dsName, data=data)
    print('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'.format(
        g=gName, d=dsName, w=maxDigit, t=str(data.dtype), s=data.shape))

    dsName = 'bperp'
    data = np.array(ts_obj.pbase, dtype=np.float32)
    group.create_dataset(dsName, data=data)
    print('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'.format(
        g=gName, d=dsName, w=maxDigit, t=str(data.dtype), s=data.shape))

    # Write Quality
    gName = 'HDFEOS/GRIDS/timeseries/quality'
    print('create group   /{}'.format(gName))
    group = f.create_group(gName)

    ## 1 - temporalCoherence
    dsName = 'temporalCoherence'
    data = readfile.read(coh_file)[0]
    print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
           ' with compression={c}').format(g=gName,
                                           d=dsName,
                                           w=maxDigit,
                                           t=str(data.dtype),
                                           s=data.shape,
                                           c=compression))
    dset = group.create_dataset(dsName,
                                data=data,
                                chunks=True,
                                compression=compression)
    dset.attrs['Title'] = dsName
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['_FillValue'] = FLOAT_ZERO
    dset.attrs['Units'] = '1'

    ## 2 - mask
    dsName = 'mask'
    data = readfile.read(mask_file, datasetName='mask')[0]
    print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
           ' with compression={c}').format(g=gName,
                                           d=dsName,
                                           w=maxDigit,
                                           t=str(data.dtype),
                                           s=data.shape,
                                           c=compression))
    dset = group.create_dataset(dsName,
                                data=data,
                                chunks=True,
                                compression=compression)
    dset.attrs['Title'] = dsName
    dset.attrs['MissingValue'] = BOOL_ZERO
    dset.attrs['_FillValue'] = BOOL_ZERO
    dset.attrs['Units'] = '1'

    # Write Geometry
    # Required: height, incidenceAngle
    # Optional: rangeCoord, azimuthCoord, azimuthAngle, slantRangeDistance, waterMask, shadowMask
    gName = 'HDFEOS/GRIDS/timeseries/geometry'
    print('create group   /{}'.format(gName))
    group = f.create_group(gName)

    geom_obj = geometry(geom_file)
    geom_obj.open(print_msg=False)
    for dsName in geom_obj.datasetNames:
        data = geom_obj.read(datasetName=dsName, print_msg=False)
        print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
               ' with compression={c}').format(g=gName,
                                               d=dsName,
                                               w=maxDigit,
                                               t=str(data.dtype),
                                               s=data.shape,
                                               c=compression))
        dset = group.create_dataset(dsName,
                                    data=data,
                                    chunks=True,
                                    compression=compression)

        dset.attrs['Title'] = dsName
        if dsName in ['height', 'slantRangeDistance', 'bperp']:
            dset.attrs['MissingValue'] = FLOAT_ZERO
            dset.attrs['_FillValue'] = FLOAT_ZERO
            dset.attrs['Units'] = 'meters'

        elif dsName in [
                'incidenceAngle', 'azimuthAngle', 'latitude', 'longitude'
        ]:
            dset.attrs['MissingValue'] = FLOAT_ZERO
            dset.attrs['_FillValue'] = FLOAT_ZERO
            dset.attrs['Units'] = 'degrees'

        elif dsName in ['rangeCoord', 'azimuthCoord']:
            dset.attrs['MissingValue'] = FLOAT_ZERO
            dset.attrs['_FillValue'] = FLOAT_ZERO
            dset.attrs['Units'] = '1'

        elif dsName in ['waterMask', 'shadowMask']:
            dset.attrs['MissingValue'] = BOOL_ZERO
            dset.attrs['_FillValue'] = BOOL_ZERO
            dset.attrs['Units'] = '1'

    # Write Attributes to the HDF File
    print('write metadata to root level')
    for key, value in iter(metadata.items()):
        f.attrs[key] = value
    f.close()
    print('finished writing to {}'.format(out_file))

    return out_file
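
A hypothetical call under common PySAR product names; the file names and the metadata source are assumptions, not part of the example above.

# Hedged usage sketch: package standard products into one HDF-EOS5 file.
metadata = readfile.read_attribute('timeseries.h5')   # assumed metadata source
out_file = write2hdf5('timeseries.he5',
                      ts_file='timeseries.h5',
                      coh_file='temporalCoherence.h5',
                      mask_file='maskTempCoh.h5',
                      geom_file='INPUTS/geometryRadar.h5',
                      metadata=metadata)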