Example #1
def print_date_list(fname, disp_num=False, drop_ifgram=False, print_msg=False):
    """Print time/date info of file"""
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()
    elif k == 'HDFEOS':
        obj = HDFEOS(fname)
        obj.open(print_msg=False)
        dateList = obj.dateList
    elif k == 'giantTimeseries':
        obj = giantTimeseries(fname)
        obj.open(print_msg=False)
        dateList = obj.dateList
    elif k in ['ifgramStack']:
        dateList = ifgramStack(fname).get_date12_list(dropIfgram=drop_ifgram)
    elif k in ['giantIfgramStack']:
        obj = giantIfgramStack(fname)
        obj.open(print_msg=False)
        dateList = obj.date12List
    else:
        print('--date option cannot be applied to {} file, ignoring it.'.format(k))

    if print_msg and dateList is not None:
        for i in range(len(dateList)):
            if disp_num:
                print('{}\t{}'.format(dateList[i], i))
            else:
                print(dateList[i])
    return dateList
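
A minimal usage sketch for this version; the file names below are hypothetical, and the PySAR object classes used above (timeseries, ifgramStack, etc.) are assumed to be importable as in the project:

# hypothetical file names; assumes the PySAR objects used above are importable
date_list = print_date_list('timeseries.h5', disp_num=True, print_msg=True)
pair_list = print_date_list('inputs/ifgramStack.h5', drop_ifgram=True, print_msg=True)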
Example #2
def print_date_list(fname, disp_ifgram='all', disp_num=False, print_msg=False):
    """Print time/date info of file"""
    k = readfile.read_attribute(fname)['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()

    elif k == 'HDFEOS':
        dateList = HDFEOS(fname).get_date_list()

    elif k == 'giantTimeseries':
        dateList = giantTimeseries(fname).get_date_list()

    elif k in ['giantIfgramStack']:
        dateList = giantIfgramStack(fname).get_date12_list()

    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateListAll = obj.get_date12_list(dropIfgram=False)
        dateListKept = obj.get_date12_list(dropIfgram=True)

        # show dropped ifgram or not
        if disp_ifgram == 'all':
            dateList = list(dateListAll)
        elif disp_ifgram == 'kept':
            dateList = list(dateListKept)
        else:
            dateList = sorted(list(set(dateListAll) - set(dateListKept)))

    else:
        print('--date option cannot be applied to {} file, ignoring it.'.format(k))

    # print list info
    if print_msg and dateList is not None:
        for d in dateList:
            if disp_num:
                if k in ['ifgramStack']:
                    num = dateListAll.index(d)
                else:
                    num = dateList.index(d)
                msg = '{}\t{}'.format(d, num)
            else:
                msg = d
            print(msg)
    return dateList
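
The refactored version replaces the boolean drop_ifgram with a string disp_ifgram. A hedged sketch of the three display modes, again with hypothetical file names:

# 'all' lists every pair, 'kept' only the non-dropped ones,
# and any other value (e.g. 'dropped') lists the set difference of the two
all_pairs  = print_date_list('inputs/ifgramStack.h5', disp_ifgram='all')
kept_pairs = print_date_list('inputs/ifgramStack.h5', disp_ifgram='kept')
drop_pairs = print_date_list('inputs/ifgramStack.h5', disp_ifgram='dropped')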
Example #3
File: readfile.py  Project: whigg/PySAR
def read_attribute(fname, datasetName=None, standardize=True, meta_ext=None):
    """Read attributes of input file into a dictionary
    Parameters: fname : str, path/name of data file
                datasetName : str, name of dataset of interest, for file with multiple datasets
                    e.g. unwrapPhase in ifgramStack.h5
                         coherence   in ifgramStack.h5
                         height      in geometryRadar.h5
                         latitude    in geometryRadar.h5
                         ...
                standardize : bool, grab standardized metadata key name
                meta_ext : str, extension of the metadata file to use, e.g. '.rsc' or '.xml'
    Returns:    atr : dict, attributes dictionary
    """
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    if not os.path.isfile(fname):
        msg = 'input file does not exist: {}\n'.format(fname)
        msg += 'current directory: ' + os.getcwd()
        raise Exception(msg)

    # HDF5 files
    if fext in ['.h5', '.he5']:
        f = h5py.File(fname, 'r')
        g1_list = [i for i in f.keys() if isinstance(f[i], h5py.Group)]
        d1_list = [
            i for i in f.keys()
            if isinstance(f[i], h5py.Dataset) and f[i].ndim >= 2
        ]

        # FILE_TYPE - k
        if any(i in d1_list for i in ['unwrapPhase']):
            k = 'ifgramStack'
        elif any(i in d1_list for i in ['height', 'latitude', 'azimuthCoord']):
            k = 'geometry'
        elif any(i in g1_list + d1_list
                 for i in ['timeseries', 'displacement']):
            k = 'timeseries'
        elif 'HDFEOS' in g1_list:
            k = 'HDFEOS'
        elif 'recons' in d1_list:
            k = 'giantTimeseries'
        elif any(i in d1_list for i in ['igram', 'figram']):
            k = 'giantIfgramStack'
        elif any(i in g1_list
                 for i in multi_group_hdf5_file):  # old pysar format
            k = list(set(g1_list) & set(multi_group_hdf5_file))[0]
        elif len(d1_list) > 0:
            k = d1_list[0]
        elif len(g1_list) > 0:
            k = g1_list[0]
        else:
            raise ValueError('unrecognized file type: ' + fname)

        # metadata dict
        if k == 'giantTimeseries':
            atr = giantTimeseries(fname).get_metadata()
        elif k == 'giantIfgramStack':
            atr = giantIfgramStack(fname).get_metadata()
        else:
            if len(f.attrs) > 0 and 'WIDTH' in f.attrs.keys():
                atr = dict(f.attrs)
            else:
                # grab the list of attrs in HDF5 file
                global atr_list

                def get_hdf5_attrs(name, obj):
                    global atr_list
                    if len(obj.attrs) > 0 and 'WIDTH' in obj.attrs.keys():
                        atr_list.append(dict(obj.attrs))

                atr_list = []
                f.visititems(get_hdf5_attrs)
                # use the attrs with most items
                if atr_list:
                    num_list = [len(i) for i in atr_list]
                    atr = atr_list[np.argmax(num_list)]
                else:
                    raise ValueError('No attribute WIDTH found in file:',
                                     fname)

        # decode string format
        for key, value in atr.items():
            try:
                atr[key] = value.decode('utf8')
            except:
                atr[key] = value

        # attribute identified by PySAR
        # 1. FILE_TYPE
        atr['FILE_TYPE'] = str(k)

        # 2. DATA_TYPE
        ds = None
        if datasetName and datasetName in f.keys():
            ds = f[datasetName]
        else:
            # get the 1st dataset
            global ds_list

            def get_hdf5_dataset(name, obj):
                global ds_list
                if isinstance(obj, h5py.Dataset) and obj.ndim >= 2:
                    ds_list.append(obj)

            ds_list = []
            f.visititems(get_hdf5_dataset)
            if ds_list:
                ds = ds_list[0]
        if ds is not None:
            atr['DATA_TYPE'] = str(ds.dtype)
        f.close()

        # 3. PROCESSOR
        if 'INSAR_PROCESSOR' in atr.keys():
            atr['PROCESSOR'] = atr['INSAR_PROCESSOR']
        if 'PROCESSOR' not in atr.keys():
            atr['PROCESSOR'] = 'pysar'

    else:
        # get existing metadata files
        metafile_exts = ['.rsc', '.xml', '.aux.xml', '.par', '.hdr']
        if meta_ext:
            metafile_exts = [i for i in metafile_exts if i.endswith(meta_ext)]
        metafile_exts = [i for i in metafile_exts if os.path.isfile(fname + i)]
        if len(metafile_exts) == 0:
            raise FileNotFoundError(
                'No metadata file found for data file: {}'.format(fname))

        # Read metadata file and FILE_TYPE
        while fext in ['.geo', '.rdr']:
            fbase, fext = os.path.splitext(fbase)
        if not fext:
            fext = fbase
        metafile0 = fname + metafile_exts[0]
        if metafile0.endswith('.rsc'):
            atr = read_roipac_rsc(metafile0)
            if 'FILE_TYPE' not in atr.keys():
                atr['FILE_TYPE'] = fext

        elif metafile0.endswith('.xml'):
            atr = read_isce_xml(metafile0)
            if 'FILE_TYPE' not in atr.keys():
                atr['FILE_TYPE'] = atr.get('image_type', fext)

        elif metafile0.endswith('.par'):
            atr = read_gamma_par(metafile0)
            atr['FILE_TYPE'] = fext

        elif metafile0.endswith('.hdr'):
            atr = read_template(metafile0)
            atr['DATA_TYPE'] = ENVI2NUMPY_DATATYPE[atr.get('data type', '4')]
            atr['FILE_TYPE'] = atr['file type']

        # PROCESSOR
        if any(i.endswith(('.xml', '.hdr')) for i in metafile_exts):
            atr['PROCESSOR'] = 'isce'
            #atr.update(read_isce_xml(fname+'.xml'))
        elif any(i.endswith('.par') for i in metafile_exts):
            atr['PROCESSOR'] = 'gamma'
        elif any(i.endswith('.rsc') for i in metafile_exts):
            if 'PROCESSOR' not in atr.keys():
                atr['PROCESSOR'] = 'roipac'
        if 'PROCESSOR' not in atr.keys():
            atr['PROCESSOR'] = 'pysar'

    # UNIT
    k = atr['FILE_TYPE'].replace('.', '')
    if k == 'ifgramStack':
        if datasetName and datasetName in datasetUnitDict.keys():
            atr['UNIT'] = datasetUnitDict[datasetName]
        else:
            atr['UNIT'] = 'radian'
    elif 'UNIT' not in atr.keys():
        if datasetName and datasetName in datasetUnitDict.keys():
            atr['UNIT'] = datasetUnitDict[datasetName]
        elif k in datasetUnitDict.keys():
            atr['UNIT'] = datasetUnitDict[k]
        else:
            atr['UNIT'] = '1'

    # FILE_PATH
    atr['FILE_PATH'] = os.path.abspath(fname)

    if standardize:
        atr = standardize_metadata(atr)
    return atr
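
A short usage sketch of read_attribute, with hypothetical file and dataset names; the keys read back (FILE_TYPE, DATA_TYPE, UNIT, FILE_PATH) are the ones the function fills in above:

# hypothetical file names
atr = read_attribute('velocity.h5')
print(atr['FILE_TYPE'], atr.get('DATA_TYPE'), atr['UNIT'], atr['FILE_PATH'])

# pass datasetName for multi-dataset files to get per-dataset DATA_TYPE / UNIT
atr = read_attribute('inputs/ifgramStack.h5', datasetName='coherence')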
Example #4
File: readfile.py  Project: whigg/PySAR
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display)"""
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    atr = read_attribute(fname)
    k = atr['FILE_TYPE']

    global slice_list
    # HDF5 Files
    if fext in ['.h5', '.he5']:
        with h5py.File(fname, 'r') as f:
            d1_list = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]
        if k == 'timeseries' and k in d1_list:
            obj = timeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['geometry'] and k not in d1_list:
            obj = geometry(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['ifgramStack']:
            obj = ifgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['HDFEOS']:
            obj = HDFEOS(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantTimeseries']:
            obj = giantTimeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantIfgramStack']:
            obj = giantIfgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        else:
            ## Find slice by walking through the file structure
            length, width = int(atr['LENGTH']), int(atr['WIDTH'])

            def get_hdf5_2d_dataset(name, obj):
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length, width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn(
                            'file has un-defined {}D dataset: {}'.format(
                                obj.ndim, name))

            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(get_hdf5_2d_dataset)

    # Binary Files
    else:
        if fext in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif fbase.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif atr.get('number_bands', '1') == '2' and 'unw' not in k:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
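
A minimal sketch of calling get_slice_list, with hypothetical file names; the returned names are whatever the corresponding file object exposes as sliceList, or the band/coordinate names hard-coded in the binary branch:

# hypothetical file names
for name in get_slice_list('timeseries.h5'):
    print(name)

print(get_slice_list('inputs/geometryRadar.h5'))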