Example #1
def main(argv):

    ##### Check Inputs
    if not argv or argv[0] in ['-h','--help']:
        usage()
        sys.exit(1)
    if len(argv) < 2:
        print('\nAt least 2 inputs are needed.\n')
        sys.exit(1)

    ##### Read Original Attributes
    print '************ Add / Update HDF5 File Attributes *************'
    File = argv[0]
    atr  = readfile.read_attribute(File)
    print 'Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']+': '+File

    ##### Read New Attributes
    atr_new = dict()
    for i in range(1,len(argv)):
        if os.path.isfile(argv[i]):
            atr_tmp = readfile.read_template(argv[i])
            atr_new.update(atr_tmp)
        else:
            atr_tmp = argv[i].split('=')
            atr_new[atr_tmp[0].strip()] = atr_tmp[1].strip()
    print 'The following attributes will be added/updated:'
    info.print_attributes(atr_new)

    ##### Update h5 File
    k = atr['FILE_TYPE']
    h5 = h5py.File(File,'r+')
    for key, value in atr_new.iteritems():
        h5[k].attrs[key] = value
    h5.close()
    print 'Done.'

    return
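The attribute write above targets the group named after FILE_TYPE (the legacy file layout); a minimal self-contained sketch of the same h5py call against a file's root attributes, with a hypothetical file name and attribute values:

import h5py

atr_new = {'UNIT': 'm/yr', 'REF_DATE': '20141213'}    # hypothetical attributes
with h5py.File('velocity.h5', 'r+') as h5:            # open an existing file read/write
    for key, value in atr_new.items():
        h5.attrs[key] = value                          # add or overwrite each attribute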
Example #2
def main(argv):

    ##### Check Inputs
    if not argv or argv[0] in ['-h', '--help']:
        usage()
        sys.exit(1)
    if len(argv) < 2 or not argv[1]:
        raise Exception('\nAt least 2 inputs are needed.\n')

    ##### Read Original Attributes
    #print '************ Add / Update HDF5 File Attributes *************'
    File = argv[0]
    atr = readfile.read_attribute(File)
    print 'Input file is ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE'] + ': ' + File

    ##### Read New Attributes
    atr_new = dict()
    for i in range(1, len(argv)):
        if os.path.isfile(argv[i]):
            atr_tmp = readfile.read_template(argv[i])
            atr_new.update(atr_tmp)
        else:
            atr_tmp = argv[i].split('=')
            atr_new[atr_tmp[0].strip()] = atr_tmp[1].strip()
    print "The following attributes will be added/updated, or removed if new value is 'None':"
    info.print_attributes(atr_new)

    ext = os.path.splitext(File)[1]
    ##### Update h5 File
    if ext in ['.h5', '.he5']:
        File = ut.add_attribute(File, atr_new)
    else:
        if not ut.update_attribute_or_not(atr_new, atr):
            print 'All attributes to update already exist with the same value (and all attributes to remove do not exist), skip update.'
        else:
            for key, value in atr_new.iteritems():
                # delete the item if the new value is 'None'
                if value == 'None':
                    atr.pop(key, None)
                else:
                    atr[key] = value
            if atr['PROCESSOR'] == 'roipac':
                print 'writing >>> ' + File + '.rsc'
                writefile.write_roipac_rsc(atr, File + '.rsc')

    return File
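A standalone sketch of the removal convention used above (attributes whose new value is the string 'None' are dropped, all others added or overwritten), with hypothetical dictionaries:

atr     = {'UNIT': 'radian', 'REF_DATE': '20141213', 'PROCESSOR': 'roipac'}
atr_new = {'UNIT': 'm', 'REF_DATE': 'None'}

for key, value in atr_new.items():
    if value == 'None':
        atr.pop(key, None)      # drop the attribute if present
    else:
        atr[key] = value        # add or overwrite
print(atr)                      # {'UNIT': 'm', 'PROCESSOR': 'roipac'}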
Example #3
def prep_metadata(ts_file, print_msg=True):
    """Prepare metadata for HDF-EOS5 file"""
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    unavco_meta_dict = metadata_pysar2unavco(ts_obj.metadata, ts_obj.dateList)
    if print_msg:
        print('## UNAVCO Metadata:')
        print('-----------------------------------------')
        info.print_attributes(unavco_meta_dict)
        print('-----------------------------------------')

    meta_dict = dict(ts_obj.metadata)
    meta_dict.update(unavco_meta_dict)
    meta_dict['FILE_TYPE'] = 'HDFEOS'
    return meta_dict
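A usage sketch for the helper above; the module path and input file name are assumptions for illustration, not taken from the source:

from save_hdfeos5 import prep_metadata   # hypothetical module path

meta = prep_metadata('timeseries_ECMWF_demErr.h5', print_msg=True)   # hypothetical file
print(meta['FILE_TYPE'])   # 'HDFEOS'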
Example #4
def read_input_attribute(argv, print_msg=True):
    atr_new = dict()
    for i in range(1, len(argv)):
        if os.path.isfile(argv[i]):
            atr_tmp = readfile.read_template(argv[i])
            atr_new.update(atr_tmp)
        else:
            atr_tmp = argv[i].split('=')
            atr_new[atr_tmp[0].strip()] = atr_tmp[1].strip()

    if print_msg:
        print(
            "The following attributes will be added/updated, or removed if new value is 'None':"
        )
        info.print_attributes(atr_new)
    return atr_new
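A usage sketch for read_input_attribute(); the argv list mimics a command line where the first item is the target file and the remaining items are KEY=VALUE pairs or template files (all values here are hypothetical):

argv = ['velocity.h5', 'UNIT=m/yr', 'REF_DATE=None']
atr_new = read_input_attribute(argv, print_msg=False)
print(atr_new)   # {'UNIT': 'm/yr', 'REF_DATE': 'None'}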
Example #5
def main(argv):
    inps = cmdLineParse()
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    ##### Prepare Metadata
    pysar_meta_dict = readfile.read_attribute(inps.timeseries_file)
    k = pysar_meta_dict['FILE_TYPE']
    length = int(pysar_meta_dict['FILE_LENGTH'])
    width = int(pysar_meta_dict['WIDTH'])
    h5_timeseries = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5_timeseries[k].keys())
    dateNum = len(dateList)
    dateListStr = str(dateList).translate(None, "[],u'")
    pysar_meta_dict['DATE_TIMESERIES'] = dateListStr

    unavco_meta_dict = metadata_pysar2unavco(pysar_meta_dict, dateList)
    print '## UNAVCO Metadata:'
    print '-----------------------------------------'
    info.print_attributes(unavco_meta_dict)

    meta_dict = pysar_meta_dict.copy()
    meta_dict.update(unavco_meta_dict)
    print '-----------------------------------------'

    ##### Get Output Filename
    SAT = meta_dict['mission']
    SW = meta_dict['beam_mode']
    if meta_dict['beam_swath']:
        SW += str(meta_dict['beam_swath'])
    RELORB = "%03d" % (int(meta_dict['relative_orbit']))

    ## First and/or Last Frame
    frame1 = int(meta_dict['frame'])
    key = 'first_frame'
    if key in meta_dict.keys():
        frame1 = int(meta_dict[key])
    FRAME = "%04d" % (frame1)
    key = 'last_frame'
    if key in meta_dict.keys():
        frame2 = int(meta_dict[key])
        if frame2 != frame1:
            FRAME += "_%04d" % (frame2)

    TBASE = "%04d" % (0)
    BPERP = "%05d" % (0)
    DATE1 = dt.datetime.strptime(meta_dict['first_date'],
                                 '%Y-%m-%d').strftime('%Y%m%d')
    DATE2 = dt.datetime.strptime(meta_dict['last_date'],
                                 '%Y-%m-%d').strftime('%Y%m%d')
    #end_date = dt.datetime.strptime(meta_dict['last_date'], '%Y-%m-%d')
    #if inps.update and (dt.datetime.utcnow() - end_date) < dt.timedelta(days=365):
    if inps.update:
        print 'Update mode is enabled, setting end date to XXXXXXXX.'
        DATE2 = 'XXXXXXXX'

    #outName = SAT+'_'+SW+'_'+RELORB+'_'+FRAME+'_'+DATE1+'-'+DATE2+'_'+TBASE+'_'+BPERP+'.he5'
    outName = SAT + '_' + SW + '_' + RELORB + '_' + FRAME + '_' + DATE1 + '_' + DATE2 + '.he5'

    if inps.subset:
        print 'Subset mode is enabled, adding subset range info to the output filename.'
        lat1 = float(meta_dict['Y_FIRST'])
        lon0 = float(meta_dict['X_FIRST'])
        lat0 = lat1 + float(meta_dict['Y_STEP']) * length
        lon1 = lon0 + float(meta_dict['X_STEP']) * width

        lat0Str = 'N%05d' % (round(lat0 * 1e3))
        lat1Str = 'N%05d' % (round(lat1 * 1e3))
        lon0Str = 'E%06d' % (round(lon0 * 1e3))
        lon1Str = 'E%06d' % (round(lon1 * 1e3))
        if lat0 < 0.0: lat0Str = 'S%05d' % (round(abs(lat0) * 1e3))
        if lat1 < 0.0: lat1Str = 'S%05d' % (round(abs(lat1) * 1e3))
        if lon0 < 0.0: lon0Str = 'W%06d' % (round(abs(lon0) * 1e3))
        if lon1 < 0.0: lon1Str = 'W%06d' % (round(abs(lon1) * 1e3))

        SUB = '_%s_%s_%s_%s' % (lat0Str, lat1Str, lon0Str, lon1Str)
        outName = os.path.splitext(outName)[0] + SUB + os.path.splitext(
            outName)[1]

    ##### Open HDF5 File
    print 'writing >>> ' + outName
    f = h5py.File(outName, 'w')
    hdfeos = f.create_group('HDFEOS')
    if 'Y_FIRST' in meta_dict.keys():
        gg_coord = hdfeos.create_group('GRIDS')
    else:
        gg_coord = hdfeos.create_group('SWATHS')
    group = gg_coord.create_group('timeseries')

    ##### Write Attributes to the HDF File
    print 'write metadata to ' + str(f)
    for key, value in meta_dict.iteritems():
        f.attrs[key] = value

    ##### Write Observation - Displacement
    groupObs = group.create_group('observation')
    print 'write data to ' + str(groupObs)

    disDset = np.zeros((dateNum, length, width), np.float32)
    for i in range(dateNum):
        sys.stdout.write('\rreading 3D displacement from file %s: %d/%d ...' %
                         (inps.timeseries_file, i + 1, dateNum))
        sys.stdout.flush()
        disDset[i] = h5_timeseries[k].get(dateList[i])[:]
    print ' '

    dset = groupObs.create_dataset('displacement',
                                   data=disDset,
                                   dtype=np.float32)
    dset.attrs['DATE_TIMESERIES'] = dateListStr
    dset.attrs['Title'] = 'Displacement time-series'
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['Units'] = 'meters'
    dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write Quality
    groupQ = group.create_group('quality')
    print 'write data to ' + str(groupQ)

    ## 1 - temporalCoherence
    print 'reading coherence       from file: ' + inps.coherence_file
    data = readfile.read(inps.coherence_file)[0]
    dset = groupQ.create_dataset('temporalCoherence',
                                 data=data,
                                 compression='gzip')
    dset.attrs['Title'] = 'Temporal Coherence'
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['Units'] = '1'
    dset.attrs['_FillValue'] = FLOAT_ZERO

    ## 2 - mask
    print 'reading mask            from file: ' + inps.mask_file
    data = readfile.read(inps.mask_file, epoch='mask')[0]
    dset = groupQ.create_dataset('mask', data=data, compression='gzip')
    dset.attrs['Title'] = 'Mask'
    dset.attrs['MissingValue'] = BOOL_ZERO
    dset.attrs['Units'] = '1'
    dset.attrs['_FillValue'] = BOOL_ZERO

    ##### Write Geometry
    ## Required: height, incidenceAngle
    ## Optional: rangeCoord, azimuthCoord, headingAngle, slantRangeDistance, waterMask, shadowMask
    groupGeom = group.create_group('geometry')
    print 'write data to ' + str(groupGeom)

    ## 1 - height
    print 'reading height          from file: ' + inps.dem_file
    data = readfile.read(inps.dem_file, epoch='height')[0]
    dset = groupGeom.create_dataset('height', data=data, compression='gzip')
    dset.attrs['Title'] = 'Digital elevation model'
    dset.attrs['MissingValue'] = INT_ZERO
    dset.attrs['Units'] = 'meters'
    dset.attrs['_FillValue'] = INT_ZERO

    ## 2 - incidenceAngle
    print 'reading incidence angle from file: ' + inps.inc_angle_file
    data = readfile.read(inps.inc_angle_file, epoch='incidenceAngle')[0]
    dset = groupGeom.create_dataset('incidenceAngle',
                                    data=data,
                                    compression='gzip')
    dset.attrs['Title'] = 'Incidence angle'
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['Units'] = 'degrees'
    dset.attrs['_FillValue'] = FLOAT_ZERO

    ## 3 - rangeCoord
    try:
        data = readfile.read(inps.rg_coord_file,
                             epoch='rangeCoord',
                             print_msg=False)[0]
        print 'reading range coord     from file: ' + inps.rg_coord_file
        dset = groupGeom.create_dataset('rangeCoord',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Range Coordinates'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No rangeCoord found in file %s' % (inps.rg_coord_file)

    ## 4 - azimuthCoord
    try:
        data = readfile.read(inps.az_coord_file,
                             epoch='azimuthCoord',
                             print_msg=False)[0]
        print 'reading azimuth coord   from file: ' + inps.az_coord_file
        dset = groupGeom.create_dataset('azimuthCoord',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Azimuth Coordinates'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No azimuthCoord found in file %s' % (inps.az_coord_file)

    ## 5 - headingAngle
    try:
        data = readfile.read(inps.head_angle_file,
                             epoch='headingAngle',
                             print_msg=False)[0]
        print 'reading heading angle   from file: ' + inps.head_angle_file
        dset = groupGeom.create_dataset('headingAngle',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Heading Angle'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'degrees'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No headingAngle found in file %s' % (inps.head_angle_file)

    ## 6 - slantRangeDistance
    try:
        data = readfile.read(inps.slant_range_dist_file,
                             epoch='slantRangeDistance',
                             print_msg=False)[0]
        print 'reading slant range distance from file: ' + inps.slant_range_dist_file
        dset = groupGeom.create_dataset('slantRangeDistance',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Slant Range Distance'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'meters'
        dset.attrs['_FillValue'] = FLOAT_ZERO
    except:
        print 'No slantRangeDistance found in file %s' % (
            inps.slant_range_dist_file)

    ## 7 - waterMask
    try:
        data = readfile.read(inps.water_mask_file,
                             epoch='waterMask',
                             print_msg=False)[0]
        print 'reading water mask      from file: ' + inps.water_mask_file
        dset = groupGeom.create_dataset('waterMask',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Water Mask'
        dset.attrs['MissingValue'] = BOOL_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = BOOL_ZERO
    except:
        print 'No waterMask found in file %s' % (inps.water_mask_file)

    ## 8 - shadowMask
    try:
        data = readfile.read(inps.shadow_mask_file,
                             epoch='shadowMask',
                             print_msg=False)[0]
        print 'reading shadow mask     from file: ' + inps.shadow_mask_file
        dset = groupGeom.create_dataset('shadowMask',
                                        data=data,
                                        compression='gzip')
        dset.attrs['Title'] = 'Shadow Mask'
        dset.attrs['MissingValue'] = BOOL_ZERO
        dset.attrs['Units'] = '1'
        dset.attrs['_FillValue'] = BOOL_ZERO
    except:
        print 'No shadowMask found in file %s' % (inps.shadow_mask_file)

    f.close()
    print 'Done.'
    return
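The output name assembled above follows a SAT_SW_RELORB_FRAME_DATE1_DATE2.he5 pattern; a minimal standalone sketch of that formatting with hypothetical metadata values:

import datetime as dt

meta = {'mission': 'SEN', 'beam_mode': 'IW', 'beam_swath': '1',
        'relative_orbit': '128', 'frame': '593',
        'first_date': '2015-01-01', 'last_date': '2019-12-31'}   # hypothetical values

SAT    = meta['mission']
SW     = meta['beam_mode'] + str(meta['beam_swath'])
RELORB = '%03d' % int(meta['relative_orbit'])
FRAME  = '%04d' % int(meta['frame'])
DATE1  = dt.datetime.strptime(meta['first_date'], '%Y-%m-%d').strftime('%Y%m%d')
DATE2  = dt.datetime.strptime(meta['last_date'],  '%Y-%m-%d').strftime('%Y%m%d')
outName = '_'.join([SAT, SW, RELORB, FRAME, DATE1, DATE2]) + '.he5'
print(outName)   # SEN_IW1_128_0593_20150101_20191231.he5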
Example #6
def main(argv):
    inps = cmdLineParse()

    #print '\n**************** Output to UNAVCO **************'
    ##### Prepare Metadata
    pysar_meta_dict = readfile.read_attribute(inps.timeseries)
    k = pysar_meta_dict['FILE_TYPE']
    h5_timeseries = h5py.File(inps.timeseries, 'r')
    dateList = sorted(h5_timeseries[k].keys())
    unavco_meta_dict = metadata_pysar2unavco(pysar_meta_dict, dateList)
    print '## UNAVCO Metadata:'
    print '-----------------------------------------'
    info.print_attributes(unavco_meta_dict)

    meta_dict = pysar_meta_dict.copy()
    meta_dict.update(unavco_meta_dict)

    #### Open HDF5 File
    SAT = meta_dict['mission']
    # beam_mode should be like FB08 for ALOS, need to find out, Yunjun, 2016-12-26
    SW = meta_dict['beam_mode']
    RELORB = "%03d" % (int(meta_dict['relative_orbit']))
    FRAME = "%04d" % (int(meta_dict['frame']))
    DATE1 = dt.strptime(meta_dict['first_date'], '%Y-%m-%d').strftime('%Y%m%d')
    DATE2 = dt.strptime(meta_dict['last_date'], '%Y-%m-%d').strftime('%Y%m%d')
    TBASE = "%04d" % (0)
    BPERP = "%05d" % (0)
    outName = SAT + '_' + SW + '_' + RELORB + '_' + FRAME + '_' + DATE1 + '-' + DATE2 + '_' + TBASE + '_' + BPERP + '.he5'

    print '-----------------------------------------'
    print 'writing >>> ' + outName
    f = h5py.File(outName, 'w')
    hdfeos = f.create_group('HDFEOS')
    if 'Y_FIRST' in meta_dict.keys():
        gg_coord = hdfeos.create_group('GRIDS')
    else:
        gg_coord = hdfeos.create_group('SWATHS')
    group = gg_coord.create_group('timeseries')

    ##### Write Attributes to the HDF File
    print 'write metadata to ' + str(f)
    for key, value in meta_dict.iteritems():
        f.attrs[key] = value

    print 'write data to ' + str(group)
    ##### Write Time Series Data
    print 'reading file: ' + inps.timeseries
    print 'number of acquisitions: %d' % len(dateList)
    for date in dateList:
        print date
        data = h5_timeseries[k].get(date)[:, :]
        dset = group.create_dataset(date, data=data, compression='gzip')
        dset.attrs['Title'] = 'Time series displacement'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'meters'
        dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write Incidence_Angle
    if os.path.isfile(inps.incidence_angle):
        print 'reading file: ' + inps.incidence_angle
        inc_angle, inc_angle_meta = readfile.read(inps.incidence_angle)
        dset = group.create_dataset('incidence_angle',
                                    data=inc_angle,
                                    compression='gzip')
        dset.attrs['Title'] = 'Incidence angle'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'degrees'
        dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write DEM
    if os.path.isfile(inps.dem):
        print 'reading file: ' + inps.dem
        dem, dem_meta = readfile.read(inps.dem)
        dset = group.create_dataset('dem', data=dem, compression='gzip')
        dset.attrs['Title'] = 'Digital elevation model'
        dset.attrs['MissingValue'] = INT_ZERO
        dset.attrs['Units'] = 'meters'
        dset.attrs['_FillValue'] = INT_ZERO

    ##### Write Coherence
    if os.path.isfile(inps.coherence):
        print 'reading file: ' + inps.coherence
        coherence, coherence_meta = readfile.read(inps.coherence)
        dset = group.create_dataset('coherence',
                                    data=coherence,
                                    compression='gzip')
        dset.attrs['Title'] = 'Temporal Coherence'
        dset.attrs['MissingValue'] = FLOAT_ZERO
        dset.attrs['Units'] = 'None'
        dset.attrs['_FillValue'] = FLOAT_ZERO

    ##### Write Mask
    if os.path.isfile(inps.mask):
        print 'reading file: ' + inps.mask
        mask, mask_meta = readfile.read(inps.mask)
        dset = group.create_dataset('mask', data=mask, compression='gzip')
        dset.attrs['Title'] = 'Mask'
        dset.attrs['MissingValue'] = INT_ZERO
        dset.attrs['Units'] = 'None'
        dset.attrs['_FillValue'] = INT_ZERO

    f.close()
    print 'Done.'
    return
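A self-contained sketch of the per-dataset pattern repeated above (a gzip-compressed dataset plus Title/Units/fill-value attributes), with a hypothetical output file and placeholder array:

import numpy as np
import h5py

data = np.zeros((100, 100), np.float32)                    # placeholder array
with h5py.File('example.he5', 'w') as f:                   # hypothetical output file
    group = f.create_group('HDFEOS/GRIDS/timeseries')      # intermediate groups are created too
    dset = group.create_dataset('20150101', data=data, compression='gzip')
    dset.attrs['Title'] = 'Time series displacement'
    dset.attrs['Units'] = 'meters'
    dset.attrs['MissingValue'] = np.float32(0.)
    dset.attrs['_FillValue'] = np.float32(0.)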