# ----------------------- load_data.py -----------------------
# Imports used by this main(); readfile, check_variable_name, check_number and
# check_size are PySAR's own module/helpers defined elsewhere in the script.
import os, sys, glob

import h5py
import numpy as np


def main(argv):
    try:
        templateFile = argv[1]
    except:
        print '''
    *******************************************
    loading the processed data for PySAR:
        interferograms (unwrapped and wrapped)
        coherence files (generate Mask at the same time)

    Usage: load_data.py TEMPLATEFILE
    *******************************************
    '''
        sys.exit(1)

    templateContents = readfile.read_template(templateFile)
    projectName = os.path.basename(templateFile.partition('.')[0])

    ############# Assign working directory ##############################
    try:
        # use TSSARDIR if the environment variable exists
        tssarProjectDir = os.getenv('TSSARDIR') + '/' + projectName
    except:
        # FA 7/2015: adopted for new directory structure
        tssarProjectDir = os.getenv('SCRATCHDIR') + '/' + projectName + '/TSSAR'

    print "QQ " + tssarProjectDir
    if not os.path.isdir(tssarProjectDir):
        os.mkdir(tssarProjectDir)

    ########### Use defaults if paths not given in template file #########
    try:
        igramPath = templateContents['pysar.inputdata']
        igramPath = check_variable_name(igramPath)
    except:
        igramPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*c10.unw'

    try:
        corPath = templateContents['pysar.CorFiles']
        corPath = check_variable_name(corPath)
    except:
        corPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*.cor'

    try:
        wrapPath = templateContents['pysar.wrapped']
        wrapPath = check_variable_name(wrapPath)
    except:
        wrapPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/I*/filt_*0*sim_HDR_*rlks.int'

    ######################### Unwrapped Interferograms ########################
    try:
        if os.path.isfile(tssarProjectDir + '/LoadedData.h5'):
            print '\nLoadedData.h5 already exists.\n'
            sys.exit(1)

        print 'loading interferograms ...'
        igramList = glob.glob(igramPath)
        k = 'interferograms'
        check_number(k, igramList)                                     # number check
        igramList, mode_width, mode_length = check_size(k, igramList)  # size check

        h5file = tssarProjectDir + '/LoadedData.h5'
        f = h5py.File(h5file)
        gg = f.create_group('interferograms')
        MaskZero = np.ones([int(mode_length), int(mode_width)])
        for igram in igramList:
            if not os.path.basename(igram) in f:
                print 'Adding ' + igram
                group = gg.create_group(os.path.basename(igram))
                amp, unw, unwrsc = readfile.read_float32(igram)
                MaskZero = amp * MaskZero
                dset = group.create_dataset(os.path.basename(igram), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(igram) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(igram)

        Mask = np.ones([int(mode_length), int(mode_width)])
        Mask[MaskZero == 0] = 0
        gm = f.create_group('mask')
        dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()

        ########################################################################
        print 'writing to Mask.h5'
        h5file = 'Mask.h5'
        h5mask = h5py.File(h5file, 'w')
        group = h5mask.create_group('mask')
        dset = group.create_dataset(os.path.basename('mask'), data=Mask, compression='gzip')
        h5mask.close()
    except:
        print 'No unwrapped interferogram is loaded.\n'

    ############################# Coherence ################################
    try:
        if os.path.isfile(tssarProjectDir + '/Coherence.h5'):
            print '\nCoherence.h5 already exists.\n'
            sys.exit(1)

        print 'loading correlation files ...'
        corList = glob.glob(corPath)
        k = 'coherence'
        check_number(k, corList)                                   # number check
        corList, mode_width, mode_length = check_size(k, corList)  # size check

        h5file = tssarProjectDir + '/Coherence.h5'
        fcor = h5py.File(h5file)
        gg = fcor.create_group('coherence')
        meanCoherence = np.zeros([int(mode_length), int(mode_width)])
        for cor in corList:
            if not os.path.basename(cor) in fcor:
                print 'Adding ' + cor
                group = gg.create_group(os.path.basename(cor))
                amp, unw, unwrsc = readfile.read_float32(cor)
                meanCoherence = meanCoherence + unw
                dset = group.create_dataset(os.path.basename(cor), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(cor) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(cor)

        meanCoherence = meanCoherence / (len(corList))

        print '********************************'
        print 'writing average_spatial_coherence.h5'
        h5file_CorMean = tssarProjectDir + '/average_spatial_coherence.h5'
        fcor_mean = h5py.File(h5file_CorMean, 'w')
        group = fcor_mean.create_group('mask')
        dset = group.create_dataset(os.path.basename('mask'), data=meanCoherence, compression='gzip')
        fcor_mean.close()

        print '********************************'
        print 'writing meanCoherence group to the interferogram file'
        gc = fcor.create_group('meanCoherence')
        dset = gc.create_dataset('meanCoherence', data=meanCoherence, compression='gzip')
        print '********************************'
        fcor.close()
    except:
        print 'No correlation file is loaded.\n'

    ########################## Wrapped Interferograms ############################
    try:
        if os.path.isfile(tssarProjectDir + '/Wrapped.h5'):
            print '\nWrapped.h5 already exists.\n'
            sys.exit(1)

        print 'loading wrapped phase ...'
        wrapList = glob.glob(wrapPath)
        k = 'wrapped'
        check_number(k, wrapList)                                    # number check
        wrapList, mode_width, mode_length = check_size(k, wrapList)  # size check

        h5file = tssarProjectDir + '/Wrapped.h5'
        fw = h5py.File(h5file)
        gg = fw.create_group('wrapped')
        for wrap in wrapList:
            if not os.path.basename(wrap) in fw:
                print 'Adding ' + wrap
                group = gg.create_group(os.path.basename(wrap))
                amp, unw, unwrsc = readfile.read_complex64(wrap)
                dset = group.create_dataset(os.path.basename(wrap), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(wrap) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(wrap)
        fw.close()
        print 'Wrote ' + str(len(wrapList)) + ' wrapped interferograms to ' + h5file
    except:
        print 'No wrapped interferogram is loaded.\n'

    ############################ Geomap file ###############################
    try:
        geomapFile = templateContents['pysar.geomap']
        geomapFile = check_variable_name(geomapFile)

        cpCmd = "cp " + geomapFile + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + geomapFile + ".rsc " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
    except:
        print "*********************************"
        print "Warning: no geomap file given"
        print "*********************************"
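# ----------------------------------------------------------------------------
# The template values above are passed through check_variable_name(), which is
# defined elsewhere in PySAR and not shown in this excerpt.  A minimal sketch,
# assuming the helper only expands a leading $ENVIRONMENT_VARIABLE in a path
# such as "$SCRATCHDIR/project/...", could look like this:
import os

def check_variable_name_sketch(path):
    # hypothetical equivalent: replace a leading "$VAR" with its value
    s = path.split("/")[0]
    if len(s) > 0 and s[0] == "$":
        p0 = os.getenv(s[1:])
        path = path.replace(s, p0)
    return path
# ----------------------------------------------------------------------------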
# Imports used by this plotting main(); readfile and ut (hillshade) are
# PySAR's own modules, Usage() is defined elsewhere in the script.
import os, sys, getopt

import h5py
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
from mpl_toolkits.basemap import Basemap


def main(argv):
    color_map = 'jet'
    disp_opposite = 'no'

    try:
        opts, args = getopt.getopt(argv, "h:f:d:o:x:y:m:M:i:c:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)
    if opts == []:
        Usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == '-f':
            File = arg
        elif opt == '-d':
            demFile = arg
        elif opt == '-m':
            Vmin = float(arg)
        elif opt == '-M':
            Vmax = float(arg)
        elif opt == '-x':
            winx = arg.split(':')
        elif opt == '-y':
            winy = arg.split(':')
        elif opt == '-o':
            outName = arg
        elif opt == '-i':
            disp_opposite = arg
        elif opt == '-c':
            color_map = arg

    h5file = h5py.File(File, 'r')
    k = h5file.keys()
    print k[0]
    # ccmap = plt.get_cmap(color_map)

    ################################################
    cdict1 = {'red':   ((0.0, 0.0, 0.0),
                        (0.5, 0.0, 0.0),
                        (0.6, 1.0, 1.0),
                        (0.8, 1.0, 1.0),
                        (1.0, 0.5, 0.5)),
              'green': ((0.0, 0.0, 0.0),
                        (0.2, 0.0, 0.0),
                        (0.4, 1.0, 1.0),
                        (0.6, 1.0, 1.0),
                        (0.8, 0.0, 0.0),
                        (1.0, 0.0, 0.0)),
              'blue':  ((0.0, 0.5, 0.5),
                        (0.2, 1.0, 1.0),
                        (0.4, 1.0, 1.0),
                        (0.5, 0.0, 0.0),
                        (1.0, 0.0, 0.0))}

    if color_map == 'pysar_hsv':
        ccmap = LinearSegmentedColormap('BlueRed1', cdict1)
    else:
        ccmap = plt.get_cmap(color_map)
    print 'colormap is : ' + color_map

    ################################################
    dset = h5file[k[0]].get(k[0])
    data = dset[0:dset.shape[0], 0:dset.shape[1]]
    if disp_opposite in ('yes', 'Yes', 'Y', 'y', 'YES'):
        data = -1 * data

    try:
        xref = h5file[k[0]].attrs['ref_x']
        yref = h5file[k[0]].attrs['ref_y']
    except:
        print 'No reference point'

    try:
        ullon = float(h5file[k[0]].attrs['X_FIRST'])
        ullat = float(h5file[k[0]].attrs['Y_FIRST'])
        lon_step = float(h5file[k[0]].attrs['X_STEP'])
        lat_step = float(h5file[k[0]].attrs['Y_STEP'])
        lon_unit = h5file[k[0]].attrs['Y_UNIT']
        lat_unit = h5file[k[0]].attrs['X_UNIT']
        llcrnrlon = ullon
        llcrnrlat = ullat + lat_step * data.shape[0]
        urcrnrlon = ullon + lon_step * data.shape[1]
        urcrnrlat = ullat
        geocoord = 'yes'
        print 'Input file is Geocoded'
    except:
        geocoord = 'no'

    print demFile
    if os.path.basename(demFile).split('.')[1] == 'hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1] == 'dem':
        dem, demRsc = readfile.read_dem(demFile)

    #################################################################
    try:
        winx
        wx = [int(i) for i in winx]   # -x gives "x0:x1", already split above
        dem = dem[:, wx[0]:wx[1]]
        data = data[:, wx[0]:wx[1]]
        ullon = float(h5file[k[0]].attrs['X_FIRST']) + wx[0]
        llcrnrlon = ullon
        urcrnrlon = ullon + lon_step * data.shape[1]
    except:
        print ''

    try:
        winy
        wy = [int(i) for i in winy]
        dem = dem[wy[0]:wy[1], :]
        data = data[wy[0]:wy[1], :]
    except:
        print ''

    ################################################################
    fig = plt.figure()
    ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
    m = Basemap(llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat,
                urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,
                resolution='l', area_thresh=1., projection='cyl',
                suppress_ticks=False, ax=ax)

    cmap_dem = plt.get_cmap('gray')
    m.imshow(ut.hillshade(np.flipud(dem), 50.0), cmap=cmap_dem)

    try:
        im = m.imshow(np.flipud(data), vmin=Vmin, vmax=Vmax, cmap=ccmap)
        # cb = m.colorbar(im, "right", size="5%", pad='2%')
    except:
        im = m.imshow(np.flipud(data))
        # cb = m.colorbar(im, "right", size="5%", pad='2%')

    # m.bluemarble()
    # cb = m.colorbar(im, "right", size="5%", pad='2%')
    # parallels = np.arange(31., 34, 0.5)
    # m.drawparallels(parallels, labels=[1, 0, 0, 1], linewidth=0.0)
    # meridians = np.arange(-115., -112., 0.5)
    # m.drawmeridians(meridians, labels=[1, 0, 0, 1], linewidth=0.0)
    # m.drawmapscale()
    # m = Basemap(llcrnrlon=-110., llcrnrlat=0., urcrnrlon=-20., urcrnrlat=57.,
    #             projection='lcc', lat_1=20., lat_2=40., lon_0=-60.,
    #             resolution='l', area_thresh=1000.)
    # m.drawcoastlines()
    # m.drawcountries()
    # m.drawmapboundary(fill_color='#99ffff')
    # m.fillcontinents(color='#cc9966', lake_color='#99ffff')
    # m.drawparallels(np.arange(10, 70, 20), labels=[1, 1, 0, 0])
    # m.drawmeridians(np.arange(-100, 0, 20), labels=[0, 0, 0, 1])
    # plt.title('Atlantic Hurricane Tracks (Storms Reaching Category 4, 1851-2004)')

    try:
        figName = outName
    except:
        outName = os.path.basename(File).replace('.h5', '')
        figName = outName + '.png'
    plt.savefig(figName, pad_inches=0.0)
    # plt.show()

    h5file.close()
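# ----------------------------------------------------------------------------
# The DEM background above is shaded with ut.hillshade(), which is part of
# PySAR's utilities and not shown here.  A self-contained approximation
# (assumption: simple gradient-based illumination; the real helper may differ)
# is sketched below; 'scale' plays the same role as the 50.0 passed above.
import numpy as np

def hillshade_sketch(dem, scale, azdeg=315.0, altdeg=45.0):
    # illuminate the DEM gradient from a given azimuth/altitude
    az = np.deg2rad(360.0 - azdeg)
    alt = np.deg2rad(altdeg)
    dx, dy = np.gradient(dem / float(scale))
    slope = 0.5 * np.pi - np.arctan(np.hypot(dx, dy))
    aspect = np.arctan2(dx, dy)
    return np.sin(alt) * np.sin(slope) + np.cos(alt) * np.cos(slope) * np.cos(az - aspect)
# ----------------------------------------------------------------------------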
# Imports used by this main(); readfile and Usage() come from PySAR / this script.
import os, sys

import h5py
import numpy as np


def main(argv):
    try:
        File = argv[0]
        demFile = argv[1]
        p = int(argv[2])
    except:
        Usage()
        sys.exit(1)

    try:
        baseline_error = argv[3]
    except:
        baseline_error = 'range_and_azimuth'

    ##################################
    h5file = h5py.File(File)
    dateList = h5file['timeseries'].keys()

    ##################################
    try:
        maskFile = argv[4]
        h5Mask = h5py.File(maskFile, 'r')
        kMask = h5Mask.keys()
        dset1 = h5Mask[kMask[0]].get(kMask[0])
        Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
    except:
        dset1 = h5file['mask'].get('mask')
        Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]

    # try:    maskFile = argv[3]
    # except: maskFile = 'Mask.h5'
    # try:    baseline_error = argv[4]
    # except: baseline_error = 'range_and_azimuth'
    print baseline_error

    ##################################
    # h5Mask = h5py.File(maskFile)
    # kMask = h5Mask.keys()
    # dset1 = h5Mask[kMask[0]].get(kMask[0])
    # Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
    Mask = Mask.flatten(1)
    ndx = Mask != 0

    ##################################
    nt = float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft = float(h5file['timeseries'].attrs['LOOK_REF2'])
    sy, sx = np.shape(dset1)
    npixel = sx * sy
    lookangle = np.tile(np.linspace(nt, ft, sx), [sy, 1])
    lookangle = lookangle.flatten(1) * np.pi / 180.0
    Fh = -np.sin(lookangle)
    Fv = -np.cos(lookangle)

    print 'Looking for azimuth pixel size'
    try:
        daz = float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print '''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        '''
        sys.exit(1)

    lines = np.tile(np.arange(0, sy, 1), [1, sx])
    lines = lines.flatten(1)
    rs = lines * daz

    if baseline_error == 'range_and_azimuth':
        A = np.zeros([npixel, 4])
        A[:, 0] = Fh
        A[:, 1] = Fh * rs
        A[:, 2] = Fv
        A[:, 3] = Fv * rs
        num_base_par = 4
    elif baseline_error == 'range':
        A = np.zeros([npixel, 2])
        A[:, 0] = Fh
        A[:, 1] = Fv
        num_base_par = 2

    ###########################################
    yref = int(h5file['timeseries'].attrs['ref_y'])
    xref = int(h5file['timeseries'].attrs['ref_x'])

    ###########################################
    if os.path.basename(demFile).split('.')[1] == 'hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1] == 'dem':
        dem, demRsc = readfile.read_dem(demFile)

    dem = dem - dem[yref][xref]
    dem = dem.flatten(1)

    ###################################################
    if p == 1:
        # A = np.vstack((dem[ndx], np.ones(len(dem[ndx])))).T
        B = np.vstack((dem, np.ones(len(dem)))).T
    elif p == 2:
        # A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**2, dem, np.ones(len(dem)))).T
    elif p == 3:
        # A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(len(dem)))).T
    print np.shape(A)

    Ainv = np.linalg.pinv(A)

    ###################################################
    Bh = []
    Bv = []
    Bhrate = []
    Bvrate = []
    Be = np.zeros([len(dateList), num_base_par + p + 1])
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    for i in range(1, len(dateList)):
        dset = h5file['timeseries'].get(dateList[i])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        L = data.flatten(1)
        M = np.hstack((A, B))
        Berror = np.dot(np.linalg.pinv(M[ndx]), L[ndx])
        Bh.append(Berror[0])
        Bhrate.append(Berror[1])
        Bv.append(Berror[2])
        Bvrate.append(Berror[3])
        Be[i, :] = Berror
        print Berror

    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    print 'baseline error           mean                          std'
    print '       bh      : ' + str(np.mean(Bh)) + '     ,  ' + str(np.std(Bh))
    print '     bh rate   : ' + str(np.mean(Bhrate)) + '     ,  ' + str(np.std(Bhrate))
    print '       bv      : ' + str(np.mean(Bv)) + '     ,  ' + str(np.std(Bv))
    print '     bv rate   : ' + str(np.mean(Bvrate)) + '     ,  ' + str(np.std(Bvrate))
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'

    # plt.hist(Bh, bins=8, normed=True)
    # formatter = FuncFormatter(to_percent)   # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)
    # plt.show()
    print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'
    # print 'Estimating Baseline error from each differences ...'
    # print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'

    orbEffect = np.zeros([len(dateList), sy, sx])
    for i in range(1, len(dateList)):
        effect = np.dot(M, Be[i, :])
        effect = np.reshape(effect, [sx, sy]).T
        # orbEffect[i,:,:] = orbEffect[i-1,:,:] + effect
        # orbEffect[i,:,:] = orbEffect[i,:,:] - orbEffect[i,yref,xref]
        orbEffect[i, :, :] = effect - effect[yref, xref]
        del effect

    print 'Correcting the time series ...'
    outName = File.replace('.h5', '') + '_BaseTropCor.h5'
    h5orbCor = h5py.File(outName, 'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0], 0:dset1.shape[1]] - orbEffect[i, :, :]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')

    for key, value in h5file['timeseries'].attrs.iteritems():
        group.attrs[key] = value

    dset1 = h5file['mask'].get('mask')
    group = h5orbCor.create_group('mask')
    dset = group.create_dataset('mask', data=dset1, compression='gzip')

    h5file.close()
    h5orbCor.close()
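# ----------------------------------------------------------------------------
# For reference, the per-epoch estimation above is an ordinary least-squares
# solve of  L = [A B] * x,  where A holds the baseline terms (Fh, Fh*r, Fv,
# Fv*r) and B the DEM polynomial.  A toy, self-contained version of the same
# pinv-based solve on synthetic data (all names below are illustrative only):
import numpy as np

npix = 1000
look = np.deg2rad(np.linspace(20., 45., npix))           # fake look angles
r = np.linspace(0., 5000., npix)                         # fake azimuth coordinate
dem = np.random.rand(npix) * 2000.                       # fake heights

A = np.column_stack((-np.sin(look), -np.sin(look) * r,
                     -np.cos(look), -np.cos(look) * r))  # baseline design matrix
B = np.column_stack((dem**2, dem, np.ones(npix)))        # DEM polynomial (p = 2)
M = np.hstack((A, B))

x_true = np.array([0.01, 1e-6, 0.02, 2e-6, 1e-8, 1e-4, 0.5])
L = np.dot(M, x_true) + np.random.randn(npix) * 1e-3     # synthetic phase
x_hat = np.dot(np.linalg.pinv(M), L)                     # same solve as Berror above
# ----------------------------------------------------------------------------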
# Imports used by this multilooking main(); readfile, writefile, multilook()
# and Usage() are defined in PySAR / elsewhere in this script.
import os, sys

import h5py


def main(argv):
    try:
        file = argv[0]
        alks = float(argv[1])
        rlks = float(argv[2])
    except:
        Usage()
        sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split('.')[0] + '_a' + str(int(alks)) + 'lks_r' + str(int(rlks)) + 'lks' + ext

    if ext == '.int' or ext == '.slc':
        a, p, r = readfile.read_complex64(file)
        plks = multilook(p, alks, rlks)
        amlks = multilook(a, alks, rlks)   # keep 'alks' as the look number, do not overwrite it

        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH'] = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        a, p, r = readfile.read_float32(file)
        plks = multilook(p, alks, rlks)
        amlks = multilook(a, alks, rlks)

        writefile.write_float32(plks, outName)
        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH'] = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.dem':
        d, r = readfile.read_dem(file)
        dlks = multilook(d, alks, rlks)

        print 'writing ' + outName
        writefile.write_dem(dlks, outName)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext in ['.jpeg', '.jpg', '.png']:
        import Image
        im = Image.open(file)
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)

        try:
            r = readfile.read_rsc_file(file + '.rsc')
        except:
            sys.exit(1)

        r['FILE_LENGTH'] = str(height)
        r['WIDTH'] = str(width)
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.h5':
        h5file = h5py.File(file, 'r')
        # outName = file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks = h5py.File(outName, 'w')

        if 'interferograms' in h5file.keys():
            print 'Multilooking the interferograms'
            gg = h5file_lks.create_group('interferograms')
            igramList = h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unw = h5file['interferograms'][igram].get(igram)
                unwlks = multilook(unw, alks, rlks)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unwlks, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['WIDTH'] = unwlks.shape[1]
                group.attrs['FILE_LENGTH'] = unwlks.shape[0]
                try:
                    group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP'])
                    group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP'])
                except:
                    group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                    group.attrs['RANGE_PIXEL_SIZE'] = rlks * float(group.attrs['RANGE_PIXEL_SIZE'])

            dset1 = h5file['mask'].get('mask')
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            masklks = multilook(mask, alks, rlks)
            group = h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=masklks, compression='gzip')

        elif 'timeseries' in h5file.keys():
            print 'Multilooking the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList = h5file['timeseries'].keys()
            for d in dateList:
                print d
                unw = h5file['timeseries'].get(d)
                unwlks = multilook(unw, alks, rlks)
                dset = group.create_dataset(d, data=unwlks, compression='gzip')

            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwlks.shape[1]
            group.attrs['FILE_LENGTH'] = unwlks.shape[0]
            try:
                group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP'])
                group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP'])
            except:
                group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                group.attrs['RANGE_PIXEL_SIZE'] = rlks * float(group.attrs['RANGE_PIXEL_SIZE'])

            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                Masklks = multilook(Mask, alks, rlks)
                group = h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Masklks, compression='gzip')
            except:
                print 'Multilooked file does not include the mask'

        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            k = h5file.keys()
            print 'multi looking the ' + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            Masklks = multilook(Mask, alks, rlks)
            dset = group.create_dataset(k[0], data=Masklks, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value
            try:
                group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP'])
                group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP'])
            except:
                group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                group.attrs['RANGE_PIXEL_SIZE'] = rlks * float(group.attrs['RANGE_PIXEL_SIZE'])
            group.attrs['WIDTH'] = Masklks.shape[1]
            group.attrs['FILE_LENGTH'] = Masklks.shape[0]

        h5file.close()
        h5file_lks.close()
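# ----------------------------------------------------------------------------
# multilook() is defined elsewhere in this script.  A minimal block-averaging
# sketch (assumption: integer look factors, remainder rows/columns cropped):
import numpy as np

def multilook_sketch(data, alks, rlks):
    alks, rlks = int(alks), int(rlks)
    d = np.asarray(data)
    rows = d.shape[0] // alks * alks
    cols = d.shape[1] // rlks * rlks
    d = d[0:rows, 0:cols]
    return d.reshape(rows // alks, alks, cols // rlks, rlks).mean(axis=(1, 3))
# ----------------------------------------------------------------------------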
# Imports used by this geocoding main(); readfile, writefile and Usage() come
# from PySAR / this script, geocode.pl is the external ROI_PAC geocoder.
import os, sys

import h5py
import numpy as np


def main(argv):
    try:
        file = argv[0]
        geomap = argv[1]
    except:
        Usage()
        sys.exit(1)

    fileName = os.path.basename(file).split('.')[0]
    h5file = h5py.File(file, 'r')
    k = h5file.keys()

    if k[0] in ('velocity', 'temporal_coherence', 'mask', 'rmse') and 'timeseries' not in k:
        dset = h5file[k[0]].get(k[0])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        outname = fileName + '.unw'

        print 'writing to roi_pac unw file format'
        writefile.write_float32(data, outname)
        f = open(outname + '.rsc', 'w')
        f.write('FILE_LENGTH       ' + str(data.shape[0]) + '\n')
        f.write('WIDTH             ' + str(data.shape[1]) + '\n')
        f.close()

        geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
        print geoCmd
        os.system(geoCmd)

        print 'reading geocoded file and write it to h5 format'
        amp, unw, unwrsc = readfile.read_float32('geo_' + outname)

        rmCmd = 'rm ' + outname;              os.system(rmCmd);  print rmCmd
        rmCmd = 'rm ' + outname + '.rsc';     os.system(rmCmd);  print rmCmd
        rmCmd = 'rm geo_' + outname;          os.system(rmCmd);  print rmCmd
        rmCmd = 'rm geo_' + outname + '.rsc'; os.system(rmCmd);  print rmCmd

        f = h5py.File('geo_' + file, 'w')
        group = f.create_group(k[0])
        dset = group.create_dataset(k[0], data=unw, compression='gzip')
        for key, value in h5file[k[0]].attrs.iteritems():
            group.attrs[key] = value
        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        f.close()
        h5file.close()

    elif 'timeseries' in k:
        print 'geocoding timeseries:'
        outname = 'epoch_temp.unw'

        f = h5py.File('geo_' + file, 'w')
        group = f.create_group('timeseries')
        epochList = h5file['timeseries'].keys()
        for epoch in epochList:
            print 'geocoding ' + epoch
            d = h5file['timeseries'].get(epoch)
            data = d[0:d.shape[0], 0:d.shape[1]]

            writefile.write_float32(data, outname)
            # use a separate handle for the temporary .rsc so the output HDF5
            # file handle 'f' is not overwritten inside the loop
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH       ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH             ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and add it to ' + 'geo_' + file
            amp, unw, unwrsc = readfile.read_float32('geo_' + outname)
            dset = group.create_dataset(epoch, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;              os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';     os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;          os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc'; os.system(rmCmd);  print rmCmd

        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        for key, value in h5file['timeseries'].attrs.iteritems():
            group.attrs[key] = value
        group.attrs['WIDTH'] = unwrsc['WIDTH']
        group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']
        f.close()
        h5file.close()

    elif k[0] == 'interferograms':
        print 'geocoding interferograms:'
        outname = 'igram_temp.unw'

        f = h5py.File('geo_' + file, 'w')
        gg = f.create_group('interferograms')
        igramList = h5file[k[0]].keys()
        for igram in igramList:
            print 'geocoding ' + igram
            group = gg.create_group('geo_' + igram)
            d = h5file['interferograms'][igram].get(igram)
            data = d[0:d.shape[0], 0:d.shape[1]]

            writefile.write_float32(data, outname)
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH       ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH             ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and add it to ' + 'geo_' + file
            amp, unw, unwrsc = readfile.read_float32('geo_' + outname)

            if igram == igramList[0]:
                MaskZero = np.ones([unw.shape[0], unw.shape[1]])
            MaskZero = amp * MaskZero

            dset = group.create_dataset('geo_' + igram, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;              os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';     os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;          os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc'; os.system(rmCmd);  print rmCmd

            for key, value in unwrsc.iteritems():
                group.attrs[key] = value
            for key, value in h5file['interferograms'][igram].attrs.iteritems():
                group.attrs[key] = value
            # for key, value in unwrsc.iteritems():
            #     group.attrs[key] = value
            group.attrs['WIDTH'] = unwrsc['WIDTH']
            group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']

        Mask = np.ones(MaskZero.shape)
        Mask[MaskZero == 0] = 0
        gm = f.create_group('mask')
        dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()
        h5file.close()

    elif k[0] == 'coherence':
        print 'geocoding coherence:'
        outname = 'cor_temp.unw'

        f = h5py.File('geo_' + file, 'w')
        gg = f.create_group(k[0])
        corList = h5file[k[0]].keys()
        for cor in corList:
            print 'geocoding ' + cor
            group = gg.create_group('geo_' + cor)
            d = h5file[k[0]][cor].get(cor)
            data = d[0:d.shape[0], 0:d.shape[1]]

            writefile.write_float32(data, outname)
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH       ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH             ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and add it to ' + 'geo_' + file
            amp, unw, unwrsc = readfile.read_float32('geo_' + outname)
            dset = group.create_dataset('geo_' + cor, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;              os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';     os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;          os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc'; os.system(rmCmd);  print rmCmd

            for key, value in unwrsc.iteritems():
                group.attrs[key] = value
            for key, value in h5file[k[0]][cor].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwrsc['WIDTH']
            group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']

        f.close()
        h5file.close()

    elif k[0] == 'wrapped':
        print 'geocoding wrapped interferograms:'
        outname = 'wrap_temp.int'

        f = h5py.File('geo_' + file, 'w')
        gg = f.create_group(k[0])
        wrapList = h5file[k[0]].keys()
        for wrap in wrapList:
            print 'geocoding ' + wrap
            group = gg.create_group('geo_' + wrap)
            d = h5file[k[0]][wrap].get(wrap)
            data = d[0:d.shape[0], 0:d.shape[1]]

            writefile.write_complex64(data, outname)
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH       ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH             ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and add it to ' + 'geo_' + file
            amp, unw, unwrsc = readfile.read_complex64('geo_' + outname)
            dset = group.create_dataset('geo_' + wrap, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;              os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';     os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;          os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc'; os.system(rmCmd);  print rmCmd

            for key, value in unwrsc.iteritems():
                group.attrs[key] = value
            for key, value in h5file[k[0]][wrap].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwrsc['WIDTH']
            group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']

        f.close()
        h5file.close()
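# ----------------------------------------------------------------------------
# Every branch above repeats the same round trip: write the array to a
# temporary ROI_PAC file with a two-line .rsc, run geocode.pl, read the
# geocoded result back, and delete the temporaries.  A possible refactoring of
# that pattern (a sketch only, not part of the original script; writer/reader
# stand for the writefile/readfile calls already used above):
import os

def geocode_array_sketch(data, geomap, outname, writer, reader):
    # writer/reader are e.g. writefile.write_float32 / readfile.read_float32
    writer(data, outname)
    f_rsc = open(outname + '.rsc', 'w')
    f_rsc.write('FILE_LENGTH       ' + str(data.shape[0]) + '\n')
    f_rsc.write('WIDTH             ' + str(data.shape[1]) + '\n')
    f_rsc.close()

    os.system('geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname)
    amp, unw, unwrsc = reader('geo_' + outname)

    for name in (outname, outname + '.rsc', 'geo_' + outname, 'geo_' + outname + '.rsc'):
        os.system('rm ' + name)
    return amp, unw, unwrsc
# ----------------------------------------------------------------------------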
# ----------------------- load_dem.py -----------------------
# The opening of this script is missing from the excerpt; the argument check
# below is an assumed reconstruction based on its own usage message and on the
# rest of the code.  readfile is PySAR's I/O module.
import os, sys

import h5py


try:
    demFile = sys.argv[1]
except:
    print('''
    *****************************************************************
    Usage: load_dem.py input [output]

    Example:
        load_dem.py  SanAndreas.dem
        load_dem.py  SanAndreas.dem    SanAndreas.h5
        load_dem.py  radar_8rlks.hgt   radar_8rlks.h5
    *****************************************************************
    ''')
    sys.exit(1)

ext = os.path.splitext(demFile)[1]
if ext == '.hgt':
    amp, dem, demRsc = readfile.read_float32(demFile)
elif ext == '.dem':
    dem, demRsc = readfile.read_real_int16(demFile)

try:
    outName = sys.argv[2]
except:
    outName = 'dem.h5'

print('writing >>> ' + outName)
h5 = h5py.File(outName, 'w')
group = h5.create_group('dem')
dset = group.create_dataset('dem', data=dem, compression='gzip')
for key, value in demRsc.items():
    group.attrs[key] = value
h5.close()
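# ----------------------------------------------------------------------------
# Quick read-back of the file written above, to show the layout it produces
# (group 'dem', dataset 'dem', .rsc keys stored as group attributes); the
# attribute names are whatever the DEM's .rsc file contained:
import h5py

with h5py.File('dem.h5', 'r') as h5:
    dem_data = h5['dem']['dem'][:]
    rsc_attrs = dict(h5['dem'].attrs)
    print(dem_data.shape, sorted(rsc_attrs.keys()))
# ----------------------------------------------------------------------------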
# Imports used by this spatial-filtering main(); readfile, writefile,
# multilook(), filter() and Usage() are defined in PySAR / this script.
import os, sys, getopt

import h5py


def main(argv):
    try:
        opts, args = getopt.getopt(argv, "h:f:t:p:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)
    if opts == []:
        Usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == '-f':
            file = arg
        elif opt == '-t':
            filtType = arg
        elif opt == '-p':
            par = arg

    # try:
    #     file = argv[0]
    #     alks = float(argv[1])
    #     rlks = float(argv[2])
    # except:
    #     Usage(); sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split('.')[0] + '_' + filtType + ext
    try:
        par
    except:
        par = []

    print '+++++++++++++++++++++++++++'
    print 'Filter type : ' + filtType
    print 'parameters  : ' + str(par)
    print '+++++++++++++++++++++++++++'
    ###############################################
    # NOTE: the non-HDF5 branches below were carried over from the multilooking
    # script; they still call multilook() with alks/rlks, which are not defined
    # in this script, so only the '.h5' branch is actually functional here.
    if ext == '.int' or ext == '.slc':
        a, p, r = readfile.read_complex64(file)
        plks = multilook(p, alks, rlks)
        amlks = multilook(a, alks, rlks)

        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH'] = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        a, p, r = readfile.read_float32(file)
        plks = multilook(p, alks, rlks)
        amlks = multilook(a, alks, rlks)

        writefile.write_float32(plks, outName)
        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH'] = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.dem':
        d, r = readfile.read_dem(file)
        dlks = multilook(d, alks, rlks)

        print 'writing ' + outName
        writefile.write_dem(dlks, outName)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext in ['.jpeg', '.jpg', '.png']:
        import Image
        im = Image.open(file)
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)

        try:
            r = readfile.read_rsc_file(file + '.rsc')
        except:
            sys.exit(1)

        r['FILE_LENGTH'] = str(height)
        r['WIDTH'] = str(width)
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.h5':
        h5file = h5py.File(file, 'r')
        # outName = file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks = h5py.File(outName, 'w')

        if 'interferograms' in h5file.keys():
            print 'Filtering the interferograms in space'
            gg = h5file_lks.create_group('interferograms')
            igramList = h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unwSet = h5file['interferograms'][igram].get(igram)
                unw = unwSet[0:unwSet.shape[0], 0:unwSet.shape[1]]
                unw = filter(unw, filtType, par)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value

            dset1 = h5file['mask'].get('mask')
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            group = h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=mask, compression='gzip')

        elif 'timeseries' in h5file.keys():
            print 'Filtering the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList = h5file['timeseries'].keys()
            for d in dateList:
                print d
                dset1 = h5file['timeseries'].get(d)
                data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                data = filter(data, filtType, par)
                dset = group.create_dataset(d, data=data, compression='gzip')

            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value

            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                # Masklks = multilook(Mask, alks, rlks)
                group = h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Mask, compression='gzip')
            except:
                print 'Filtered file does not include the mask'

        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            k = h5file.keys()
            print 'filtering the ' + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            data = filter(data, filtType, par)
            dset = group.create_dataset(k[0], data=data, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value

        h5file.close()
        h5file_lks.close()
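# ----------------------------------------------------------------------------
# filter(data, filtType, par) is defined elsewhere in this script (and shadows
# the Python builtin).  A plausible sketch using scipy.ndimage (assumption:
# filtType selects one of a few standard low-pass filters and par is its
# size/sigma parameter; the real PySAR helper may support other types):
import numpy as np
from scipy import ndimage

def filter_sketch(data, filtType, par):
    data = np.asarray(data, dtype=np.float32)
    if filtType == 'lowpass_gaussian':
        return ndimage.gaussian_filter(data, sigma=float(par))
    elif filtType == 'lowpass_avg':
        return ndimage.uniform_filter(data, size=int(par))
    elif filtType == 'lowpass_median':
        return ndimage.median_filter(data, size=int(par))
    else:
        raise ValueError('unsupported filter type: ' + str(filtType))
# ----------------------------------------------------------------------------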
# Imports used by this display main(); readfile, ut, rewrap(), add_inner_title()
# and Usage() are defined in PySAR / elsewhere in this script.
import os, sys, getopt

import h5py
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap


def main(argv):
    try:
        opts, args = getopt.getopt(argv, "h:D:O:G:S:f:m:M:l:u:s:c:e:d:r:p:w:i:j:t:R:a:b:k:x:y:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)

    flip_lr = 'no'
    flip_ud = 'no'
    disp_geo = 'no'
    font_size = 8
    color_map = 'jet'
    figs_rows = 5
    figs_cols = 8
    rewrapping = 'yes'
    allData2display = 'yes'
    Wspace = 0.1
    Hspace = 0.1
    title = 'out'
    # title = 'None'
    showRef = 'yes'
    ref_color = 'k'
    ref_symbol = 's'
    ref_size = 10
    dip_opposite = 'no'
    saveFig = 'no'

    if opts == []:
        Usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == '-f':
            File = arg
        elif opt == '-D':
            demFile = arg
        elif opt == '-m':
            min = float(arg)
        elif opt == '-M':
            max = float(arg)
        elif opt == '-l':
            flip_lr = arg
        elif opt == '-u':
            flip_ud = arg
        elif opt == '-s':
            font_size = int(arg)
        elif opt == '-c':
            color_map = arg
        elif opt == '-e':
            epoch_number = int(arg)
            allData2display = 'no'
        elif opt == '-d':
            epoch_date = arg
            allData2display = 'no'
        elif opt == '-r':
            figs_rows = int(arg)
        elif opt == '-p':
            figs_cols = int(arg)
        elif opt == '-w':
            rewrapping = arg
        elif opt == '-i':
            Wspace = float(arg)
        elif opt == '-j':
            Hspace = float(arg)
        elif opt == '-t':
            title = arg
        elif opt == '-R':
            showRef = arg
        elif opt == '-a':
            ref_color = arg
        elif opt == '-b':
            ref_symbol = arg
        elif opt == '-k':
            ref_size = int(arg)
        elif opt == '-x':
            win_x = arg
        elif opt == '-y':
            win_y = arg
        elif opt == '-G':
            disp_geo = arg
        elif opt == '-O':
            dip_opposite = arg
        elif opt == '-S':
            saveFig = arg

    h5file = h5py.File(File, 'r')
    k = h5file.keys()
    print k

    if color_map == 'hsv':
        ################################################
        cdict1 = {'red':   ((0.0, 0.0, 0.0),
                            (0.5, 0.0, 0.0),
                            (0.6, 1.0, 1.0),
                            (0.8, 1.0, 1.0),
                            (1.0, 0.5, 0.5)),
                  'green': ((0.0, 0.0, 0.0),
                            (0.2, 0.0, 0.0),
                            (0.4, 1.0, 1.0),
                            (0.6, 1.0, 1.0),
                            (0.8, 0.0, 0.0),
                            (1.0, 0.0, 0.0)),
                  'blue':  ((0.0, 0.5, 0.5),
                            (0.2, 1.0, 1.0),
                            (0.4, 1.0, 1.0),
                            (0.5, 0.0, 0.0),
                            (1.0, 0.0, 0.0))}
        ccmap = LinearSegmentedColormap('BlueRed1', cdict1)
        ################################################
    else:
        ccmap = plt.get_cmap(color_map)

    ####################################################################
    ####################################################################
    # if k[0]=='velocity' or k[0]=='temporal_coherence' or k[0]=='rmse':
    if len(k) == 1 and k[0] in ('dem', 'velocity', 'mask', 'temporal_coherence', 'rmse'):
        dset = h5file[k[0]].get(k[0])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        if dip_opposite in ('yes', 'Yes', 'Y', 'y', 'YES'):
            data = -1 * data

        try:
            xref = h5file[k[0]].attrs['ref_x']
            yref = h5file[k[0]].attrs['ref_y']
        except:
            print 'No reference point'

        # Yunjun, Mar 2015
        try:
            xref = xref - h5file[k[0]].attrs['subset_x0']
            yref = yref - h5file[k[0]].attrs['subset_y0']
        except:
            print 'No subset'

        try:
            ullon = float(h5file[k[0]].attrs['X_FIRST'])
            ullat = float(h5file[k[0]].attrs['Y_FIRST'])
            lon_step = float(h5file[k[0]].attrs['X_STEP'])
            lat_step = float(h5file[k[0]].attrs['Y_STEP'])
            lon_unit = h5file[k[0]].attrs['Y_UNIT']
            lat_unit = h5file[k[0]].attrs['X_UNIT']
            llcrnrlon = ullon
            llcrnrlat = ullat + lat_step * data.shape[0]
            urcrnrlon = ullon + lon_step * data.shape[1]
            urcrnrlat = ullat
            geocoord = 'yes'
            print 'Input file is Geocoded'
        except:
            geocoord = 'no'

        try:
            win_x
            wx = [int(i) for i in win_x.split()]
            data = data[:, wx[0]:wx[1]]
            xref = xref - wx[0]
        except:
            print 'No subset in x direction'

        try:
            win_y
            wy = [int(i) for i in win_y.split()]
            data = data[wy[0]:wy[1], :]
            yref = yref - wy[0]
        except:
            print 'No subset in y direction'

        try:
            min
        except:
            min = np.nanmin(data)
        try:
            max
        except:
            max = np.nanmax(data)

        if flip_lr == 'yes':
            data = np.fliplr(data)
            xref = np.shape(data)[1] - xref - 1
        if flip_ud == 'yes':
            data = np.flipud(data)
            yref = np.shape(data)[0] - yref - 1

        try:
            demFile
            # amp, dem, demRsc = readfile.read_float32(demFile)
            if os.path.basename(demFile).split('.')[1] == 'hgt':
                amp, dem, demRsc = readfile.read_float32(demFile)
            elif os.path.basename(demFile).split('.')[1] == 'dem':
                dem, demRsc = readfile.read_dem(demFile)

            try:
                win_x
                wx = [int(i) for i in win_x.split()]
                dem = dem[:, wx[0]:wx[1]]
            except:
                print ''
            try:
                win_y
                wy = [int(i) for i in win_y.split()]
                dem = dem[wy[0]:wy[1], :]
            except:
                print ''

            if flip_lr == 'yes':
                dem = np.fliplr(dem)
            if flip_ud == 'yes':
                dem = np.flipud(dem)

            cmap_dem = plt.get_cmap('gray')
            if disp_geo in ('yes', 'Yes', 'Y', 'y', 'YES') and geocoord in ('yes', 'Yes', 'Y', 'y', 'YES'):
                print 'display geo'
                # from mpl_toolkits.basemap import Basemap
                # m = Basemap(llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat,
                #             urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,
                #             resolution='f', area_thresh=1., projection='cyl')
                # m.imshow(ut.hillshade(dem, 50.0), interpolation='nearest', origin='upper')
                # m.drawcoastlines(color='w', linewidth=0.8)
                # m.drawmapboundary()   # draw a line around the map region
                # m.drawrivers()
                # m.drawparallels(np.arange(int(d1.min()), int(d1.max()), 1), linewidth=0.2, labels=[1,0,0,0])
                # m.drawmeridians(np.arange(int(d0.min()), int(d0.max()), 1), linewidth=0.2, labels=[0,0,0,1])
            else:
                print 'Not GEO'
                plt.imshow(ut.hillshade(dem, 50.0), cmap=cmap_dem)
        except:
            print 'No DEM file'

        plt.imshow(data, cmap=ccmap, vmin=min, vmax=max)
        plt.colorbar()

        if k[0] == 'velocity':
            plt.title('Velocity (m/yr)', fontsize=font_size)
            figName = 'velocity.pdf'
        elif k[0] == 'temporal_coherence':
            plt.title('Temporal coherence', fontsize=font_size)
            figName = 'temporal_coherence.pdf'
        elif k[0] == 'dem':
            plt.title('DEM error', fontsize=font_size)
            figName = 'DEM_error.pdf'
        elif k[0] == 'rmse':
            plt.title('RMSE (m/yr)', fontsize=font_size)
            figName = 'rmse.pdf'
        elif k[0] == 'mask':
            plt.title('Pixels with no valid value.', fontsize=font_size)
            figName = 'mask.pdf'

        if showRef == 'yes':
            try:
                refPoint = ref_color + ref_symbol
                plt.plot(xref, yref, refPoint, ms=ref_size)
            except:
                print 'No reference point'

        plt.xlim(0, np.shape(data)[1])
        plt.ylim(np.shape(data)[0], 0)

        if saveFig == 'yes':
            plt.savefig(figName)
        plt.show()

        # plt.savefig('fig.pdf')
        # fig = plt.figure()
        # ax.imshow(data, vmin=min, vmax=max)
        # ax.xaxis.label.set_fontsize(40)

    ####################################################################
    ####################################################################
    if 'timeseries' in k and allData2display == 'yes':
        if rewrapping == 'yes':
            print 'rewrapping'
            dateList = h5file['timeseries'].keys()
            nfigs = figs_rows * figs_cols
            ligram = len(dateList)
            range2phase = 4 * np.pi / float(h5file['timeseries'].attrs['WAVELENGTH'])
            # range2phase = 4*np.pi/0.056
            print 'number of timeseries epochs to display: ' + str(ligram)
            kk = int(ligram / nfigs) + 1
            ii = 0
            for j in range(1, kk):
                fig = plt.figure(j)
                ii = (j - 1) * nfigs + 1
                for i in range(ii, ii + nfigs):
                    ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1)
                    dset = h5file['timeseries'].get(dateList[i - 1])
                    data = dset[0:dset.shape[0], 0:dset.shape[1]]
                    data = range2phase * data
                    # data = np.angle(np.exp(1j*data))
                    data = rewrap(data)
                    ax.imshow(data, cmap=ccmap)
                    ax.set_yticklabels([])
                    ax.set_xticklabels([])
                    ax.set_xticks([])
                    ax.set_yticks([])
                    if title == 'out':
                        ax.set_title(dateList[i - 1], fontsize=font_size)
                    elif title == 'in':
                        add_inner_title(ax, dateList[i - 1], loc=1)
                fig.subplots_adjust(wspace=Wspace, hspace=Hspace)
                figName = k[0] + '_' + str(j) + '.pdf'
                if saveFig in ['yes', 'Yes', 'y', 'YES']:
                    plt.savefig(figName)

            fig = plt.figure(kk)
            ii = (kk - 1) * nfigs + 1
            for i in range(ii, ligram + 1):
                ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1)
                dset = h5file['timeseries'].get(dateList[i - 1])
                data = dset[0:dset.shape[0], 0:dset.shape[1]]
                data = range2phase * data
                # data = np.angle(np.exp(1j*data))
                data = rewrap(data)
                ax.imshow(data, cmap=ccmap)
                ax.xaxis.label.set_fontsize(20)
                ax.set_yticklabels([])
                ax.set_xticklabels([])
                ax.set_xticks([])
                ax.set_yticks([])
                if title == 'out':
                    ax.set_title(dateList[i - 1], fontsize=font_size)
                elif title == 'in':
                    add_inner_title(ax, dateList[i - 1], loc=1)
            fig.subplots_adjust(wspace=Wspace, hspace=Hspace)
            figName = k[0] + '_' + str(kk) + '.pdf'
            if saveFig in ['yes', 'Yes', 'y', 'YES']:
                plt.savefig(figName)
            plt.show()

        else:
            print 'No rewrapping'
            dateList = h5file['timeseries'].keys()
            nfigs = figs_rows * figs_cols
            ligram = len(dateList)
            print 'number of timeseries epochs to display: ' + str(ligram)
            kk = int(ligram / nfigs) + 1
            ii = 0
            for j in range(1, kk):
                fig = plt.figure(j)
                ii = (j - 1) * nfigs + 1
                for i in range(ii, ii + nfigs):
                    ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1)
                    data = h5file['timeseries'].get(dateList[i - 1])
                    try:
                        im = ax.imshow(data, cmap=ccmap, vmin=min, vmax=max)
                        # print 'here'
                    except:
                        im = ax.imshow(data, cmap=ccmap)
                    ax.set_yticklabels([])
                    ax.set_xticklabels([])
                    ax.set_xticks([])
                    ax.set_yticks([])
                    if title == 'out':
                        ax.set_title(dateList[i - 1], fontsize=font_size)
                    elif title == 'in':
                        add_inner_title(ax, dateList[i - 1], loc=1)
                fig.subplots_adjust(wspace=Wspace, hspace=Hspace)

            fig = plt.figure(kk)
            ii = (kk - 1) * nfigs + 1
            for i in range(ii, ligram + 1):
                ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1)
                data = h5file['timeseries'].get(dateList[i - 1])
                try:
                    im = ax.imshow(data, cmap=ccmap, vmin=min, vmax=max)
                except:
                    im = ax.imshow(data, cmap=ccmap)
                ax.xaxis.label.set_fontsize(20)
                ax.set_yticklabels([])
                ax.set_xticklabels([])
                ax.set_xticks([])
                ax.set_yticks([])
                if title == 'out':
                    ax.set_title(dateList[i - 1], fontsize=font_size)
                if title == 'in':
                    add_inner_title(ax, dateList[i - 1], loc=1)
            fig.subplots_adjust(wspace=Wspace, hspace=Hspace)
            plt.show()

    ####################################################################
    ####################################################################
    elif 'timeseries' in k and allData2display == 'no':
        dateList = h5file['timeseries'].keys()
        try:
            epoch_number
        except:
            epoch_number = dateList.index(epoch_date)
        range2phase = 4 * np.pi / float(h5file['timeseries'].attrs['WAVELENGTH'])
        # range2phase = 4*np.pi/0.056
        dset = h5file['timeseries'].get(dateList[epoch_number])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        if rewrapping == 'yes':
            data = range2phase * data
            # data = np.angle(np.exp(1j*data))
            data = rewrap(data)

        try:
            min
        except:
            min = np.nanmin(data)
        try:
            max
        except:
            max = np.nanmax(data)

        plt.imshow(data, cmap=ccmap, vmin=min, vmax=max)
        plt.colorbar()
        plt.show()

    ################################################################
    ################################################################
    if k[0] in ('interferograms', 'coherence', 'wrapped') and allData2display == 'yes':
        if k[0] in ('coherence', 'wrapped'):
            rewrapping = 'no'
            # color_map = 'gray'
            # ccmap = plt.get_cmap(color_map)
        if rewrapping == 'yes':
            ifgramList = h5file[k[0]].keys()
            nfigs = figs_rows * figs_cols
            ligram = len(ifgramList)
            print 'number of ' + k[0] + ' to display: ' + str(ligram)
            kk = int(ligram / nfigs) + 1
            # (the remainder of this branch is truncated in the source excerpt)
int(ligram / nfigs) + 1 ii = 0 for j in range(1, kk): fig = plt.figure(j) ii = (j - 1) * nfigs + 1 for i in range(ii, ii + nfigs): ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1) dset = h5file[k[0]][ifgramList[i - 1]].get(ifgramList[i - 1]) data = dset[0:dset.shape[0], 0:dset.shape[1]] data = np.angle(np.exp(1j * data)) ax.imshow(data, cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title == 'out': ax.set_title( h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], fontsize=font_size) elif title == 'in': add_inner_title( ax, h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], loc=1) fig.subplots_adjust(wspace=Wspace, hspace=Hspace) fig = plt.figure(kk) ii = (kk - 1) * nfigs + 1 for i in range(ii, ligram + 1): ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1) dset = h5file[k[0]][ifgramList[i - 1]].get(ifgramList[i - 1]) data = dset[0:dset.shape[0], 0:dset.shape[1]] data = np.angle(np.exp(1j * data)) ax.imshow(data, cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title == 'out': ax.set_title(h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], fontsize=font_size) elif title == 'in': add_inner_title( ax, h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], loc=1) fig.subplots_adjust(wspace=Wspace, hspace=Hspace) plt.show() else: ifgramList = h5file[k[0]].keys() nfigs = figs_rows * figs_cols ligram = len(ifgramList) print 'number of ' + k[0] + ' to display:' + str(ligram) kk = int(ligram / nfigs) + 1 ii = 0 for j in range(1, kk): fig = plt.figure(j) ii = (j - 1) * nfigs + 1 for i in range(ii, ii + nfigs): ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1) print 'loading ' + ifgramList[i - 1] dset = h5file[k[0]][ifgramList[i - 1]].get(ifgramList[i - 1]) data = dset[0:dset.shape[0], 0:dset.shape[1]] try: ax.imshow(data, vmin=min, vmax=max, cmap=ccmap) except: ax.imshow(data, cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title == 'out': ax.set_title( h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], fontsize=font_size) elif title == 'in': add_inner_title( ax, h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], loc=1) fig.subplots_adjust(wspace=Wspace, hspace=Hspace) fig = plt.figure(kk) ii = (kk - 1) * nfigs + 1 for i in range(ii, ligram + 1): ax = fig.add_subplot(figs_rows, figs_cols, i - ii + 1) print 'loading ' + ifgramList[i - 1] dset = h5file[k[0]][ifgramList[i - 1]].get(ifgramList[i - 1]) data = dset[0:dset.shape[0], 0:dset.shape[1]] #data = h5file[k[0]][ifgramList[i-1]].get(ifgramList[i-1]) try: ax.imshow(data, vmin=min, vmax=max, cmap=ccmap) except: ax.imshow(data, cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title == 'out': ax.set_title(h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], fontsize=font_size) elif title == 'in': add_inner_title( ax, h5file[k[0]][ifgramList[i - 1]].attrs['DATE12'], loc=1) fig.subplots_adjust(wspace=Wspace, hspace=Hspace) plt.show() #################################################################### #################################################################### elif k[0] in ('interferograms', 'coherence', 'wrapped') and allData2display == 'no': if k[0] in ('coherence', 'wrapped'): rewrapping == 'no' ifgramList = h5file[k[0]].keys() try: epoch_number except: for i in range(len(ifgramList)): if epoch_date in ifgramList[i]: epoch_number = i dset = h5file[k[0]][ifgramList[epoch_number]].get( ifgramList[epoch_number]) data = dset[0:dset.shape[0], 0:dset.shape[1]] if 
rewrapping == 'yes': data = np.angle(np.exp(1j * data)) if dip_opposite in ('yes', 'Yes', 'Y', 'y', 'YES'): data = -1 * data #DEM basemap try: demFile if os.path.basename(demFile).split('.')[1] == 'hgt': amp, dem, demRsc = readfile.read_float32(demFile) elif os.path.basename(demFile).split('.')[1] == 'dem': dem, demRsc = readfile.read_dem(demFile) try: win_x wx = [int(i) for i in win_x.split()] dem = dem[:, wx[0]:wx[1]] except: print '' try: win_y wy = [int(i) for i in win_y.split()] dem = dem[wy[0]:wy[1], :] except: print '' if flip_lr == 'yes': dem = np.fliplr(dem) if flip_ud == 'yes': dem = np.flipud(dem) cmap_dem = plt.get_cmap('gray') if disp_geo in ('yes', 'Yes', 'Y', 'y', 'YES') and geocoord in ('yes', 'Yes', 'Y', 'y', 'YES'): print 'display geo' else: print 'Not GEO' plt.imshow(ut.hillshade(dem, 50.0), cmap=cmap_dem) except: print 'No DEM file' try: plt.imshow(data, cmap=ccmap, vmin=min, vmax=max) except: plt.imshow(data, cmap=ccmap) plt.colorbar() # plt.title(h5file[k[0]][ifgramList[epoch_number]].attrs['DATE12'],fontsize=font_size) plt.title(ifgramList[epoch_number], fontsize=font_size) plt.show() ################################################################ ################################################################ h5file.close()
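# The time-series panels above convert displacement to phase with
# range2phase = 4*pi/WAVELENGTH and then call rewrap(). That helper is not included here;
# the function below is a minimal sketch of one common definition (wrapping radians back
# into the [-pi, pi) interval), given as an assumption rather than the project's own code.
import numpy as np

def rewrap_sketch(unw):
    """Wrap unwrapped phase (radians) back into the [-pi, pi) interval."""
    return unw - np.round(unw / (2 * np.pi)) * 2 * np.pi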
def main(argv): maskThr=0.7 try: opts, args = getopt.getopt(argv,"f:d:p:m:M:t:") except getopt.GetoptError: Usage() ; sys.exit(1) for opt,arg in opts: if opt == '-f': timeSeriesFile = arg elif opt == '-d': demFile = arg elif opt == '-p': p = int(arg) elif opt == '-m': maskFile=arg print maskFile elif opt == '-M': maskThr=float(arg) elif opt == '-t': corThr=float(arg) try: timeSeriesFile demFile except: Usage() ; sys.exit(1) try: p except: p=1 try: maskFile except: maskFile='Mask.h5' print 'Mask file: ' + maskFile ################################################### h5Mask=h5py.File(maskFile) kMask=h5Mask.keys() dset = h5Mask[kMask[0]].get(kMask[0]) Mask = dset[0:dset.shape[0],0:dset.shape[1]] # Mask[600:dset.shape[0],:]=0 Mask=Mask.flatten(1) print maskFile print maskThr if kMask[0]=='mask': ndx = Mask !=0 elif kMask[0]=='temporal_coherence': ndx = Mask >maskThr else: print 'Mask file not recognized!' Usage();sys.exit(1) h5Mask.close() ################################################### h5timeseries = h5py.File(timeSeriesFile) yref=h5timeseries['timeseries'].attrs['ref_y'] xref=h5timeseries['timeseries'].attrs['ref_x'] ################################################### if os.path.basename(demFile).split('.')[1]=='hgt': amp,dem,demRsc = readfile.read_float32(demFile) elif os.path.basename(demFile).split('.')[1]=='dem': dem,demRsc = readfile.read_dem(demFile) dem=dem-dem[yref][xref] # try: print 'considering the look angle of each resolution cell...' near_LA=float(h5timeseries['timeseries'].attrs['LOOK_REF1']) far_LA=float(h5timeseries['timeseries'].attrs['LOOK_REF2']) Length,Width=np.shape(dem) LA=np.linspace(near_LA,far_LA,Width) LA=np.tile(LA,[Length,1]) dem=dem/np.cos(LA*np.pi/180.0) # except: # print 'Look angle is not considered' dem=dem.flatten(1) print np.shape(dem) ################################################### if p==1: A=np.vstack((dem[ndx],np.ones(len(dem[ndx])))).T B = np.vstack((dem,np.ones(len(dem)))).T elif p==2: A=np.vstack((dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T B = np.vstack((dem**2,dem,np.ones(len(dem)))).T elif p==3: A = np.vstack((dem[ndx]**3,dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T B = np.vstack((dem**3,dem**2,dem,np.ones(len(dem)))).T print np.shape(A) Ainv=np.linalg.pinv(A) ################################################### print 'Estimating the tropospheric effect using the differences of the subsequent epochs and DEM' dateList = h5timeseries['timeseries'].keys() nrows,ncols=np.shape(h5timeseries['timeseries'].get(dateList[0])) PAR_EPOCH_DICT_2={} par_diff_Dict={} Correlation_Dict={} Correlation_Dict[dateList[0]]=0 Correlation_diff_Dict={} for i in range(len(dateList)-1): dset1 = h5timeseries['timeseries'].get(dateList[i]) dset2 = h5timeseries['timeseries'].get(dateList[i+1]) data1 = dset1[0:dset1.shape[0],0:dset1.shape[1]] data2 = dset2[0:dset2.shape[0],0:dset2.shape[1]] d = dset2[0:dset2.shape[0],0:dset2.shape[1]] - dset1[0:dset1.shape[0],0:dset1.shape[1]] del dset1 del dset2 d=d.flatten(1) data1=data1.flatten(1) data2=data2.flatten(1) ############################## print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%' print 'correlation of dem with :' print '******************************' print dateList[i] C1=np.zeros([2,len(dem[ndx])]) C1[0][:]=dem[ndx] C1[1][:]=data1[ndx] print np.corrcoef(C1)[0][1] # Correlation_Dict[dateList[i]]=np.corrcoef(C1)[0][1] # print '******************************' print dateList[i+1] C2=np.zeros([2,len(dem[ndx])]) C2[0][:]=dem[ndx] C2[1][:]=data2[ndx] print np.corrcoef(C2)[0][1] 
Correlation_Dict[dateList[i+1]]=np.corrcoef(C2)[0][1] # print '******************************' print dateList[i]+'-'+dateList[i+1] C=np.zeros([2,len(dem[ndx])]) C[0][:]=dem[ndx] C[1][:]=d[ndx] print np.corrcoef(C)[0][1] print '******************************' Correlation_diff_Dict[dateList[i]+'-'+dateList[i+1]]=np.corrcoef(C)[0][1] ############################## # try: # if np.corrcoef(C)[0][1] >= corThr: # par=np.dot(Ainv,d[ndx]) # else: # par=[0,0] # except: par=np.dot(Ainv,d[ndx]) # del d par_diff_Dict[dateList[i]+'-'+dateList[i+1]]=par # print par try: if np.abs(np.corrcoef(C2)[0][1]) >= corThr: PAR2=np.dot(Ainv,data2[ndx]) else: #PAR2=[0,0] PAR2=list(np.zeros(p+1)) except: PAR2=np.dot(Ainv,data2[ndx]) PAR_EPOCH_DICT_2[dateList[i+1]]=PAR2 # print PAR2 ################################################### print'****************************************' print 'Correlation of DEM with each time-series epoch:' average_phase_height_cor=0 for date in dateList: print date + ' : '+str(Correlation_Dict[date]) # print date + ' : ' + str(Correlation_Dict[date]) average_phase_height_cor=average_phase_height_cor+np.abs(Correlation_Dict[date]) print'****************************************' print'****************************************' print '' print 'Average Correlation of DEM with time-series epochs: ' + str(average_phase_height_cor/(len(dateList)-1)) print '' print '****************************************' print'****************************************' # print 'Correlation of DEM with epoch differences' # for key , value in Correlation_diff_Dict.iteritems(): # print key+' : '+str(value) # print'****************************************' # print 'Estimated parameters for each time-series epoch:' # for date in dateList: # print date + ' : ' + str(PAR_EPOCH_DICT_2[date]) # print'****************************************' ################################################### #fig=plt.figure(1) #ax = fig.add_subplot(3,1,1) #ax.plot(dem[ndx],data1[ndx],'o',ms=1) #ax = fig.add_subplot(3,1,2) #ax.plot(dem[ndx],data2[ndx],'o',ms=1) #ax = fig.add_subplot(3,1,3) #ax.plot(dem[ndx],d[ndx],'o',ms=1) #plt.show() ################################################### # print par_diff_Dict par_epoch_Dict={} par_epoch_Dict[dateList[1]]=par_diff_Dict[dateList[0]+'-'+dateList[1]] for i in range(2,len(dateList)): par_epoch_Dict[dateList[i]]=par_epoch_Dict[dateList[i-1]]+par_diff_Dict[dateList[i-1]+'-'+dateList[i]] print '###############################' #for key , value in PAR_EPOCH_DICT_2.iteritems(): # print key+' : '+str(value) +' | '+str(par_epoch_Dict[key]) # print PAR_EPOCH_DICT_2 # print par_epoch_Dict print '###############################' yref=h5timeseries['timeseries'].attrs['ref_y'] xref=h5timeseries['timeseries'].attrs['ref_x'] print 'removing the tropospheric delay from each epoch and writing '+timeSeriesFile.split('.')[0]+'_tropCor.h5:' h5tropCor = h5py.File(timeSeriesFile.split('.')[0]+'_tropCor.h5','w') group = h5tropCor.create_group('timeseries') dset = group.create_dataset(dateList[0], data=h5timeseries['timeseries'].get(dateList[0]), compression='gzip') for date in dateList: if not date in h5tropCor['timeseries']: dset = h5timeseries['timeseries'].get(date) data = dset[0:dset.shape[0],0:dset.shape[1]] # par=par_epoch_Dict[date] par=PAR_EPOCH_DICT_2[date] # print np.shape(B) # print np.shape(par) tropo_effect=np.reshape(np.dot(B,par),[dset.shape[1],dset.shape[0]]).T tropo_effect=tropo_effect-tropo_effect[yref][xref] dset = group.create_dataset(date, data=data-tropo_effect, compression='gzip') for 
key,value in h5timeseries['timeseries'].attrs.iteritems(): group.attrs[key] = value dset1 = h5timeseries['mask'].get('mask') group=h5tropCor.create_group('mask') dset = group.create_dataset('mask', data=dset1, compression='gzip') h5tropCor.close() h5timeseries.close()
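# The correction above is, at its core, a least-squares fit of phase against elevation over the
# masked pixels (design matrix A, np.linalg.pinv), followed by removal of the predicted
# component (np.dot(B, par)). The snippet below is a self-contained numeric illustration of
# that p=1 step; all *_demo names and values are made up for the example.
import numpy as np

dem_demo = np.array([100., 400., 800., 1500.])            # elevations of four sample pixels
phase_demo = 0.002 * dem_demo + 0.05                      # phase with a linear height term
A_demo = np.vstack((dem_demo, np.ones(len(dem_demo)))).T  # [dem, 1] design matrix, as above
par_demo = np.dot(np.linalg.pinv(A_demo), phase_demo)     # estimated [slope, offset]
tropo_demo = np.dot(A_demo, par_demo)                     # modeled tropospheric component
residual_demo = phase_demo - tropo_demo                   # phase left after removing the ramp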
def main(argv): #outName='subsetIgrams.h5' try: opts, args = getopt.getopt(argv, "h:f:x:y:o:l:L:") except getopt.GetoptError: print 'Error while getting args' Usage() sys.exit(1) for opt, arg in opts: if opt in ("-h", "--help"): Usage() sys.exit() elif opt == '-f': File = arg elif opt == '-y': ysub = [int(i) for i in arg.split(':')] ysub.sort() elif opt == '-x': xsub = [int(i) for i in arg.split(':')] xsub.sort() elif opt == '-o': outName = arg elif opt == '-l': Latsub = [float(i) for i in arg.split(':')] Latsub.sort() elif opt == '-L': Lonsub = [float(i) for i in arg.split(':')] Lonsub.sort() ##################################################### try: File xsub ysub except: try: File Latsub Lonsub except: Usage() sys.exit(1) try: outName except: outName = 'subset_' + File ext = os.path.splitext(File)[1] if ext == '.h5': try: h5file = h5py.File(File, 'r') except: Usage() sys.exit(1) k = h5file.keys() # convert LatLon to xy for geocoded file try: Latsub Lonsub if 'X_FIRST' in h5file[k[0]].attrs.keys(): xsub = [0] * 2 ysub = [0] * 2 xsub[0] = int( (Lonsub[0] - float(h5file[k[0]].attrs['X_FIRST'])) / float(h5file[k[0]].attrs['X_STEP'])) xsub[1] = int( (Lonsub[1] - float(h5file[k[0]].attrs['X_FIRST'])) / float(h5file[k[0]].attrs['X_STEP'])) ysub[0] = int( (Latsub[1] - float(h5file[k[0]].attrs['Y_FIRST'])) / float(h5file[k[0]].attrs['Y_STEP'])) ysub[1] = int( (Latsub[0] - float(h5file[k[0]].attrs['Y_FIRST'])) / float(h5file[k[0]].attrs['Y_STEP'])) print 'Subseting geocoded', ext, ' file with Latitude and Longitude...' elif 'X_FIRST' in h5file[k[0]][h5file[k[0]].keys()[0]].attrs.keys( ): # for geocoded interferograms/coherence igramList = h5file[k[0]].keys() xsub = [0] * 2 ysub = [0] * 2 xsub[0] = int( (Lonsub[0] - float(h5file[k[0]][igramList[0]].attrs['X_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['X_STEP'])) xsub[1] = int( (Lonsub[1] - float(h5file[k[0]][igramList[0]].attrs['X_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['X_STEP'])) ysub[0] = int( (Latsub[1] - float(h5file[k[0]][igramList[0]].attrs['Y_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['Y_STEP'])) ysub[1] = int( (Latsub[0] - float(h5file[k[0]][igramList[0]].attrs['Y_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['Y_STEP'])) print 'Subseting geocoded', ext, ' file with Latitude and Longitude...' else: print 'Not geocoded file, cannot be subseted with LatLon.' 
Usage() sys.exit(1) except: Geo = 0 # k=h5file.keys() if 'interferograms' in k: igramList = h5file['interferograms'].keys() h5out = h5py.File(outName, 'w') gg = h5out.create_group('interferograms') for igram in igramList: print igram dset1 = h5file['interferograms'][igram].get(igram) group = gg.create_group(igram) dset = group.create_dataset(igram, data=dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip') for key, value in h5file['interferograms'][ igram].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[0] group.attrs['WIDTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[1] group.attrs['subset_x0'] = xsub[0] group.attrs['subset_x1'] = xsub[1] group.attrs['subset_y0'] = ysub[0] group.attrs['subset_y1'] = ysub[1] if 'X_FIRST' in h5file['interferograms'][igram].attrs.keys(): group.attrs['X_FIRST'] = float( h5file['interferograms'] [igram].attrs['X_FIRST']) + xsub[0] * float( h5file['interferograms'][igram].attrs['X_STEP']) group.attrs['Y_FIRST'] = float( h5file['interferograms'] [igram].attrs['Y_FIRST']) + ysub[0] * float( h5file['interferograms'][igram].attrs['Y_STEP']) gm = h5out.create_group('mask') try: Mset = h5file['mask'].get('mask') dset = gm.create_dataset('mask', data=Mset[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip') except: print 'No group for mask found! It may cause problem in other processing steps.' try: Cset = h5file['meanCoherence'].get('meanCoherence') gm = h5out.create_group('meanCoherence') dset = gm.create_dataset('meanCoherence', data=Cset[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip') except: print 'No average coherence found in the File' elif k[0] in ('coherence', 'wrapped'): corList = h5file[k[0]].keys() h5out = h5py.File(outName, 'w') gg = h5out.create_group(k[0]) for cor in corList: print cor dset1 = h5file[k[0]][cor].get(cor) group = gg.create_group(cor) dset = group.create_dataset(cor, data=dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip') for key, value in h5file[k[0]][cor].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[0] group.attrs['WIDTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[1] group.attrs['subset_x0'] = xsub[0] group.attrs['subset_x1'] = xsub[1] group.attrs['subset_y0'] = ysub[0] group.attrs['subset_y1'] = ysub[1] if 'X_FIRST' in h5file[k[0]][cor].attrs.keys(): group.attrs['X_FIRST'] = float( h5file[k[0]][cor].attrs['X_FIRST']) + xsub[0] * float( h5file[k[0]][cor].attrs['X_STEP']) group.attrs['Y_FIRST'] = float( h5file[k[0]][cor].attrs['Y_FIRST']) + ysub[0] * float( h5file[k[0]][cor].attrs['Y_STEP']) elif 'timeseries' in h5file.keys(): dateList = h5file['timeseries'].keys() h5out = h5py.File(outName, 'w') group = h5out.create_group('timeseries') for d in dateList: print d dset1 = h5file['timeseries'].get(d) dset = group.create_dataset(d, data=dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip') for key, value in h5file['timeseries'].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[0] group.attrs['WIDTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[1] group.attrs['subset_x0'] = xsub[0] group.attrs['subset_x1'] = xsub[1] group.attrs['subset_y0'] = ysub[0] group.attrs['subset_y1'] = ysub[1] if 'X_FIRST' in h5file['timeseries'].attrs.keys(): group.attrs['X_FIRST'] = float( h5file['timeseries'].attrs['X_FIRST']) + xsub[0] * float( h5file['timeseries'].attrs['X_STEP']) 
group.attrs['Y_FIRST'] = float( h5file['timeseries'].attrs['Y_FIRST']) + ysub[0] * float( h5file['timeseries'].attrs['Y_STEP']) h5file.close() h5out.close() elif 'temporal_coherence' in h5file.keys( ) or 'velocity' in h5file.keys() or 'mask' in h5file.keys( ) or 'rmse' in h5file.keys(): print 'writing >>> ' + outName dset = h5file[k[0]].get(k[0]) data = dset[ysub[0]:ysub[1], xsub[0]:xsub[1]] hfout = h5py.File(outName, 'w') group = hfout.create_group(k[0]) group.create_dataset(k[0], data=data, compression='gzip') for key, value in h5file[k[0]].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH'] = data.shape[0] group.attrs['WIDTH'] = data.shape[1] group.attrs['XMIN'] = 0 group.attrs['XMAX'] = data.shape[1] - 1 group.attrs['YMIN'] = 0 group.attrs['YMAX'] = data.shape[0] - 1 group.attrs['subset_x0'] = xsub[0] group.attrs['subset_x1'] = xsub[1] group.attrs['subset_y0'] = ysub[0] group.attrs['subset_y1'] = ysub[1] if 'X_FIRST' in h5file[k[0]].attrs.keys(): group.attrs['X_FIRST'] = float( h5file[k[0]].attrs['X_FIRST']) + xsub[0] * float( h5file[k[0]].attrs['X_STEP']) group.attrs['Y_FIRST'] = float( h5file[k[0]].attrs['Y_FIRST']) + ysub[0] * float( h5file[k[0]].attrs['Y_STEP']) h5file.close() hfout.close() elif ext in ['.unw', '.cor', '.hgt']: a, p, r = readfile.read_float32(File) try: Latsub Lonsub try: r['X_FIRST'] xsub = [0] * 2 ysub = [0] * 2 xsub[0] = int( (Lonsub[0] - float(r['X_FIRST'])) / float(r['X_STEP'])) xsub[1] = int( (Lonsub[1] - float(r['X_FIRST'])) / float(r['X_STEP'])) ysub[0] = int( (Latsub[1] - float(r['Y_FIRST'])) / float(r['Y_STEP'])) ysub[1] = int( (Latsub[0] - float(r['Y_FIRST'])) / float(r['Y_STEP'])) print 'Subseting geocoded', ext, ' file with Latitude and Longitude...' except: print 'Not geocoded file, cannot be subseted with LatLon.' Usage() sys.exit(1) except: Geo = 0 a = a[ysub[0]:ysub[1], xsub[0]:xsub[1]] p = p[ysub[0]:ysub[1], xsub[0]:xsub[1]] print 'writing >>> ' + outName writefile.write_float32(p, outName) r['FILE_LENGTH'] = str(p.shape[0]) r['WIDTH'] = str(p.shape[1]) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) r['subset_x0'] = str(xsub[0]) r['subset_x1'] = str(xsub[1]) r['subset_y0'] = str(ysub[0]) r['subset_y1'] = str(ysub[1]) try: r['Y_FIRST'] = str( float(r['Y_FIRST']) + ysub[0] * float(r['Y_STEP'])) r['X_FIRST'] = str( float(r['X_FIRST']) + xsub[0] * float(r['X_STEP'])) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close() elif ext == '.dem': d, r = readfile.read_dem(File) try: Latsub Lonsub # print Latsub try: r['X_FIRST'] xsub = [0] * 2 ysub = [0] * 2 xsub[0] = int( (Lonsub[0] - float(r['X_FIRST'])) / float(r['X_STEP'])) xsub[1] = int( (Lonsub[1] - float(r['X_FIRST'])) / float(r['X_STEP'])) ysub[0] = int( (Latsub[1] - float(r['Y_FIRST'])) / float(r['Y_STEP'])) ysub[1] = int( (Latsub[0] - float(r['Y_FIRST'])) / float(r['Y_STEP'])) print 'Subseting', ext, ' file with Latitude and Longitude...' except: print 'Not geocoded file, cannot be subseted with LatLon.' 
Usage() sys.exit(1) except: Geo = 0 d = d[ysub[0]:ysub[1], xsub[0]:xsub[1]] print 'writing >>> ' + outName writefile.write_dem(d, outName) r['FILE_LENGTH'] = str(d.shape[0]) r['WIDTH'] = str(d.shape[1]) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) r['subset_x0'] = str(xsub[0]) r['subset_x1'] = str(xsub[1]) r['subset_y0'] = str(ysub[0]) r['subset_y1'] = str(ysub[1]) try: r['Y_FIRST'] = str( float(r['Y_FIRST']) + ysub[0] * float(r['Y_STEP'])) r['X_FIRST'] = str( float(r['X_FIRST']) + xsub[0] * float(r['X_STEP'])) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close() elif ext in ['.jpeg', 'jpg', 'png']: import Image im = Image.open(File) try: r = readfile.read_rsc_file(File + '.rsc') except: sys.exit(1) try: Latsub Lonsub try: r['X_FIRST'] xsub = [0] * 2 ysub = [0] * 2 xsub[0] = int( (Lonsub[0] - float(r['X_FIRST'])) / float(r['X_STEP'])) xsub[1] = int( (Lonsub[1] - float(r['X_FIRST'])) / float(r['X_STEP'])) ysub[0] = int( (Latsub[1] - float(r['Y_FIRST'])) / float(r['Y_STEP'])) ysub[1] = int( (Latsub[0] - float(r['Y_FIRST'])) / float(r['Y_STEP'])) print 'Subseting geocoded', ext, ' file with Latitude and Longitude...' except: print 'Not geocoded file, cannot be subseted with LatLon.' Usage() sys.exit(1) except: Geo = 0 box = (xsub[0], ysub[0], xsub[1], ysub[1]) output_img = im.crop(box) print 'writing >>> ' + outName output_img.save(outName) # try: # r=readfile.read_rsc_file(File+'.rsc') # except: # sys.exit(1) r['FILE_LENGTH'] = str(ysub[1] - ysub[0]) r['WIDTH'] = str(xsub[1] - xsub[0]) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) r['subset_x0'] = str(xsub[0]) r['subset_x1'] = str(xsub[1]) r['subset_y0'] = str(ysub[0]) r['subset_y1'] = str(ysub[1]) try: r['Y_FIRST'] = str( float(r['Y_FIRST']) + ysub[0] * float(r['Y_STEP'])) r['X_FIRST'] = str( float(r['X_FIRST']) + xsub[0] * float(r['X_STEP'])) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close()
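# The lat/lon branch of the subset script converts geographic bounds to pixel indices with the
# X_FIRST/X_STEP and Y_FIRST/Y_STEP attributes, exactly as in the int((lon - X_FIRST)/X_STEP)
# expressions above. A small self-contained illustration follows; the *_demo attribute values
# are invented for the example (note Y_STEP is negative, which is why the latitude order flips):
X_FIRST_demo, X_STEP_demo = -118.0, 0.25
Y_FIRST_demo, Y_STEP_demo = 34.0, -0.25
Lonsub_demo = [-117.5, -117.0]
Latsub_demo = [33.0, 33.5]
xsub_demo = [int((lon - X_FIRST_demo) / X_STEP_demo) for lon in Lonsub_demo]  # [2, 4]
ysub_demo = [int((Latsub_demo[1] - Y_FIRST_demo) / Y_STEP_demo),
             int((Latsub_demo[0] - Y_FIRST_demo) / Y_STEP_demo)]              # [2, 4]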
correlation_with_dem.py radar_8rlks.hgt velocity.h5
***********************************************************************
***********************************************************************
'''
    try:
        demFile = sys.argv[1]
        File = sys.argv[2]
    except:
        Usage()
        sys.exit(1)

    if os.path.basename(demFile).split('.')[1] == 'hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1] == 'dem':
        dem, demRsc = readfile.read_dem(demFile)
    # amp,dem,demRsc = readfile.read_float32(demFile)

    h5data = h5py.File(File)
    dset = h5data['velocity'].get('velocity')
    data = dset[0:dset.shape[0], 0:dset.shape[1]]

    try:
        suby = sys.argv[3].split(':')
        subx = sys.argv[4].split(':')
        data = data[int(suby[0]):int(suby[1]), int(subx[0]):int(subx[1])]
        dem = dem[int(suby[0]):int(suby[1]), int(subx[0]):int(subx[1])]
    except:
        pass
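# The correlation_with_dem.py fragment above stops before the statistic it is named after.
# Judging from the script name and the np.corrcoef pattern used in the tropospheric script
# earlier, the missing step is a Pearson correlation between the DEM and the velocity field.
# The lines below are a self-contained sketch of that computation with synthetic arrays; the
# *_demo names and values are hypothetical.
import numpy as np

dem_arr_demo = np.array([[100., 200.], [300., 400.]])
vel_arr_demo = 0.001 * dem_arr_demo + np.array([[0.01, -0.02], [0.00, 0.03]])
C_demo = np.vstack((dem_arr_demo.flatten(), vel_arr_demo.flatten()))
print('DEM-velocity correlation: ' + str(np.corrcoef(C_demo)[0][1]))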
def main(argv): #outName='subsetIgrams.h5' try: opts, args = getopt.getopt(argv,"h:f:x:y:o:l:L:") except getopt.GetoptError: print 'Error while getting args' Usage() ; sys.exit(1) for opt,arg in opts: if opt in ("-h","--help"): Usage() sys.exit() elif opt == '-f': File = arg elif opt=='-y': ysub=[int(i) for i in arg.split(':')] ysub.sort() elif opt=='-x': xsub = [int(i) for i in arg.split(':')] xsub.sort() elif opt=='-o': outName=arg elif opt=='-l': Latsub=[float(i) for i in arg.split(':')] Latsub.sort() elif opt=='-L': Lonsub = [float(i) for i in arg.split(':')] Lonsub.sort() ##################################################### try: File xsub ysub except: try: File Latsub Lonsub except: Usage();sys.exit(1) try: outName except: outName='subset_'+File ext = os.path.splitext(File)[1] if ext == '.h5': try: h5file=h5py.File(File,'r') except: Usage() ; sys.exit(1) k=h5file.keys() # convert LatLon to xy for geocoded file try: Latsub Lonsub if 'X_FIRST' in h5file[k[0]].attrs.keys(): xsub=[0]*2 ysub=[0]*2 xsub[0]=int((Lonsub[0]-float(h5file[k[0]].attrs['X_FIRST']))/float(h5file[k[0]].attrs['X_STEP'])) xsub[1]=int((Lonsub[1]-float(h5file[k[0]].attrs['X_FIRST']))/float(h5file[k[0]].attrs['X_STEP'])) ysub[0]=int((Latsub[1]-float(h5file[k[0]].attrs['Y_FIRST']))/float(h5file[k[0]].attrs['Y_STEP'])) ysub[1]=int((Latsub[0]-float(h5file[k[0]].attrs['Y_FIRST']))/float(h5file[k[0]].attrs['Y_STEP'])) print 'Subseting geocoded',ext,' file with Latitude and Longitude...' elif 'X_FIRST' in h5file[k[0]][h5file[k[0]].keys()[0]].attrs.keys(): # for geocoded interferograms/coherence igramList=h5file[k[0]].keys() xsub=[0]*2 ysub=[0]*2 xsub[0]=int((Lonsub[0]-float(h5file[k[0]][igramList[0]].attrs['X_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['X_STEP'])) xsub[1]=int((Lonsub[1]-float(h5file[k[0]][igramList[0]].attrs['X_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['X_STEP'])) ysub[0]=int((Latsub[1]-float(h5file[k[0]][igramList[0]].attrs['Y_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['Y_STEP'])) ysub[1]=int((Latsub[0]-float(h5file[k[0]][igramList[0]].attrs['Y_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['Y_STEP'])) print 'Subseting geocoded',ext,' file with Latitude and Longitude...' else: print 'Not geocoded file, cannot be subseted with LatLon.' 
Usage() ; sys.exit(1) except: Geo=0 # k=h5file.keys() if 'interferograms' in k: igramList=h5file['interferograms'].keys() h5out=h5py.File(outName,'w') gg=h5out.create_group('interferograms') for igram in igramList: print igram dset1=h5file['interferograms'][igram].get(igram) group=gg.create_group(igram) dset=group.create_dataset(igram, data=dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip') for key, value in h5file['interferograms'][igram].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[0] group.attrs['WIDTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[1] group.attrs['subset_x0']=xsub[0] group.attrs['subset_x1']=xsub[1] group.attrs['subset_y0']=ysub[0] group.attrs['subset_y1']=ysub[1] if 'X_FIRST' in h5file['interferograms'][igram].attrs.keys(): group.attrs['X_FIRST']=float(h5file['interferograms'][igram].attrs['X_FIRST']) + xsub[0]*float(h5file['interferograms'][igram].attrs['X_STEP']) group.attrs['Y_FIRST']=float(h5file['interferograms'][igram].attrs['Y_FIRST']) + ysub[0]*float(h5file['interferograms'][igram].attrs['Y_STEP']) gm=h5out.create_group('mask') try: Mset=h5file['mask'].get('mask') dset=gm.create_dataset('mask', data=Mset[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip') except: print 'No group for mask found! It may cause problem in other processing steps.' try: Cset=h5file['meanCoherence'].get('meanCoherence') gm=h5out.create_group('meanCoherence') dset=gm.create_dataset('meanCoherence', data=Cset[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip') except: print 'No average coherence found in the File' elif k[0] in ('coherence','wrapped'): corList=h5file[k[0]].keys() h5out=h5py.File(outName,'w') gg=h5out.create_group(k[0]) for cor in corList: print cor dset1=h5file[k[0]][cor].get(cor) group=gg.create_group(cor) dset=group.create_dataset(cor, data=dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip') for key, value in h5file[k[0]][cor].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[0] group.attrs['WIDTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[1] group.attrs['subset_x0']=xsub[0] group.attrs['subset_x1']=xsub[1] group.attrs['subset_y0']=ysub[0] group.attrs['subset_y1']=ysub[1] if 'X_FIRST' in h5file[k[0]][cor].attrs.keys(): group.attrs['X_FIRST']=float(h5file[k[0]][cor].attrs['X_FIRST']) + xsub[0]*float(h5file[k[0]][cor].attrs['X_STEP']) group.attrs['Y_FIRST']=float(h5file[k[0]][cor].attrs['Y_FIRST']) + ysub[0]*float(h5file[k[0]][cor].attrs['Y_STEP']) elif 'timeseries' in h5file.keys(): dateList=h5file['timeseries'].keys() h5out=h5py.File(outName,'w') group=h5out.create_group('timeseries') for d in dateList: print d dset1=h5file['timeseries'].get(d) dset=group.create_dataset(d, data=dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip') for key, value in h5file['timeseries'].attrs.iteritems(): group.attrs[key] = value group.attrs['FILE_LENGTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[0] group.attrs['WIDTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[1] group.attrs['subset_x0']=xsub[0] group.attrs['subset_x1']=xsub[1] group.attrs['subset_y0']=ysub[0] group.attrs['subset_y1']=ysub[1] if 'X_FIRST' in h5file['timeseries'].attrs.keys(): group.attrs['X_FIRST']=float(h5file['timeseries'].attrs['X_FIRST']) + xsub[0]*float(h5file['timeseries'].attrs['X_STEP']) group.attrs['Y_FIRST']=float(h5file['timeseries'].attrs['Y_FIRST']) + ysub[0]*float(h5file['timeseries'].attrs['Y_STEP']) h5file.close() 
h5out.close() elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys() or 'rmse' in h5file.keys(): print 'writing >>> ' +outName dset=h5file[k[0]].get(k[0]) data=dset[ysub[0]:ysub[1],xsub[0]:xsub[1]] hfout=h5py.File(outName,'w') group= hfout.create_group(k[0]) group.create_dataset(k[0],data=data,compression='gzip') for key,value in h5file[k[0]].attrs.iteritems(): group.attrs[key]=value group.attrs['FILE_LENGTH']=data.shape[0] group.attrs['WIDTH']=data.shape[1] group.attrs['XMIN']=0 group.attrs['XMAX']=data.shape[1]-1 group.attrs['YMIN']=0 group.attrs['YMAX']=data.shape[0]-1 group.attrs['subset_x0']=xsub[0] group.attrs['subset_x1']=xsub[1] group.attrs['subset_y0']=ysub[0] group.attrs['subset_y1']=ysub[1] if 'X_FIRST' in h5file[k[0]].attrs.keys(): group.attrs['X_FIRST']=float(h5file[k[0]].attrs['X_FIRST']) + xsub[0]*float(h5file[k[0]].attrs['X_STEP']) group.attrs['Y_FIRST']=float(h5file[k[0]].attrs['Y_FIRST']) + ysub[0]*float(h5file[k[0]].attrs['Y_STEP']) h5file.close() hfout.close() elif ext in ['.unw','.cor','.hgt']: a,p,r = readfile.read_float32(File) try: Latsub Lonsub try: r['X_FIRST'] xsub=[0]*2 ysub=[0]*2 xsub[0]=int((Lonsub[0]-float(r['X_FIRST']))/float(r['X_STEP'])) xsub[1]=int((Lonsub[1]-float(r['X_FIRST']))/float(r['X_STEP'])) ysub[0]=int((Latsub[1]-float(r['Y_FIRST']))/float(r['Y_STEP'])) ysub[1]=int((Latsub[0]-float(r['Y_FIRST']))/float(r['Y_STEP'])) print 'Subseting geocoded',ext,' file with Latitude and Longitude...' except: print 'Not geocoded file, cannot be subseted with LatLon.' Usage() ; sys.exit(1) except: Geo=0 a=a[ysub[0]:ysub[1],xsub[0]:xsub[1]] p=p[ysub[0]:ysub[1],xsub[0]:xsub[1]] print 'writing >>> '+outName writefile.write_float32(p,outName) r['FILE_LENGTH']=str(p.shape[0]) r['WIDTH']=str(p.shape[1]) r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) r['subset_x0']=str(xsub[0]) r['subset_x1']=str(xsub[1]) r['subset_y0']=str(ysub[0]) r['subset_y1']=str(ysub[1]) try: r['Y_FIRST']=str(float(r['Y_FIRST'])+ysub[0]*float(r['Y_STEP'])) r['X_FIRST']=str(float(r['X_FIRST'])+xsub[0]*float(r['X_STEP'])) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close() elif ext== '.dem': d,r = readfile.read_dem(File) try: Latsub Lonsub # print Latsub try: r['X_FIRST'] xsub=[0]*2 ysub=[0]*2 xsub[0]=int((Lonsub[0]-float(r['X_FIRST']))/float(r['X_STEP'])) xsub[1]=int((Lonsub[1]-float(r['X_FIRST']))/float(r['X_STEP'])) ysub[0]=int((Latsub[1]-float(r['Y_FIRST']))/float(r['Y_STEP'])) ysub[1]=int((Latsub[0]-float(r['Y_FIRST']))/float(r['Y_STEP'])) print 'Subseting',ext,' file with Latitude and Longitude...' except: print 'Not geocoded file, cannot be subseted with LatLon.' 
Usage() ; sys.exit(1) except: Geo=0 d=d[ysub[0]:ysub[1],xsub[0]:xsub[1]] print 'writing >>> '+outName writefile.write_dem(d,outName) r['FILE_LENGTH']=str(d.shape[0]) r['WIDTH']=str(d.shape[1]) r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) r['subset_x0']=str(xsub[0]) r['subset_x1']=str(xsub[1]) r['subset_y0']=str(ysub[0]) r['subset_y1']=str(ysub[1]) try: r['Y_FIRST']=str(float(r['Y_FIRST'])+ysub[0]*float(r['Y_STEP'])) r['X_FIRST']=str(float(r['X_FIRST'])+xsub[0]*float(r['X_STEP'])) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close() elif ext in ['.jpeg','jpg','png']: import Image im = Image.open(File) try: r=readfile.read_rsc_file(File+'.rsc') except: sys.exit(1) try: Latsub Lonsub try: r['X_FIRST'] xsub=[0]*2 ysub=[0]*2 xsub[0]=int((Lonsub[0]-float(r['X_FIRST']))/float(r['X_STEP'])) xsub[1]=int((Lonsub[1]-float(r['X_FIRST']))/float(r['X_STEP'])) ysub[0]=int((Latsub[1]-float(r['Y_FIRST']))/float(r['Y_STEP'])) ysub[1]=int((Latsub[0]-float(r['Y_FIRST']))/float(r['Y_STEP'])) print 'Subseting geocoded',ext,' file with Latitude and Longitude...' except: print 'Not geocoded file, cannot be subseted with LatLon.' Usage() ; sys.exit(1) except: Geo=0 box = (xsub[0],ysub[0],xsub[1],ysub[1]) output_img = im.crop(box) print 'writing >>> '+outName output_img.save(outName) # try: # r=readfile.read_rsc_file(File+'.rsc') # except: # sys.exit(1) r['FILE_LENGTH']=str(ysub[1]-ysub[0]) r['WIDTH']=str(xsub[1]-xsub[0]) r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) r['subset_x0']=str(xsub[0]) r['subset_x1']=str(xsub[1]) r['subset_y0']=str(ysub[0]) r['subset_y1']=str(ysub[1]) try: r['Y_FIRST']=str(float(r['Y_FIRST'])+ysub[0]*float(r['Y_STEP'])) r['X_FIRST']=str(float(r['X_FIRST'])+xsub[0]*float(r['X_STEP'])) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close()
def main(argv): try: opts, args = getopt.getopt(argv, "h:f:t:p:") except getopt.GetoptError: Usage() sys.exit(1) if opts == []: Usage() sys.exit(1) for opt, arg in opts: if opt in ("-h", "--help"): Usage() sys.exit() elif opt == "-f": file = arg elif opt == "-t": filtType = arg elif opt == "-p": par = arg # try: # file=argv[0] # alks=float(argv[1]) # rlks=float(argv[2]) # except: # Usage();sys.exit(1) ext = os.path.splitext(file)[1] outName = file.split(".")[0] + "_" + filtType + ext try: par except: par = [] print "+++++++++++++++++++++++++++" print "Filter type : " + filtType print "parameters : " + str(par) print "+++++++++++++++++++++++++++" ############################################### if ext == ".int" or ext == ".slc": a, p, r = readfile.read_complex64(file) plks = multilook(p, alks, rlks) alks = multilook(a, alks, rlks) r["FILE_LENGTH"] = str(dlks.shape[0]) r["WIDTH"] = str(dlks.shape[1]) r["XMAX"] = str(int(r["WIDTH"]) - 1) r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1) try: r["Y_STEP"] = str(float(r["Y_STEP"]) * alks) r["X_STEP"] = str(float(r["X_STEP"]) * rlks) except: Geo = 0 f = open(outName + ".rsc", "w") for k in r.keys(): f.write(k + " " + r[k] + "\n") f.close() elif ext == ".unw" or ext == ".cor" or ext == ".hgt": a, p, r = readfile.read_float32(file) plks = multilook(p, alks, rlks) alks = multilook(a, alks, rlks) writefile.write_float32(plks, outName) r["FILE_LENGTH"] = str(dlks.shape[0]) r["WIDTH"] = str(dlks.shape[1]) r["XMAX"] = str(int(r["WIDTH"]) - 1) r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1) try: r["Y_STEP"] = str(float(r["Y_STEP"]) * alks) r["X_STEP"] = str(float(r["X_STEP"]) * rlks) except: Geo = 0 f = open(outName + ".rsc", "w") for k in r.keys(): f.write(k + " " + r[k] + "\n") f.close() elif ext == (".dem"): d, r = readfile.read_dem(file) dlks = multilook(d, alks, rlks) print "writing " + outName writefile.write_dem(dlks, outName) r["FILE_LENGTH"] = str(dlks.shape[0]) r["WIDTH"] = str(dlks.shape[1]) r["XMAX"] = str(int(r["WIDTH"]) - 1) r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1) try: r["Y_STEP"] = str(float(r["Y_STEP"]) * alks) r["X_STEP"] = str(float(r["X_STEP"]) * rlks) except: Geo = 0 f = open(outName + ".rsc", "w") for k in r.keys(): f.write(k + " " + r[k] + "\n") f.close() elif ext in [".jpeg", "jpg", "png"]: import Image im = Image.open(file) width = im.size[0] / int(rlks) height = im.size[1] / int(alks) imlks = im.resize((width, height), Image.NEAREST) print "writing " + outName imlks.save(outName) try: r = readfile.read_rsc_file(file + ".rsc") except: sys.exit(1) r["FILE_LENGTH"] = str(height) r["WIDTH"] = str(width) r["XMAX"] = str(int(r["WIDTH"]) - 1) r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1) try: r["Y_STEP"] = str(float(r["Y_STEP"]) * alks) r["X_STEP"] = str(float(r["X_STEP"]) * rlks) except: Geo = 0 f = open(outName + ".rsc", "w") for k in r.keys(): f.write(k + " " + r[k] + "\n") f.close() elif ext == (".h5"): h5file = h5py.File(file, "r") # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5' h5file_lks = h5py.File(outName, "w") if "interferograms" in h5file.keys(): print "Filtering the interferograms in space" gg = h5file_lks.create_group("interferograms") igramList = h5file["interferograms"].keys() for igram in igramList: print igram unwSet = h5file["interferograms"][igram].get(igram) unw = unwSet[0 : unwSet.shape[0], 0 : unwSet.shape[1]] unw = filter(unw, filtType, par) group = gg.create_group(igram) dset = group.create_dataset(igram, data=unw, compression="gzip") for key, value in 
h5file["interferograms"][igram].attrs.iteritems(): group.attrs[key] = value dset1 = h5file["mask"].get("mask") mask = dset1[0 : dset1.shape[0], 0 : dset1.shape[1]] group = h5file_lks.create_group("mask") dset = group.create_dataset("mask", data=mask, compression="gzip") elif "timeseries" in h5file.keys(): print "Filtering the time-series" group = h5file_lks.create_group("timeseries") dateList = h5file["timeseries"].keys() for d in dateList: print d dset1 = h5file["timeseries"].get(d) data = dset1[0 : dset1.shape[0], 0 : dset1.shape[1]] data = filter(data, filtType, par) dset = group.create_dataset(d, data=data, compression="gzip") for key, value in h5file["timeseries"].attrs.iteritems(): group.attrs[key] = value try: dset1 = h5file["mask"].get("mask") Mask = dset1[0 : dset1.shape[0], 0 : dset1.shape[1]] # Masklks=multilook(Mask,alks,rlks) group = h5file_lks.create_group("mask") dset = group.create_dataset("mask", data=Mask, compression="gzip") except: print "Filterd file does not include the maske" elif "temporal_coherence" in h5file.keys() or "velocity" in h5file.keys() or "mask" in h5file.keys(): k = h5file.keys() print "filtering the " + k[0] group = h5file_lks.create_group(k[0]) dset1 = h5file[k[0]].get(k[0]) data = dset1[0 : dset1.shape[0], 0 : dset1.shape[1]] data = filter(data, filtType, par) dset = group.create_dataset(k[0], data=data, compression="gzip") for key, value in h5file[k[0]].attrs.iteritems(): group.attrs[key] = value h5file.close() h5file_lks.close()
def main(argv): try: opts, args = getopt.getopt(argv,"h:D:O:G:S:f:m:M:l:u:s:c:e:d:r:p:w:i:j:t:R:a:b:k:x:y:") except getopt.GetoptError: Usage() ; sys.exit(1) flip_lr='no' flip_ud='no' disp_geo = 'no' font_size=8 color_map='jet' figs_rows=5 figs_cols=8 rewrapping='yes' allData2display='yes' Wspace = 0.1 Hspace = 0.1 title = 'out' # title = 'None' showRef = 'yes' ref_color='k' ref_symbol='s' ref_size =10 dip_opposite = 'no' saveFig='no' if opts==[]: Usage() ; sys.exit(1) for opt,arg in opts: if opt in ("-h","--help"): Usage() sys.exit() elif opt == '-f': File = arg elif opt == '-D': demFile=arg elif opt == '-m': min = float(arg) elif opt == '-M': max = float(arg) elif opt == '-l': flip_lr = arg elif opt == '-u': flip_ud = arg elif opt == '-s': font_size = int(arg) elif opt == '-c': color_map = arg elif opt == '-e': epoch_number = int(arg) allData2display='no' elif opt == '-d': epoch_date = arg allData2display='no' elif opt == '-r': figs_rows = int(arg) elif opt == '-p': figs_cols = int(arg) elif opt == '-w': rewrapping = arg elif opt == '-i': Wspace = float(arg) elif opt == '-j': Hspace = float(arg) elif opt == '-t': title = arg elif opt == '-R': showRef = arg elif opt == '-a': ref_color = arg elif opt == '-b': ref_symbol = arg elif opt == 'k': ref_size=int(arg) elif opt == '-x': win_x = arg elif opt == '-y': win_y = arg elif opt == '-G': disp_geo = arg elif opt == '-O': dip_opposite=arg elif opt=='-S': saveFig=arg h5file=h5py.File(File,'r') k=h5file.keys() print k if color_map == 'hsv': ################################################ cdict1 = {'red': ((0.0, 0.0, 0.0), (0.5, 0.0, 0.0), (0.6, 1.0, 1.0), (0.8, 1.0, 1.0), (1.0, 0.5, 0.5)), 'green': ((0.0, 0.0, 0.0), (0.2, 0.0, 0.0), (0.4, 1.0, 1.0), (0.6, 1.0, 1.0), (0.8, 0.0, 0.0), (1.0, 0.0, 0.0)), 'blue': ((0.0, 0.5, .5), (0.2, 1.0, 1.0), (0.4, 1.0, 1.0), (0.5, 0.0, 0.0), (1.0, 0.0, 0.0),) } ccmap = LinearSegmentedColormap('BlueRed1', cdict1) ################################################ else: ccmap=plt.get_cmap(color_map) #################################################################### #################################################################### # if k[0]=='velocity' or k[0]=='temporal_coherence' or k[0]=='rmse': if len(k)==1 and k[0] in ('dem','velocity','mask','temporal_coherence','rmse'): dset = h5file[k[0]].get(k[0]) data=dset[0:dset.shape[0],0:dset.shape[1]] if dip_opposite in('yes','Yes','Y','y','YES'): data=-1*data try: xref=h5file[k[0]].attrs['ref_x'] yref=h5file[k[0]].attrs['ref_y'] except: print 'No reference point' # Yunjun, Mar 2015 try: xref=xref-h5file[k[0]].attrs['subset_x0'] yref=yref-h5file[k[0]].attrs['subset_y0'] except: print 'No subset' try: ullon=float(h5file[k[0]].attrs['X_FIRST']) ullat=float(h5file[k[0]].attrs['Y_FIRST']) lon_step=float(h5file[k[0]].attrs['X_STEP']) lat_step=float(h5file[k[0]].attrs['Y_STEP']) lon_unit=h5file[k[0]].attrs['Y_UNIT'] lat_unit=h5file[k[0]].attrs['X_UNIT'] llcrnrlon=ullon llcrnrlat=ullat+lat_step*data.shape[0] urcrnrlon=ullon+lon_step*data.shape[1] urcrnrlat=ullat geocoord='yes' print 'Input file is Geocoded' except: geocoord='no' try: win_x wx=[int(i) for i in win_x.split()] data=data[:,wx[0]:wx[1]] xref = xref-wx[0] except: print 'No subste in x direction' try: win_y wy=[int(i) for i in win_y.split()] data=data[wy[0]:wy[1],:] yref = yref-wy[0] except: print 'No subset in y direction' try: min except: min=np.nanmin(data) try: max except: max=np.nanmax(data) if flip_lr=='yes': data=np.fliplr(data) xref=np.shape(data)[1]-xref-1 if flip_ud=='yes': data=np.flipud(data) 
yref=np.shape(data)[0]-yref-1 try: demFile # amp,dem,demRsc = readfile.read_float32(demFile) if os.path.basename(demFile).split('.')[1]=='hgt': amp,dem,demRsc = readfile.read_float32(demFile) elif os.path.basename(demFile).split('.')[1]=='dem': dem,demRsc = readfile.read_dem(demFile) try: win_x wx=[int(i) for i in win_x.split()] dem=dem[:,wx[0]:wx[1]] except: print '' try: win_y wy=[int(i) for i in win_y.split()] dem=dem[wy[0]:wy[1],:] except: print '' if flip_lr=='yes': dem=np.fliplr(dem) if flip_ud=='yes': dem=np.flipud(dem) cmap_dem=plt.get_cmap('gray') if disp_geo in ('yes','Yes','Y','y','YES') and geocoord in ('yes','Yes','Y','y','YES'): print 'display geo' # from mpl_toolkits.basemap import Basemap # m = Basemap(llcrnrlon=llcrnrlon, llcrnrlat=llcrnrlat, urcrnrlon=urcrnrlon, urcrnrlat=urcrnrlat,resolution='f', area_thresh=1., projection='cyl') # m.imshow(ut.hillshade(dem,50.0), interpolation='nearest', origin='upper') # m.drawcoastlines(color='w',linewidth=0.8) # m.drawmapboundary() # draw a line around the map region # m.drawrivers() # m.drawparallels(numpy.arange(int(d1.min()), int(d1.max()), 1),linewidth=0.2,labels=[1,0,0,0]) # m.drawmeridians(numpy.arange(int(d0.min()), int(d0.max()), 1),linewidth=0.2,labels=[0,0,0,1]) else: print 'Not GEO' plt.imshow(ut.hillshade(dem,50.0),cmap=cmap_dem) except: print 'No DEM file' plt.imshow(data,cmap=ccmap, vmin=min, vmax=max) plt.colorbar() if k[0]=='velocity': plt.title('Velocity (m/yr)',fontsize=font_size) figName='velocity.pdf' elif k[0]=='temporal_coherence': plt.title('Temporal coherence',fontsize=font_size) figName='temporal_coherence.pdf' elif k[0]=='dem': plt.title('DEM error',fontsize=font_size) figName='DEM_error.pdf' elif k[0]=='rmse': plt.title('RMSE (m/yr)',fontsize=font_size) figName='rmse.pdf' elif k[0]=='mask': plt.title('Pixels with no valid value.',fontsize=font_size) figName='mask.pdf' if showRef=='yes': try: refPoint=ref_color+ref_symbol plt.plot(xref,yref,refPoint,ms=ref_size) except: print 'No reference point' plt.xlim(0,np.shape(data)[1]) plt.ylim(np.shape(data)[0],0) if saveFig=='yes': plt.savefig(figName) plt.show() # plt.savefig('fig.pdf') # fig = plt.figure() # ax.imshow(data,vmin=min, vmax=max) # ax.xaxis.label.set_fontsize(40) #################################################################### #################################################################### if 'timeseries' in k and allData2display=='yes': if rewrapping=='yes': print 'rewrapping' dateList=h5file['timeseries'].keys() nfigs=figs_rows*figs_cols ligram = len(dateList) range2phase=4*np.pi/float(h5file['timeseries'].attrs['WAVELENGTH']) # range2phase=4*np.pi/0.056 print 'number of timeseries epochs to display:'+ str(ligram) kk=int(ligram/nfigs)+1 ii=0 for j in range(1,kk): fig = plt.figure(j) ii=(j-1)*nfigs+1 for i in range(ii,ii+nfigs): ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1) dset=h5file['timeseries'].get(dateList[i-1]) data = dset[0:dset.shape[0],0:dset.shape[1]] data=range2phase*data # data=np.angle(np.exp(1j*data)) data=rewrap(data) ax.imshow(data,cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title=='out': ax.set_title(dateList[i-1],fontsize=font_size) elif title=='in': add_inner_title(ax, dateList[i-1], loc=1) fig.subplots_adjust(wspace=Wspace,hspace=Hspace) figName=k[0]+'_'+str(j)+'.pdf' if saveFig in ['yes','Yes','y','YES']: plt.savefig(figName) fig = plt.figure(kk) ii=(kk-1)*nfigs+1 for i in range(ii,ligram+1): ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1) 
dset=h5file['timeseries'].get(dateList[i-1]) data = dset[0:dset.shape[0],0:dset.shape[1]] data=range2phase*data # data=np.angle(np.exp(1j*data)) data=rewrap(data) ax.imshow(data,cmap=ccmap) ax.xaxis.label.set_fontsize(20) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title=='out': ax.set_title(dateList[i-1],fontsize=font_size) elif title =='in': add_inner_title(ax, dateList[i-1], loc=1) fig.subplots_adjust(wspace=Wspace,hspace=Hspace) figName=k[0]+'_'+str(kk)+'.pdf' if saveFig in ['yes','Yes','y','YES']: plt.savefig(figName) plt.show() else: print 'No rewrapping' dateList=h5file['timeseries'].keys() nfigs=figs_rows*figs_cols ligram = len(dateList) print 'number of timeseries epochs to display:'+ str(ligram) kk=int(ligram/nfigs)+1 ii=0 for j in range(1,kk): fig = plt.figure(j) ii=(j-1)*nfigs+1 for i in range(ii,ii+nfigs): ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1) data=h5file['timeseries'].get(dateList[i-1]) try: im=ax.imshow(data,cmap=ccmap,vmin=min,vmax=max) # print 'here' except: im=ax.imshow(data,cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title=='out': ax.set_title(dateList[i-1],fontsize=font_size) elif title=='in': add_inner_title(ax, dateList[i-1], loc=1) fig.subplots_adjust(wspace=Wspace,hspace=Hspace) fig = plt.figure(kk) ii=(kk-1)*nfigs+1 for i in range(ii,ligram+1): ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1) data=h5file['timeseries'].get(dateList[i-1]) try: im=ax.imshow(data,cmap=ccmap,vmin=min,vmax=max) except: im=ax.imshow(data,cmap=ccmap) ax.xaxis.label.set_fontsize(20) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title=='out': ax.set_title(dateList[i-1],fontsize=font_size) if title=='in': add_inner_title(ax, dateList[i-1], loc=1) fig.subplots_adjust(wspace=Wspace,hspace=Hspace) plt.show() #################################################################### #################################################################### elif 'timeseries' in k and allData2display=='no': dateList=h5file['timeseries'].keys() try: epoch_number except: epoch_number=dateList.index(epoch_date) range2phase=4*np.pi/float(h5file['timeseries'].attrs['WAVELENGTH']) # range2phase=4*np.pi/0.056 dset=h5file['timeseries'].get(dateList[epoch_number]) data = dset[0:dset.shape[0],0:dset.shape[1]] if rewrapping=='yes': data=range2phase*data # data=np.angle(np.exp(1j*data)) data=rewrap(data) try: min except: min=np.nanmin(data) try: max except: max=np.nanmax(data) plt.imshow(data,cmap=ccmap,vmin=min,vmax=max) plt.colorbar() plt.show() ################################################################ ################################################################ if k[0]in('interferograms','coherence','wrapped') and allData2display=='yes': if k[0] in ('coherence','wrapped'): rewrapping='no' # color_map = 'gray' # ccmap=plt.get_cmap(color_map) if rewrapping=='yes': ifgramList=h5file[k[0]].keys() nfigs=figs_rows*figs_cols ligram = len(ifgramList) print 'number of '+k[0]+' to display:'+ str(ligram) kk=int(ligram/nfigs)+1 ii=0 for j in range(1,kk): fig = plt.figure(j) ii=(j-1)*nfigs+1 for i in range(ii,ii+nfigs): ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1) dset = h5file[k[0]][ifgramList[i-1]].get(ifgramList[i-1]) data = dset[0:dset.shape[0],0:dset.shape[1]] data=np.angle(np.exp(1j*data)) ax.imshow(data,cmap=ccmap) ax.set_yticklabels([]) ax.set_xticklabels([]) ax.set_xticks([]) ax.set_yticks([]) if title=='out': 
                        ax.set_title(h5file[k[0]][ifgramList[i-1]].attrs['DATE12'],fontsize=font_size)
                    elif title=='in':
                        add_inner_title(ax, h5file[k[0]][ifgramList[i-1]].attrs['DATE12'], loc=1)
                fig.subplots_adjust(wspace=Wspace,hspace=Hspace)
            fig = plt.figure(kk)
            ii=(kk-1)*nfigs+1
            for i in range(ii,ligram+1):
                ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1)
                dset = h5file[k[0]][ifgramList[i-1]].get(ifgramList[i-1])
                data = dset[0:dset.shape[0],0:dset.shape[1]]
                data = np.angle(np.exp(1j*data))
                ax.imshow(data,cmap=ccmap)
                ax.set_yticklabels([])
                ax.set_xticklabels([])
                ax.set_xticks([])
                ax.set_yticks([])
                if title=='out':
                    ax.set_title(h5file[k[0]][ifgramList[i-1]].attrs['DATE12'],fontsize=font_size)
                elif title=='in':
                    add_inner_title(ax, h5file[k[0]][ifgramList[i-1]].attrs['DATE12'], loc=1)
            fig.subplots_adjust(wspace=Wspace,hspace=Hspace)
            plt.show()

        else:
            ifgramList = h5file[k[0]].keys()
            nfigs  = figs_rows*figs_cols
            ligram = len(ifgramList)
            print 'number of '+k[0]+' to display:'+ str(ligram)
            kk = int(ligram/nfigs)+1
            ii = 0
            for j in range(1,kk):
                fig = plt.figure(j)
                ii = (j-1)*nfigs+1
                for i in range(ii,ii+nfigs):
                    ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1)
                    print 'loading '+ifgramList[i-1]
                    dset = h5file[k[0]][ifgramList[i-1]].get(ifgramList[i-1])
                    data = dset[0:dset.shape[0],0:dset.shape[1]]
                    try:    ax.imshow(data,vmin=min,vmax=max,cmap=ccmap)
                    except: ax.imshow(data,cmap=ccmap)
                    ax.set_yticklabels([])
                    ax.set_xticklabels([])
                    ax.set_xticks([])
                    ax.set_yticks([])
                    if title=='out':
                        ax.set_title(h5file[k[0]][ifgramList[i-1]].attrs['DATE12'],fontsize=font_size)
                    elif title=='in':
                        add_inner_title(ax, h5file[k[0]][ifgramList[i-1]].attrs['DATE12'], loc=1)
                fig.subplots_adjust(wspace=Wspace,hspace=Hspace)
            fig = plt.figure(kk)
            ii=(kk-1)*nfigs+1
            for i in range(ii,ligram+1):
                ax = fig.add_subplot(figs_rows,figs_cols,i-ii+1)
                print 'loading '+ifgramList[i-1]
                dset = h5file[k[0]][ifgramList[i-1]].get(ifgramList[i-1])
                data = dset[0:dset.shape[0],0:dset.shape[1]]
                #data = h5file[k[0]][ifgramList[i-1]].get(ifgramList[i-1])
                try:    ax.imshow(data,vmin=min,vmax=max,cmap=ccmap)
                except: ax.imshow(data,cmap=ccmap)
                ax.set_yticklabels([])
                ax.set_xticklabels([])
                ax.set_xticks([])
                ax.set_yticks([])
                if title=='out':
                    ax.set_title(h5file[k[0]][ifgramList[i-1]].attrs['DATE12'],fontsize=font_size)
                elif title=='in':
                    add_inner_title(ax, h5file[k[0]][ifgramList[i-1]].attrs['DATE12'], loc=1)
            fig.subplots_adjust(wspace=Wspace,hspace=Hspace)
            plt.show()

    ####################################################################
    ####################################################################

    elif k[0] in ('interferograms','coherence','wrapped') and allData2display=='no':
        # coherence and wrapped files are displayed without rewrapping
        if k[0] in ('coherence','wrapped'):
            rewrapping = 'no'

        ifgramList = h5file[k[0]].keys()
        try:
            epoch_number
        except:
            for i in range(len(ifgramList)):
                if epoch_date in ifgramList[i]:
                    epoch_number = i
        dset = h5file[k[0]][ifgramList[epoch_number]].get(ifgramList[epoch_number])
        data = dset[0:dset.shape[0],0:dset.shape[1]]
        if rewrapping=='yes':
            data = np.angle(np.exp(1j*data))
        if dip_opposite in ('yes','Yes','Y','y','YES'):
            data = -1*data

        # DEM basemap
        try:
            demFile
            if os.path.basename(demFile).split('.')[1]=='hgt':
                amp,dem,demRsc = readfile.read_float32(demFile)
            elif os.path.basename(demFile).split('.')[1]=='dem':
                dem,demRsc = readfile.read_dem(demFile)
            try:
                win_x
                wx = [int(i) for i in win_x.split()]
                dem = dem[:,wx[0]:wx[1]]
            except:
                print ''
            try:
                win_y
                wy = [int(i) for i in win_y.split()]
                dem = dem[wy[0]:wy[1],:]
            except:
                print ''
            if flip_lr=='yes':  dem = np.fliplr(dem)
            if flip_ud=='yes':  dem = np.flipud(dem)
            cmap_dem = plt.get_cmap('gray')
            if disp_geo in ('yes','Yes','Y','y','YES') and geocoord in ('yes','Yes','Y','y','YES'):
                print 'display geo'
            else:
                print 'Not GEO'
                plt.imshow(ut.hillshade(dem,50.0),cmap=cmap_dem)
        except:
            print 'No DEM file'

        try:    plt.imshow(data,cmap=ccmap,vmin=min,vmax=max)
        except: plt.imshow(data,cmap=ccmap)
        plt.colorbar()
        # plt.title(h5file[k[0]][ifgramList[epoch_number]].attrs['DATE12'],fontsize=font_size)
        plt.title(ifgramList[epoch_number],fontsize=font_size)
        plt.show()

    ################################################################
    ################################################################

    h5file.close()
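# ---------------------------------------------------------------------------
# Note: the display code above calls a rewrap() helper that is defined
# elsewhere in this package.  A minimal sketch of such a helper, assuming it
# only wraps unwrapped phase back into (-pi, pi], exactly as the commented-out
# np.angle(np.exp(1j*data)) alternative in the loops above does:
import numpy as np

def rewrap(unw):
    """Wrap unwrapped phase (radians) back into the interval (-pi, pi]."""
    return np.angle(np.exp(1j * unw))
# ---------------------------------------------------------------------------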
def main(argv): #default settings markerSize=16 markerSize2=16 markerColor='g' markerColor2='red' lineWidth=2 fontSize=16 unit='cm' Save_timeseries='no' dispTsFig='yes' dispVelFig='yes' dispContour='only' contour_step=200 smoothContour='no' radius=0; edgeWidth=1.5 fig_dpi=300 if len(sys.argv)>2: try: opts, args = getopt.getopt(argv,"f:F:v:a:b:s:m:c:w:u:l:h:S:D:C:V:t:T:d:r:x:y:P:p:") except getopt.GetoptError: Usage() ; sys.exit(1) for opt,arg in opts: if opt == '-f': timeSeriesFile = arg elif opt == '-F': timeSeriesFile_2 = arg elif opt == '-v': velocityFile = arg elif opt == '-a': vmin = float(arg) elif opt == '-b': vmax = float(arg) elif opt == '-s': fontSize = int(arg) elif opt == '-m': markerSize=int(arg); markerSize2=int(arg) elif opt == '-S': Save_timeseries=arg elif opt == '-c': markerColor=arg elif opt == '-w': lineWidth=int(arg) elif opt == '-u': unit=arg elif opt == '-l': lbound=float(arg) elif opt == '-h': hbound=float(arg) elif opt == '-D': demFile=arg elif opt == '-C': dispContour=arg elif opt == '-V': contour_step=float(arg) elif opt == '-t': minDate=arg elif opt == '-T': maxDate=arg elif opt == '-d': datesNot2show = arg.split() elif opt == '-r': radius=abs(int(arg)) elif opt == '-x': xsub = [int(i) for i in arg.split(':')]; xsub.sort(); dispVelFig='no' elif opt == '-y': ysub = [int(i) for i in arg.split(':')]; ysub.sort(); dispVelFig='no' elif opt == '-P': dispTsFig=arg elif opt == '-p': dispVelFig=arg elif len(sys.argv)==2: if argv[0]=='-h': Usage(); sys.exit(1) elif os.path.isfile(argv[0]): timeSeriesFile = argv[0] h5timeseries = h5py.File(timeSeriesFile) if not 'timeseries' in h5timeseries.keys(): print 'ERROR' Usage(); sys.exit(1) else: Usage(); sys.exit(1) elif len(sys.argv)<2: Usage(); sys.exit(1) if unit in ('m','M'): unitFac=1 elif unit in ('cm','Cm','CM'): unitFac=100 elif unit in ('mm','Mm','MM','mM'): unitFac=1000 else: print 'Warning:' print 'wrong unit input!' 
print 'cm is considered to display the displacement' ############################################################## # Read time series file info if not os.path.isfile(timeSeriesFile): Usage();sys.exit(1) h5timeseries = h5py.File(timeSeriesFile) if not 'timeseries' in h5timeseries.keys(): Usage(); sys.exit(1) dateList1 = h5timeseries['timeseries'].keys() ############################################################## # Dates to show time series plot import matplotlib.dates as mdates years = mdates.YearLocator() # every year months = mdates.MonthLocator() # every month yearsFmt = mdates.DateFormatter('%Y') print '*******************' print 'All dates existed:' print dateList1 print '*******************' try: datesNot2show print 'dates not to show: '+str(datesNot2show) except: datesNot2show=[] try: minDate minDateyy=yyyymmdd2years(minDate) print 'minimum date: '+minDate for date in dateList1: yy=yyyymmdd2years(date) if yy < minDateyy: datesNot2show.append(date) except: pass try: maxDate maxDateyy=yyyymmdd2years(maxDate) print 'maximum date: '+maxDate for date in dateList1: yy=yyyymmdd2years(date) if yy > maxDateyy: datesNot2show.append(date) except: pass try: dateList=[] for date in dateList1: if date not in datesNot2show: dateList.append(date) print '--------------------------------------------' print 'dates used to show time series displacements:' print dateList print '--------------------------------------------' except: dateList=dateList1 print 'using all dates to show time series displacement' ################################################################### # Date info dateIndex={} for ni in range(len(dateList)): dateIndex[dateList[ni]]=ni tbase=[] d1 = datetime.datetime(*time.strptime(dateList[0],"%Y%m%d")[0:5]) for ni in range(len(dateList)): d2 = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5]) diff = d2-d1 tbase.append(diff.days) dates=[] for ni in range(len(dateList)): d = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5]) dates.append(d) datevector=[] for i in range(len(dates)): datevector.append(np.float(dates[i].year) + np.float(dates[i].month-1)/12 + np.float(dates[i].day-1)/365) datevector2=[round(i,2) for i in datevector] ########################################### # Plot Fig 1 - Velocity / last epoch of time series / DEM import matplotlib.pyplot as plt if dispVelFig in ('yes','Yes','y','Y','YES'): fig = plt.figure() ax=fig.add_subplot(111) try: velocityFile h5file=h5py.File(velocityFile,'r') k=h5file.keys() dset= h5file[k[0]].get(k[0]) print 'display: ' + k[0] except: dset = h5timeseries['timeseries'].get(h5timeseries['timeseries'].keys()[-1]) print 'display: last epoch of timeseries' #DEM/contour option try: demFile import _readfile as readfile if os.path.basename(demFile).split('.')[1]=='hgt': amp,dem,demRsc = readfile.read_float32(demFile) elif os.path.basename(demFile).split('.')[1]=='dem': dem,demRsc = readfile.read_dem(demFile) if dispContour in ('no','No','n','N','NO','yes','Yes','y','Y','YES'): print 'show DEM as basemap' cmap_dem=plt.get_cmap('gray') import _pysar_utilities as ut plt.imshow(ut.hillshade(dem,50.0),cmap=cmap_dem) if dispContour in ('only','Only','o','O','ONLY','yes','Yes','y','Y','YES'): print 'show contour' if smoothContour in ('yes','Yes','y','Y','YES'): import scipy.ndimage as ndimage dem=ndimage.gaussian_filter(dem,sigma=10.0,order=0) contour_sequence=np.arange(-6000,9000,contour_step) plt.contour(dem,contour_sequence,origin='lower',colors='black',alpha=0.5) except: print 'No DEM file' try: 
img=ax.imshow(dset,vmin=vmin,vmax=vmax) except: img=ax.imshow(dset) import matplotlib.patches as patches # need for draw rectangle of points selected on VelFig ########################################## # Plot Fig 2 - Time series plot import scipy.stats as stats fig2 = plt.figure(2) ax2=fig2.add_subplot(111) try: timeSeriesFile_2 h5timeseries_2=h5py.File(timeSeriesFile_2) print 'plot 2nd time series' except: pass ########### Plot Time Series with x/y ########## try: xsub ysub try: xmin=xsub[0]; xmax=xsub[1]+1; print 'x='+str(xsub[0])+':'+str(xsub[1]) except: xmin=xsub[0]-radius; xmax=xsub[0]+radius+1; print 'x='+str(xsub[0])+'+/-'+str(radius) try: ymin=ysub[0]; ymax=ysub[1]+1; print 'y='+str(ysub[0])+':'+str(ysub[1]) except: ymin=ysub[0]-radius; ymax=ysub[0]+radius+1; print 'y='+str(ysub[0])+'+/-'+str(radius) try: fig rectSelect=patches.Rectangle((xmin,ymin),radius*2+1,radius*2+1,fill=False,lw=edgeWidth) ax.add_patch(rectSelect) except: pass Dis=[] for date in dateList: Dis.append(h5timeseries['timeseries'].get(date)[ymin:ymax,xmin:xmax]) Dis0=array(Dis) dis=Dis0*unitFac dis=reshape(dis,(len(dateList),-1)) dis_mean=stats.nanmean(dis,1) if (xmax-xmin)*(ymax-ymin)==1: dis_std=[0]*len(dateList) else: dis_std=stats.nanstd(dis,1) (_, caps, _)=ax2.errorbar(dates,dis_mean,yerr=dis_std,fmt='-ko',\ ms=markerSize, lw=lineWidth, alpha=1, mfc=markerColor,\ elinewidth=edgeWidth,ecolor='black',capsize=markerSize*0.5) for cap in caps: cap.set_markeredgewidth(edgeWidth) print dis_mean # x axis format ax2.fmt_xdata = DateFormatter('%Y-%m-%d %H:%M:%S') if unitFac==100: ax2.set_ylabel('Displacement [cm]',fontsize=fontSize) elif unitFac==1000: ax2.set_ylabel('Displacement [mm]',fontsize=fontSize) else: ax2.set_ylabel('Displacement [m]' ,fontsize=fontSize) ax2.set_xlabel('Time [years]',fontsize=fontSize) ax2.set_title('x='+str(xmin)+':'+str(xmax-1)+', y='+str(ymin)+':'+str(ymax-1)) ax2.xaxis.set_major_locator(years) ax2.xaxis.set_major_formatter(yearsFmt) ax2.xaxis.set_minor_locator(months) datemin = datetime.date(int(datevector[0]),1,1) datemax = datetime.date(int(datevector[-1])+1,1,1) ax2.set_xlim(datemin, datemax) # y axis format try: lbound hbound ax2.set_ylim(lbound,hbound) except: ax2.set_ylim(nanmin(dis_mean-dis_std)-0.4*abs(nanmin(dis_mean)),\ nanmax(dis_mean+dis_std)+0.4*abs(nanmax(dis_mean))) for tick in ax2.xaxis.get_major_ticks(): tick.label.set_fontsize(fontSize) for tick in ax2.yaxis.get_major_ticks(): tick.label.set_fontsize(fontSize) #fig2.autofmt_xdate() #adjust x overlap by rorating, may enble again if Save_timeseries in ('yes','Yes','Y','y','YES'): import scipy.io as sio Delay={} Delay['displacement']=Dis0 Delay['unit']='m' Delay['time']=datevector tsNameBase='ts_x'+str(xmin)+'_'+str(xmax-1)+'y'+str(ymin)+'_'+str(ymax-1) sio.savemat(tsNameBase+'.mat', {'displacement': Delay}) print 'saved data to '+tsNameBase+'.mat' plt.savefig(tsNameBase+'.pdf',dpi=fig_dpi) print 'saved plot to '+tsNameBase+'.pdf'
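# ---------------------------------------------------------------------------
# Note: the date filtering above calls yyyymmdd2years(), which is defined
# elsewhere in this package.  A minimal sketch under the same convention used
# to build datevector above (months as twelfths, days as 1/365 of a year):
import datetime

def yyyymmdd2years(date_str):
    """Convert a 'YYYYMMDD' string to decimal years (sketch)."""
    d = datetime.datetime.strptime(date_str, '%Y%m%d')
    return d.year + (d.month - 1) / 12.0 + (d.day - 1) / 365.0
# ---------------------------------------------------------------------------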
      correlation_with_dem.py radar_8rlks.hgt velocity.h5

  ***********************************************************************
  ***********************************************************************
    '''

    try:
        demFile = sys.argv[1]
        File    = sys.argv[2]
    except:
        Usage()
        sys.exit(1)

    if os.path.basename(demFile).split('.')[1] == 'hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1] == 'dem':
        dem, demRsc = readfile.read_dem(demFile)
    # amp,dem,demRsc = readfile.read_float32(demFile)

    h5data = h5py.File(File)
    dset = h5data['velocity'].get('velocity')
    data = dset[0:dset.shape[0], 0:dset.shape[1]]

    try:
        suby = sys.argv[3].split(':')
        subx = sys.argv[4].split(':')
        data = data[int(suby[0]):int(suby[1]), int(subx[0]):int(subx[1])]
        dem  = dem[int(suby[0]):int(suby[1]), int(subx[0]):int(subx[1])]
    except:
        pass  # no subset given; keep the full extent
def main(argv):
    try:
        File    = argv[0]
        demFile = argv[1]
        p       = int(argv[2])
    except:
        usage()
        sys.exit(1)

    try:
        baseline_error = argv[3]
    except:
        baseline_error = 'range_and_azimuth'
    print(baseline_error)

    ##################################
    h5file = h5py.File(File)
    dateList = list(h5file['timeseries'].keys())

    ##################################
    try:
        maskFile = argv[4]
    except:
        if os.path.isfile('Modified_Mask.h5'):
            maskFile = 'Modified_Mask.h5'
        elif os.path.isfile('Mask.h5'):
            maskFile = 'Mask.h5'
        else:
            print('No mask found!')
            sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print('mask: ' + maskFile)
    except:
        print('Can not open mask file: ' + maskFile)
        sys.exit(1)

    #try:
    #    maskFile=argv[4]
    #    h5Mask = h5py.File(maskFile,'r')
    #    kMask=h5Mask.keys()
    #    dset1 = h5Mask[kMask[0]].get(kMask[0])
    #    Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
    #except:
    #    dset1 = h5file['mask'].get('mask')
    #    Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]

    ##################################
    sy, sx = np.shape(Mask)      # image dimensions (rows, cols), taken before flattening
    Mask = Mask.flatten(1)
    ndx = Mask != 0

    ##################################
    # h5file = h5py.File(File)
    # dateList = h5file['timeseries'].keys()
    ##################################
    nt = float(h5file['timeseries'].attrs['LOOK_REF1'])
    ft = float(h5file['timeseries'].attrs['LOOK_REF2'])
    npixel = sx * sy
    lookangle = np.tile(np.linspace(nt, ft, sx), [sy, 1])
    lookangle = lookangle.flatten(1) * np.pi / 180.0
    Fh = -np.sin(lookangle)
    Fv = -np.cos(lookangle)

    print('Looking for azimuth pixel size')
    try:
        daz = float(h5file['timeseries'].attrs['AZIMUTH_PIXEL_SIZE'])
    except:
        print('''
        ERROR!
        The attribute AZIMUTH_PIXEL_SIZE was not found!
        Possible cause of error: Geo coordinate.
        This function works only in radar coordinate system.
        ''')
        sys.exit(1)

    lines = np.tile(np.arange(0, sy, 1), [1, sx])
    lines = lines.flatten(1)
    rs = lines * daz

    if baseline_error == 'range_and_azimuth':
        A = np.zeros([npixel, 4])
        A[:, 0] = Fh
        A[:, 1] = Fh * rs
        A[:, 2] = Fv
        A[:, 3] = Fv * rs
        num_base_par = 4
    elif baseline_error == 'range':
        A = np.zeros([npixel, 2])
        A[:, 0] = Fh
        A[:, 1] = Fv
        num_base_par = 2

    ###########################################
    yref = int(h5file['timeseries'].attrs['ref_y'])
    xref = int(h5file['timeseries'].attrs['ref_x'])

    ###########################################
    if os.path.basename(demFile).split('.')[1] == 'hgt':
        amp, dem, demRsc = readfile.read_float32(demFile)
    elif os.path.basename(demFile).split('.')[1] == 'dem':
        dem, demRsc = readfile.read_real_int16(demFile)
    dem = dem - dem[yref][xref]
    dem = dem.flatten(1)

    ###################################################
    if p == 1:
        # A=np.vstack((dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem, np.ones(len(dem)))).T
    elif p == 2:
        # A=np.vstack((dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**2, dem, np.ones(len(dem)))).T
    elif p == 3:
        # A = np.vstack((dem[ndx]**3,dem[ndx]**2,dem[ndx],np.ones(len(dem[ndx])))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(len(dem)))).T
    print(np.shape(A))
    Ainv = np.linalg.pinv(A)

    ###################################################
    Bh = []
    Bv = []
    Bhrate = []
    Bvrate = []
    Be = np.zeros([len(dateList), num_base_par + p + 1])
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    for i in range(1, len(dateList)):
        dset = h5file['timeseries'].get(dateList[i])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        L = data.flatten(1)
        M = np.hstack((A, B))
        Berror = np.dot(np.linalg.pinv(M[ndx]), L[ndx])
        Bh.append(Berror[0])
        Bhrate.append(Berror[1])
        Bv.append(Berror[2])
        Bvrate.append(Berror[3])
        Be[i, :] = Berror
        print(Berror)

    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    print('baseline error     mean     std')
    print('  bh      : ' + str(np.mean(Bh)) + '   ,  ' + str(np.std(Bh)))
    print('  bh rate : ' + str(np.mean(Bhrate)) + '   ,  ' + str(np.std(Bhrate)))
    print('  bv      : ' + str(np.mean(Bv)) + '   ,  ' + str(np.std(Bv)))
    print('  bv rate : ' + str(np.mean(Bvrate)) + '   ,  ' + str(np.std(Bvrate)))
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
    # plt.hist(Bh,bins=8,normed=True)
    # formatter = FuncFormatter(to_percent)  # Set the formatter
    # plt.gca().yaxis.set_major_formatter(formatter)
    # plt.show()
    print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')

    # print 'Estimating Baseline error from each differences ...'
    orbEffect = np.zeros([len(dateList), sy, sx])
    for i in range(1, len(dateList)):
        effect = np.dot(M, Be[i, :])
        effect = np.reshape(effect, [sx, sy]).T
        # orbEffect[i,:,:]=orbEffect[i-1,:,:]+effect
        # orbEffect[i,:,:]=orbEffect[i,:,:]-orbEffect[i,yref,xref]
        orbEffect[i, :, :] = effect - effect[yref, xref]
        del effect

    print('Correcting the time series ')
    outName = File.replace('.h5', '') + '_baseTropCor.h5'
    h5orbCor = h5py.File(outName, 'w')
    group = h5orbCor.create_group('timeseries')
    for i in range(len(dateList)):
        dset1 = h5file['timeseries'].get(dateList[i])
        data = dset1[0:dset1.shape[0], 0:dset1.shape[1]] - orbEffect[i, :, :]
        dset = group.create_dataset(dateList[i], data=data, compression='gzip')
    for key, value in h5file['timeseries'].attrs.items():
        group.attrs[key] = value

    dset1 = h5file['mask'].get('mask')
    group = h5orbCor.create_group('mask')
    dset = group.create_dataset('mask', data=dset1, compression='gzip')

    h5file.close()
    h5orbCor.close()
    return
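# ---------------------------------------------------------------------------
# Illustration (not part of the original script): the estimation step above
# solves  M * x = phase  in a least-squares sense for every epoch, where
# M = [A | B] stacks the baseline-geometry terms (A) and the DEM polynomial
# (B), restricted to unmasked pixels.  A small self-contained sketch of that
# step, with a hypothetical helper name:
import numpy as np

def fit_base_trop(A, B, phase, mask):
    """Least-squares fit of baseline + DEM-correlated phase (sketch)."""
    M = np.hstack((A, B))                       # combined design matrix
    ndx = mask.flatten() != 0                   # use reliable pixels only
    coeff = np.dot(np.linalg.pinv(M[ndx]), phase.flatten()[ndx])
    return np.dot(M, coeff)                     # modeled error for all pixels
# ---------------------------------------------------------------------------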
def main(argv):
    try:
        file = argv[0]
        alks = float(argv[1])
        rlks = float(argv[2])
    except:
        Usage(); sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks'+ext

    if ext == '.int' or ext == '.slc':
        a, p, r = readfile.read_complex64(file)
        plks   = multilook(p, alks, rlks)
        amplks = multilook(a, alks, rlks)   # multilooked amplitude; keep alks as the look number
        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH']       = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP'])*alks)
            r['X_STEP'] = str(float(r['X_STEP'])*rlks)
        except:
            Geo = 0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()

    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        a, p, r = readfile.read_float32(file)
        plks   = multilook(p, alks, rlks)
        amplks = multilook(a, alks, rlks)   # multilooked amplitude; keep alks as the look number
        writefile.write_float32(plks, outName)
        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH']       = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP'])*alks)
            r['X_STEP'] = str(float(r['X_STEP'])*rlks)
        except:
            Geo = 0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()

    elif ext == '.dem':
        d, r = readfile.read_dem(file)
        dlks = multilook(d, alks, rlks)
        print 'writing '+outName
        writefile.write_dem(dlks, outName)
        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH']       = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP'])*alks)
            r['X_STEP'] = str(float(r['X_STEP'])*rlks)
        except:
            Geo = 0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()

    elif ext in ['.jpeg', '.jpg', '.png']:
        import Image
        im = Image.open(file)
        width  = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)
        try:
            r = readfile.read_rsc_file(file+'.rsc')
        except:
            sys.exit(1)
        r['FILE_LENGTH'] = str(height)
        r['WIDTH']       = str(width)
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP'])*alks)
            r['X_STEP'] = str(float(r['X_STEP'])*rlks)
        except:
            Geo = 0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()

    elif ext == '.h5':
        h5file = h5py.File(file,'r')
        # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks = h5py.File(outName,'w')

        if 'interferograms' in h5file.keys():
            print 'Multilooking the interferograms'
            gg = h5file_lks.create_group('interferograms')
            igramList = h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unw = h5file['interferograms'][igram].get(igram)
                unwlks = multilook(unw, alks, rlks)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unwlks, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['WIDTH']       = unwlks.shape[1]
                group.attrs['FILE_LENGTH'] = unwlks.shape[0]
                try:
                    group.attrs['Y_STEP'] = alks*float(group.attrs['Y_STEP'])
                    group.attrs['X_STEP'] = rlks*float(group.attrs['X_STEP'])
                except:
                    group.attrs['AZIMUTH_PIXEL_SIZE'] = alks*float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                    group.attrs['RANGE_PIXEL_SIZE']   = rlks*float(group.attrs['RANGE_PIXEL_SIZE'])

            dset1 = h5file['mask'].get('mask')
            mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
            masklks = multilook(mask, alks, rlks)
            group = h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=masklks, compression='gzip')

        elif 'timeseries' in h5file.keys():
            print 'Multilooking the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList = h5file['timeseries'].keys()
            for d in dateList:
                print d
                unw = h5file['timeseries'].get(d)
                unwlks = multilook(unw, alks, rlks)
                dset = group.create_dataset(d, data=unwlks, compression='gzip')
            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH']       = unwlks.shape[1]
            group.attrs['FILE_LENGTH'] = unwlks.shape[0]
            try:
                group.attrs['Y_STEP'] = alks*float(group.attrs['Y_STEP'])
                group.attrs['X_STEP'] = rlks*float(group.attrs['X_STEP'])
            except:
                group.attrs['AZIMUTH_PIXEL_SIZE'] = alks*float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                group.attrs['RANGE_PIXEL_SIZE']   = rlks*float(group.attrs['RANGE_PIXEL_SIZE'])

            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
                Masklks = multilook(Mask, alks, rlks)
                group = h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Masklks, compression='gzip')
            except:
                print 'Multilooked file does not include the mask'

        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            k = h5file.keys()
            print 'multilooking the '+ k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
            Masklks = multilook(Mask, alks, rlks)
            dset = group.create_dataset(k[0], data=Masklks, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value
            try:
                group.attrs['Y_STEP'] = alks*float(group.attrs['Y_STEP'])
                group.attrs['X_STEP'] = rlks*float(group.attrs['X_STEP'])
            except:
                group.attrs['AZIMUTH_PIXEL_SIZE'] = alks*float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                group.attrs['RANGE_PIXEL_SIZE']   = rlks*float(group.attrs['RANGE_PIXEL_SIZE'])
            group.attrs['WIDTH']       = Masklks.shape[1]
            group.attrs['FILE_LENGTH'] = Masklks.shape[0]

        h5file.close()
        h5file_lks.close()
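# ---------------------------------------------------------------------------
# Note: multilook() is defined elsewhere in this package.  A minimal sketch,
# assuming plain block averaging over alks x rlks pixel windows (edge pixels
# that do not fill a complete window are dropped):
import numpy as np

def multilook(data, alks, rlks):
    """Spatially average data over alks x rlks pixel blocks (sketch)."""
    alks, rlks = int(alks), int(rlks)
    rows, cols = data.shape
    nrows, ncols = rows // alks, cols // rlks
    d = np.asarray(data[:nrows * alks, :ncols * rlks], dtype=float)
    return d.reshape(nrows, alks, ncols, rlks).mean(axis=(1, 3))
# ---------------------------------------------------------------------------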