def check_size(k, epochList): width_list = [] length_list = [] epoch_list = [] for epoch in epochList: rscFile = readfile.read_rsc_file(epoch + '.rsc') width = rscFile['WIDTH'] length = rscFile['FILE_LENGTH'] width_list.append(width) length_list.append(length) epoch_list.append(epoch) mode_width = mode(width_list) mode_length = mode(length_list) if width_list.count(mode_width) != len(width_list) or length_list.count( mode_length) != len(length_list): print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n' print 'WARNING: Some ' + k + ' may have the wrong dimensions!\n' print 'All ' + k + ' should have the same size.\n' print 'The width and length of the majority of ' + k + ' are: ' + str( mode_width) + ', ' + str(mode_length) + '\n' print 'But the following ' + k + ' have different dimensions and thus not considered in the time-series: \n' for epoch in epoch_list: rscFile = readfile.read_rsc_file(epoch + '.rsc') width = rscFile['WIDTH'] length = rscFile['FILE_LENGTH'] if width != mode_width or length != mode_length: print ' ' + epoch + ' width: ' + width + ' length: ' + length epochList.remove(epoch) print '\nNumber of ' + k + ' to be loaded: ' + str(len(epochList)) print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%' return epochList, mode_width, mode_length
def check_size(k,epochList): width_list =[] length_list=[] epoch_list =[] for epoch in epochList: rscFile=readfile.read_rsc_file(epoch+'.rsc') width = rscFile['WIDTH'] length = rscFile['FILE_LENGTH'] width_list.append(width) length_list.append(length) epoch_list.append(epoch) mode_width=mode(width_list) mode_length=mode(length_list) if width_list.count(mode_width)!=len(width_list) or length_list.count(mode_length)!=len(length_list): print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n' print 'WARNING: Some '+k+' may have the wrong dimensions!\n' print 'All '+k+' should have the same size.\n' print 'The width and length of the majority of '+k+' are: ' + str(mode_width)+', '+str(mode_length)+'\n' print 'But the following '+k+' have different dimensions and thus not considered in the time-series: \n' for epoch in epoch_list: rscFile=readfile.read_rsc_file(epoch+'.rsc') width = rscFile['WIDTH'] length = rscFile['FILE_LENGTH'] if width != mode_width or length != mode_length: print ' '+ epoch + ' width: '+width+' length: '+length epochList.remove(epoch) print '\nNumber of '+k+' to be loaded: '+str(len(epochList)) print '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%' return epochList, mode_width, mode_length
def main(argv):
    """Load processed ROI_PAC products into PySAR HDF5 files.

    Reads a template file naming the project, then collects:
      - unwrapped interferograms -> LoadedData.h5 (+ Mask.h5 from the
        product of the amplitude layers),
      - coherence files          -> Coherence.h5 (+ average_spatial_coherence.h5),
      - wrapped interferograms   -> Wrapped.h5,
    all under the TSSAR project directory, and finally copies the geomap
    file (if given in the template) into that directory.
    """
    try:
        templateFile = argv[1]
    except:
        print '''
    *******************************************

       loading the processed data for PySAR:
          interferograms (unwrapped and wrapped)
          coherence files (generate Mask at the same time)

       Usage: load_data.py TEMPLATEFILE

    *******************************************
    '''
        sys.exit(1)

    templateContents = readfile.read_template(templateFile)
    # project name = template file name without its extension
    projectName = os.path.basename(templateFile.partition('.')[0])

    ############# Assign working directory ##############################
    try:
        tssarProjectDir = os.getenv('TSSARDIR') + '/' + projectName  # use TSSARDIR if environment variable exists
    except:
        tssarProjectDir = os.getenv('SCRATCHDIR') + '/' + projectName + "/TSSAR"  # FA 7/2015: adopted for new directory structure
    print "QQ " + tssarProjectDir
    if not os.path.isdir(tssarProjectDir):
        os.mkdir(tssarProjectDir)

    ########### Use defaults if paths not given in template file #########
    try:
        igramPath = templateContents['pysar.inputdata']
        igramPath = check_variable_name(igramPath)
    except:
        igramPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*c10.unw'
    try:
        corPath = templateContents['pysar.CorFiles']
        corPath = check_variable_name(corPath)
    except:
        corPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*.cor'
    try:
        wrapPath = templateContents['pysar.wrapped']
        wrapPath = check_variable_name(wrapPath)
    except:
        wrapPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/I*/filt_*0*sim_HDR_*rlks.int'

    ######################### Unwrapped Interferograms ########################
    # NOTE(review): the bare except wrapping this whole section (and the two
    # below) swallows every error — including the sys.exit(1) raised when the
    # output file already exists — behind a generic "not loaded" message.
    try:
        if os.path.isfile(tssarProjectDir + '/LoadedData.h5'):
            print '\nLoadedData.h5' + ' already exists.\n'
            sys.exit(1)
        print 'loading interferograms ...'
        igramList = glob.glob(igramPath)
        k = 'interferograms'
        check_number(k, igramList)  # number check
        igramList, mode_width, mode_length = check_size(k, igramList)  # size check

        h5file = tssarProjectDir + '/LoadedData.h5'
        f = h5py.File(h5file)
        gg = f.create_group('interferograms')
        # MaskZero accumulates the product of all amplitude layers; a pixel
        # that is zero in any interferogram ends up zero in the final mask.
        MaskZero = np.ones([int(mode_length), int(mode_width)])
        for igram in igramList:
            if not os.path.basename(igram) in f:
                print 'Adding ' + igram
                group = gg.create_group(os.path.basename(igram))
                amp, unw, unwrsc = readfile.read_float32(igram)
                MaskZero = amp * MaskZero
                dset = group.create_dataset(os.path.basename(igram), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                # attach the matching baseline attributes (dates from DATE12)
                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(igram) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(igram)

        Mask = np.ones([int(mode_length), int(mode_width)])
        Mask[MaskZero == 0] = 0
        gm = f.create_group('mask')
        dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()

        ########################################################################
        # NOTE(review): Mask.h5 is written to the current working directory,
        # not to tssarProjectDir — confirm this is intended.
        print 'writing to Mask.h5'
        h5file = 'Mask.h5'
        h5mask = h5py.File(h5file, 'w')
        group = h5mask.create_group('mask')
        dset = group.create_dataset(os.path.basename('mask'), data=Mask, compression='gzip')
        h5mask.close()
    except:
        print 'No unwrapped interferogram is loaded.\n'

    ############################# Coherence ################################
    try:
        if os.path.isfile(tssarProjectDir + '/Coherence.h5'):
            print '\nCoherence.h5' + ' already exists.\n'
            sys.exit(1)
        print 'loading corelation files ...'
        corList = glob.glob(corPath)
        k = 'coherence'
        check_number(k, corList)  # number check
        corList, mode_width, mode_length = check_size(k, corList)  # size check

        h5file = tssarProjectDir + '/Coherence.h5'
        fcor = h5py.File(h5file)
        gg = fcor.create_group('coherence')
        # running sum of the coherence layers; averaged after the loop
        meanCoherence = np.zeros([int(mode_length), int(mode_width)])
        for cor in corList:
            if not os.path.basename(cor) in fcor:
                print 'Adding ' + cor
                group = gg.create_group(os.path.basename(cor))
                amp, unw, unwrsc = readfile.read_float32(cor)
                meanCoherence = meanCoherence + unw
                dset = group.create_dataset(os.path.basename(cor), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(cor) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(cor)

        meanCoherence = meanCoherence / (len(corList))
        print '********************************'
        print 'writing average_spatial_coherence.h5'
        h5file_CorMean = tssarProjectDir + '/average_spatial_coherence.h5'
        fcor_mean = h5py.File(h5file_CorMean, 'w')
        group = fcor_mean.create_group('mask')
        dset = group.create_dataset(os.path.basename('mask'), data=meanCoherence, compression='gzip')
        fcor_mean.close()

        print '********************************'
        print 'writing meanCoherence group to the interferogram file'
        gc = fcor.create_group('meanCoherence')
        dset = gc.create_dataset('meanCoherence', data=meanCoherence, compression='gzip')
        print '********************************'
        fcor.close()
    except:
        print 'No correlation file is loaded.\n'

    ########################## Wrapped Interferograms ############################
    try:
        if os.path.isfile(tssarProjectDir + '/Wrapped.h5'):
            print '\nWrapped.h5' + ' already exists.\n'
            sys.exit(1)
        print 'loading wrapped phase ...'
        wrapList = glob.glob(wrapPath)
        k = 'wrapped'
        check_number(k, wrapList)  # number check
        wrapList, mode_width, mode_length = check_size(k, wrapList)  # size check

        h5file = tssarProjectDir + '/Wrapped.h5'
        fw = h5py.File(h5file)
        gg = fw.create_group('wrapped')
        for wrap in wrapList:
            if not os.path.basename(wrap) in fw:
                print 'Adding ' + wrap
                group = gg.create_group(os.path.basename(wrap))
                amp, unw, unwrsc = readfile.read_complex64(wrap)
                dset = group.create_dataset(os.path.basename(wrap), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(wrap) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(wrap)
        fw.close()
        print 'Writed ' + str(len(wrapList)) + ' wrapped interferograms to ' + h5file
    except:
        print 'No wrapped interferogram is loaded.\n'

    ############################# geomap file ###############################
    # copy the geocoding lookup file (and its .rsc) into the project dir
    try:
        geomapFile = templateContents['pysar.geomap']
        geomapFile = check_variable_name(geomapFile)
        cpCmd = "cp " + geomapFile + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + geomapFile + ".rsc " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
    except:
        print "*********************************"
        print "Warning: no geomap file given"
        print "*********************************"
def main(argv): try: file = argv[0] alks = float(argv[1]) rlks = float(argv[2]) except: Usage() sys.exit(1) ext = os.path.splitext(file)[1] outName = file.split('.')[0] + '_a' + str(int(alks)) + 'lks_r' + str( int(rlks)) + 'lks' + ext if ext == '.int' or ext == '.slc': a, p, r = readfile.read_complex64(file) plks = multilook(p, alks, rlks) alks = multilook(a, alks, rlks) r['FILE_LENGTH'] = str(dlks.shape[0]) r['WIDTH'] = str(dlks.shape[1]) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP'] = str(float(r['Y_STEP']) * alks) r['X_STEP'] = str(float(r['X_STEP']) * rlks) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close() elif ext == '.unw' or ext == '.cor' or ext == '.hgt': a, p, r = readfile.read_float32(file) plks = multilook(p, alks, rlks) alks = multilook(a, alks, rlks) writefile.write_float32(plks, outName) r['FILE_LENGTH'] = str(dlks.shape[0]) r['WIDTH'] = str(dlks.shape[1]) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP'] = str(float(r['Y_STEP']) * alks) r['X_STEP'] = str(float(r['X_STEP']) * rlks) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close() elif ext == ('.dem'): d, r = readfile.read_dem(file) dlks = multilook(d, alks, rlks) print 'writing ' + outName writefile.write_dem(dlks, outName) r['FILE_LENGTH'] = str(dlks.shape[0]) r['WIDTH'] = str(dlks.shape[1]) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP'] = str(float(r['Y_STEP']) * alks) r['X_STEP'] = str(float(r['X_STEP']) * rlks) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close() elif ext in ['.jpeg', 'jpg', 'png']: import Image im = Image.open(file) width = im.size[0] / int(rlks) height = im.size[1] / int(alks) imlks = im.resize((width, height), Image.NEAREST) print 'writing ' + outName 
imlks.save(outName) try: r = readfile.read_rsc_file(file + '.rsc') except: sys.exit(1) r['FILE_LENGTH'] = str(height) r['WIDTH'] = str(width) r['XMAX'] = str(int(r['WIDTH']) - 1) r['YMAX'] = str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP'] = str(float(r['Y_STEP']) * alks) r['X_STEP'] = str(float(r['X_STEP']) * rlks) except: Geo = 0 f = open(outName + '.rsc', 'w') for k in r.keys(): f.write(k + ' ' + r[k] + '\n') f.close() elif ext == ('.h5'): h5file = h5py.File(file, 'r') # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5' h5file_lks = h5py.File(outName, 'w') if 'interferograms' in h5file.keys(): print 'Multilooking the interferograms' gg = h5file_lks.create_group('interferograms') igramList = h5file['interferograms'].keys() for igram in igramList: print igram unw = h5file['interferograms'][igram].get(igram) unwlks = multilook(unw, alks, rlks) group = gg.create_group(igram) dset = group.create_dataset(igram, data=unwlks, compression='gzip') for key, value in h5file['interferograms'][ igram].attrs.iteritems(): group.attrs[key] = value group.attrs['WIDTH'] = unwlks.shape[1] group.attrs['FILE_LENGTH'] = unwlks.shape[0] try: group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP']) group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP']) except: group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float( group.attrs['AZIMUTH_PIXEL_SIZE']) group.attrs['RANGE_PIXEL_SIZE'] = rlks * float( group.attrs['RANGE_PIXEL_SIZE']) dset1 = h5file['mask'].get('mask') mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]] masklks = multilook(mask, alks, rlks) group = h5file_lks.create_group('mask') dset = group.create_dataset('mask', data=masklks, compression='gzip') elif 'timeseries' in h5file.keys(): print 'Multilooking the time-series' group = h5file_lks.create_group('timeseries') dateList = h5file['timeseries'].keys() for d in dateList: print d unw = h5file['timeseries'].get(d) unwlks = multilook(unw, alks, rlks) dset = group.create_dataset(d, data=unwlks, 
compression='gzip') for key, value in h5file['timeseries'].attrs.iteritems(): group.attrs[key] = value group.attrs['WIDTH'] = unwlks.shape[1] group.attrs['FILE_LENGTH'] = unwlks.shape[0] try: group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP']) group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP']) except: group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float( group.attrs['AZIMUTH_PIXEL_SIZE']) group.attrs['RANGE_PIXEL_SIZE'] = rlks * float( group.attrs['RANGE_PIXEL_SIZE']) try: dset1 = h5file['mask'].get('mask') Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]] Masklks = multilook(Mask, alks, rlks) group = h5file_lks.create_group('mask') dset = group.create_dataset('mask', data=Masklks, compression='gzip') except: print 'Multilooked file does not include the maske' elif 'temporal_coherence' in h5file.keys( ) or 'velocity' in h5file.keys() or 'mask' in h5file.keys(): k = h5file.keys() print 'multi looking the ' + k[0] group = h5file_lks.create_group(k[0]) dset1 = h5file[k[0]].get(k[0]) Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]] Masklks = multilook(Mask, alks, rlks) dset = group.create_dataset(k[0], data=Masklks, compression='gzip') for key, value in h5file[k[0]].attrs.iteritems(): group.attrs[key] = value try: group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP']) group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP']) except: group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float( group.attrs['AZIMUTH_PIXEL_SIZE']) group.attrs['RANGE_PIXEL_SIZE'] = rlks * float( group.attrs['RANGE_PIXEL_SIZE']) group.attrs['WIDTH'] = Masklks.shape[1] group.attrs['FILE_LENGTH'] = Masklks.shape[0] h5file.close() h5file_lks.close()
def main(argv): disRas = 'no' if len(sys.argv) > 2: try: opts, args = getopt.getopt(argv, "h:f:x:y:l:L:r:a:o:P:") except getopt.GetoptError: Usage() sys.exit(1) for opt, arg in opts: if opt in ("-h", "--help"): Usage() sys.exit() elif opt == '-f': file = arg elif opt == '-x': xsub = [int(i) for i in arg.split(':')] xsub.sort() elif opt == '-y': ysub = [int(i) for i in arg.split(':')] ysub.sort() elif opt == '-l': latsub = [float(i) for i in arg.split(':')] latsub.sort() elif opt == '-L': lonsub = [float(i) for i in arg.split(':')] lonsub.sort() elif opt == '-r': mli_rg = int(arg) elif opt == '-a': mli_az = int(arg) elif opt == '-o': outname = arg elif opt == '-P': disRas = arg try: file except: Usage() sys.exit(1) elif len(sys.argv) == 2: file = argv[0] else: Usage() sys.exit(1) ############################################################ ext = os.path.splitext(file)[1] outname = 'subset_' + file try: parContents = readfile.read_rsc_file(file + '.rsc') width = int(parContents['WIDTH']) length = int(parContents['FILE_LENGTH']) except: parContents = readfile.read_par_file(file + '.par') width = int(parContents['range_samples:']) length = int(parContents['azimuth_lines:']) # subset try: ysub if ysub[1] > length: ysub[1] = length print 'ysub[1] > length! Set ysub[1]=length=' + str(length) except: ysub = [0, length] print 'no subset in y direction' try: xsub if xsub[1] > width: xsub[1] = width print 'xsub[1] > width! Set xsub[1]=width=' + str(width) except: xsub = [0, width] print 'no subset in x direction' if (ysub[1] - ysub[0]) * (xsub[1] - xsub[0]) < length * width: subsetCmd = 'subset.py -f ' + file + ' -x ' + str(xsub[0]) + ':' + str( xsub[1]) + ' -y ' + str(ysub[0]) + ':' + str( ysub[1]) + ' -o ' + outname print subsetCmd os.system(subsetCmd) else: outname = file print 'No subset.' 
# generate .ras file if ext == '.mli': try: mli_rg except: mli_rg = 1 try: mli_az except: mli_az = 1 rasCmd = 'raspwr ' + outname + ' ' + str( xsub[1] - xsub[0]) + ' 1 0 ' + str(mli_rg) + ' ' + str( mli_az) + ' 1. .35 1 - 0' print rasCmd os.system(rasCmd) elif ext in ('.slc', '.SLC'): try: mli_rg except: mli_rg = 1 try: mli_az except: mli_az = 2 rasCmd = 'rasSLC ' + outname + ' ' + str( xsub[1] - xsub[0]) + ' 1 0 ' + str(mli_rg) + ' ' + str( mli_az) + ' 1. .35 1 1' print rasCmd os.system(rasCmd) else: print 'Not recognized file extension!' Usage() sys.exit(1) # display .ras file if disRas in ('yes', 'Yes', 'Y', 'y', 'YES'): disCmd = 'display ' + outname + '.ras' print disCmd os.system(disCmd)
def main(argv):
    """Spatially filter a file: -f file -t filterType [-p parameters].

    For HDF5 input, every dataset is passed through
    filter(data, filtType, par) and written to <base>_<filtType>.h5.
    """
    try:
        opts, args = getopt.getopt(argv, "h:f:t:p:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)
    if opts == []:
        Usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == "-f":
            file = arg
        elif opt == "-t":
            filtType = arg
        elif opt == "-p":
            par = arg

    # try:
    #     file=argv[0]
    #     alks=float(argv[1])
    #     rlks=float(argv[2])
    # except:
    #     Usage();sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split(".")[0] + "_" + filtType + ext
    # -p is optional; default to an empty parameter list
    try:
        par
    except:
        par = []

    print "+++++++++++++++++++++++++++"
    print "Filter type : " + filtType
    print "parameters : " + str(par)
    print "+++++++++++++++++++++++++++"
    ###############################################
    # NOTE(review): the four non-HDF5 branches below appear copy-pasted from
    # multilook.py — they reference names (alks, rlks, multilook, and in the
    # first two branches dlks) that are never defined in this function, so
    # they would raise NameError if reached, and they never call filter().
    # Only the .h5 branch actually applies the filter.  Left untouched
    # pending clarification of the intended behavior.
    if ext == ".int" or ext == ".slc":
        a, p, r = readfile.read_complex64(file)
        plks = multilook(p, alks, rlks)
        alks = multilook(a, alks, rlks)

        r["FILE_LENGTH"] = str(dlks.shape[0])
        r["WIDTH"] = str(dlks.shape[1])
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()
    elif ext == ".unw" or ext == ".cor" or ext == ".hgt":
        a, p, r = readfile.read_float32(file)
        plks = multilook(p, alks, rlks)
        alks = multilook(a, alks, rlks)
        writefile.write_float32(plks, outName)

        r["FILE_LENGTH"] = str(dlks.shape[0])
        r["WIDTH"] = str(dlks.shape[1])
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()
    elif ext == (".dem"):
        d, r = readfile.read_dem(file)
        dlks = multilook(d, alks, rlks)
        print "writing " + outName
        writefile.write_dem(dlks, outName)

        r["FILE_LENGTH"] = str(dlks.shape[0])
        r["WIDTH"] = str(dlks.shape[1])
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()
    elif ext in [".jpeg", "jpg", "png"]:
        # NOTE(review): 'jpg'/'png' lack the leading dot, so those
        # extensions can never match os.path.splitext output.
        import Image
        im = Image.open(file)
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print "writing " + outName
        imlks.save(outName)
        try:
            r = readfile.read_rsc_file(file + ".rsc")
        except:
            sys.exit(1)

        r["FILE_LENGTH"] = str(height)
        r["WIDTH"] = str(width)
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()
    elif ext == (".h5"):
        h5file = h5py.File(file, "r")
        # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks = h5py.File(outName, "w")

        if "interferograms" in h5file.keys():
            print "Filtering the interferograms in space"
            gg = h5file_lks.create_group("interferograms")
            igramList = h5file["interferograms"].keys()
            for igram in igramList:
                print igram
                unwSet = h5file["interferograms"][igram].get(igram)
                unw = unwSet[0:unwSet.shape[0], 0:unwSet.shape[1]]
                unw = filter(unw, filtType, par)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression="gzip")
                for key, value in h5file["interferograms"][igram].attrs.iteritems():
                    group.attrs[key] = value

            # the mask is copied through unchanged (not filtered)
            dset1 = h5file["mask"].get("mask")
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            group = h5file_lks.create_group("mask")
            dset = group.create_dataset("mask", data=mask, compression="gzip")

        elif "timeseries" in h5file.keys():
            print "Filtering the time-series"
            group = h5file_lks.create_group("timeseries")
            dateList = h5file["timeseries"].keys()
            for d in dateList:
                print d
                dset1 = h5file["timeseries"].get(d)
                data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                data = filter(data, filtType, par)
                dset = group.create_dataset(d, data=data, compression="gzip")
            for key, value in h5file["timeseries"].attrs.iteritems():
                group.attrs[key] = value

            try:
                dset1 = h5file["mask"].get("mask")
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                # Masklks=multilook(Mask,alks,rlks)
                group = h5file_lks.create_group("mask")
                dset = group.create_dataset("mask", data=Mask, compression="gzip")
            except:
                print "Filterd file does not include the maske"

        elif "temporal_coherence" in h5file.keys() or "velocity" in h5file.keys() or "mask" in h5file.keys():
            # single-dataset file: filter the one dataset it contains
            k = h5file.keys()
            print "filtering the " + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            data = filter(data, filtType, par)
            dset = group.create_dataset(k[0], data=data, compression="gzip")
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value

        h5file.close()
        h5file_lks.close()
def main(argv):
    """Load processed ROI_PAC products into PySAR HDF5 files.

    Reads a template file naming the project, then collects:
      - unwrapped interferograms -> LoadedData.h5 (+ Mask.h5 from the
        product of the amplitude layers),
      - coherence files          -> Coherence.h5 (+ average_spatial_coherence.h5),
      - wrapped interferograms   -> Wrapped.h5,
    all under the TSSAR project directory, and finally copies the geomap
    file (if given in the template) into that directory.
    """
    try:
        templateFile = argv[1]
    except:
        print '''
    *******************************************

       loading the processed data for PySAR:
          interferograms (unwrapped and wrapped)
          coherence files (generate Mask at the same time)

       Usage: load_data.py TEMPLATEFILE

    *******************************************
    '''
        sys.exit(1)

    templateContents = readfile.read_template(templateFile)
    # project name = template file name without its extension
    projectName = os.path.basename(templateFile.partition('.')[0])

    ############# Assign working directory ##############################
    try:
        tssarProjectDir = os.getenv(
            'TSSARDIR'
        ) + '/' + projectName  # use TSSARDIR if environment variable exists
    except:
        tssarProjectDir = os.getenv(
            'SCRATCHDIR'
        ) + '/' + projectName + "/TSSAR"  # FA 7/2015: adopted for new directory structure
    print "QQ " + tssarProjectDir
    if not os.path.isdir(tssarProjectDir):
        os.mkdir(tssarProjectDir)

    ########### Use defaults if paths not given in template file #########
    try:
        igramPath = templateContents['pysar.inputdata']
        igramPath = check_variable_name(igramPath)
    except:
        igramPath = os.getenv(
            'SCRATCHDIR'
        ) + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*c10.unw'
    try:
        corPath = templateContents['pysar.CorFiles']
        corPath = check_variable_name(corPath)
    except:
        corPath = os.getenv(
            'SCRATCHDIR'
        ) + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*.cor'
    try:
        wrapPath = templateContents['pysar.wrapped']
        wrapPath = check_variable_name(wrapPath)
    except:
        wrapPath = os.getenv(
            'SCRATCHDIR'
        ) + '/' + projectName + '/PROCESS/DONE/I*/filt_*0*sim_HDR_*rlks.int'

    ######################### Unwrapped Interferograms ########################
    # NOTE(review): the bare except wrapping this whole section (and the two
    # below) swallows every error — including the sys.exit(1) raised when the
    # output file already exists — behind a generic "not loaded" message.
    try:
        if os.path.isfile(tssarProjectDir + '/LoadedData.h5'):
            print '\nLoadedData.h5' + ' already exists.\n'
            sys.exit(1)
        print 'loading interferograms ...'
        igramList = glob.glob(igramPath)
        k = 'interferograms'
        check_number(k, igramList)  # number check
        igramList, mode_width, mode_length = check_size(
            k, igramList)  # size check

        h5file = tssarProjectDir + '/LoadedData.h5'
        f = h5py.File(h5file)
        gg = f.create_group('interferograms')
        # MaskZero accumulates the product of all amplitude layers; a pixel
        # that is zero in any interferogram ends up zero in the final mask.
        MaskZero = np.ones([int(mode_length), int(mode_width)])
        for igram in igramList:
            if not os.path.basename(igram) in f:
                print 'Adding ' + igram
                group = gg.create_group(os.path.basename(igram))
                amp, unw, unwrsc = readfile.read_float32(igram)
                MaskZero = amp * MaskZero
                dset = group.create_dataset(os.path.basename(igram),
                                            data=unw,
                                            compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                # attach the matching baseline attributes (dates from DATE12)
                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(
                    igram) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(
                    h5file) + " already contains " + os.path.basename(igram)

        Mask = np.ones([int(mode_length), int(mode_width)])
        Mask[MaskZero == 0] = 0
        gm = f.create_group('mask')
        dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()

        ########################################################################
        # NOTE(review): Mask.h5 is written to the current working directory,
        # not to tssarProjectDir — confirm this is intended.
        print 'writing to Mask.h5'
        h5file = 'Mask.h5'
        h5mask = h5py.File(h5file, 'w')
        group = h5mask.create_group('mask')
        dset = group.create_dataset(os.path.basename('mask'),
                                    data=Mask,
                                    compression='gzip')
        h5mask.close()
    except:
        print 'No unwrapped interferogram is loaded.\n'

    ############################# Coherence ################################
    try:
        if os.path.isfile(tssarProjectDir + '/Coherence.h5'):
            print '\nCoherence.h5' + ' already exists.\n'
            sys.exit(1)
        print 'loading corelation files ...'
        corList = glob.glob(corPath)
        k = 'coherence'
        check_number(k, corList)  # number check
        corList, mode_width, mode_length = check_size(k, corList)  # size check

        h5file = tssarProjectDir + '/Coherence.h5'
        fcor = h5py.File(h5file)
        gg = fcor.create_group('coherence')
        # running sum of the coherence layers; averaged after the loop
        meanCoherence = np.zeros([int(mode_length), int(mode_width)])
        for cor in corList:
            if not os.path.basename(cor) in fcor:
                print 'Adding ' + cor
                group = gg.create_group(os.path.basename(cor))
                amp, unw, unwrsc = readfile.read_float32(cor)
                meanCoherence = meanCoherence + unw
                dset = group.create_dataset(os.path.basename(cor),
                                            data=unw,
                                            compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(
                    cor) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(
                    h5file) + " already contains " + os.path.basename(cor)

        meanCoherence = meanCoherence / (len(corList))
        print '********************************'
        print 'writing average_spatial_coherence.h5'
        h5file_CorMean = tssarProjectDir + '/average_spatial_coherence.h5'
        fcor_mean = h5py.File(h5file_CorMean, 'w')
        group = fcor_mean.create_group('mask')
        dset = group.create_dataset(os.path.basename('mask'),
                                    data=meanCoherence,
                                    compression='gzip')
        fcor_mean.close()

        print '********************************'
        print 'writing meanCoherence group to the interferogram file'
        gc = fcor.create_group('meanCoherence')
        dset = gc.create_dataset('meanCoherence',
                                 data=meanCoherence,
                                 compression='gzip')
        print '********************************'
        fcor.close()
    except:
        print 'No correlation file is loaded.\n'

    ########################## Wrapped Interferograms ############################
    try:
        if os.path.isfile(tssarProjectDir + '/Wrapped.h5'):
            print '\nWrapped.h5' + ' already exists.\n'
            sys.exit(1)
        print 'loading wrapped phase ...'
        wrapList = glob.glob(wrapPath)
        k = 'wrapped'
        check_number(k, wrapList)  # number check
        wrapList, mode_width, mode_length = check_size(k, wrapList)  # size check

        h5file = tssarProjectDir + '/Wrapped.h5'
        fw = h5py.File(h5file)
        gg = fw.create_group('wrapped')
        for wrap in wrapList:
            if not os.path.basename(wrap) in fw:
                print 'Adding ' + wrap
                group = gg.create_group(os.path.basename(wrap))
                amp, unw, unwrsc = readfile.read_complex64(wrap)
                dset = group.create_dataset(os.path.basename(wrap),
                                            data=unw,
                                            compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(
                    wrap) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(
                    h5file) + " already contains " + os.path.basename(wrap)
        fw.close()
        print 'Writed ' + str(
            len(wrapList)) + ' wrapped interferograms to ' + h5file
    except:
        print 'No wrapped interferogram is loaded.\n'

    ############################# geomap file ###############################
    # copy the geocoding lookup file (and its .rsc) into the project dir
    try:
        geomapFile = templateContents['pysar.geomap']
        geomapFile = check_variable_name(geomapFile)
        cpCmd = "cp " + geomapFile + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + geomapFile + ".rsc " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
    except:
        print "*********************************"
        print "Warning: no geomap file given"
        print "*********************************"
def main(argv):
    """Subset (crop) a PySAR product to a pixel box or a lat/lon box.

    Command-line options (Python 2 script, parsed with getopt):
        -f  input file (.h5 / .unw / .cor / .hgt / .dem / .jpeg / .jpg / .png)
        -x  x0:x1 column range      -y  y0:y1 row range
        -l  lat0:lat1 range         -L  lon0:lon1 range (geocoded files only)
        -o  output file name (default: 'subset_' + input name)

    NOTE(review): this script uses bare `except` plus a bare variable
    reference (e.g. `try: File`) as a NameError-based "was this option
    given?" test throughout — do not "clean up" those try blocks without
    replacing the existence checks.
    """
    #outName='subsetIgrams.h5'
    try:
        opts, args = getopt.getopt(argv,"h:f:x:y:o:l:L:")
    except getopt.GetoptError:
        print 'Error while getting args'
        Usage() ; sys.exit(1)

    for opt,arg in opts:
        if opt in ("-h","--help"):
            Usage()
            sys.exit()
        elif opt == '-f':
            File = arg
        elif opt=='-y':
            ysub=[int(i) for i in arg.split(':')]
            ysub.sort()
        elif opt=='-x':
            xsub = [int(i) for i in arg.split(':')]
            xsub.sort()
        elif opt=='-o':
            outName=arg
        elif opt=='-l':
            Latsub=[float(i) for i in arg.split(':')]
            Latsub.sort()
        elif opt=='-L':
            Lonsub = [float(i) for i in arg.split(':')]
            Lonsub.sort()

    #####################################################
    # Require either (-f,-x,-y) or (-f,-l,-L); each bare name raises
    # NameError when the option was not supplied.
    try:
        File
        xsub
        ysub
    except:
        try:
            File
            Latsub
            Lonsub
        except:
            Usage();sys.exit(1)

    # Default output name when -o was not given.
    try:
        outName
    except:
        outName='subset_'+File

    ext = os.path.splitext(File)[1]
    if ext == '.h5':
        try:
            h5file=h5py.File(File,'r')
        except:
            Usage() ; sys.exit(1)
        k=h5file.keys()

        # convert LatLon to xy for geocoded file; falls through silently
        # (Geo=0) when -l/-L were not given.
        try:
            Latsub
            Lonsub
            if 'X_FIRST' in h5file[k[0]].attrs.keys():
                # geocoding attributes stored on the top-level group
                xsub=[0]*2
                ysub=[0]*2
                xsub[0]=int((Lonsub[0]-float(h5file[k[0]].attrs['X_FIRST']))/float(h5file[k[0]].attrs['X_STEP']))
                xsub[1]=int((Lonsub[1]-float(h5file[k[0]].attrs['X_FIRST']))/float(h5file[k[0]].attrs['X_STEP']))
                # Y_STEP is negative for north-up files, hence the swapped
                # Latsub indices.
                ysub[0]=int((Latsub[1]-float(h5file[k[0]].attrs['Y_FIRST']))/float(h5file[k[0]].attrs['Y_STEP']))
                ysub[1]=int((Latsub[0]-float(h5file[k[0]].attrs['Y_FIRST']))/float(h5file[k[0]].attrs['Y_STEP']))
                print 'Subseting geocoded',ext,' file with Latitude and Longitude...'
            elif 'X_FIRST' in h5file[k[0]][h5file[k[0]].keys()[0]].attrs.keys():   # for geocoded interferograms/coherence
                # geocoding attributes stored per-pair one level down
                igramList=h5file[k[0]].keys()
                xsub=[0]*2
                ysub=[0]*2
                xsub[0]=int((Lonsub[0]-float(h5file[k[0]][igramList[0]].attrs['X_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['X_STEP']))
                xsub[1]=int((Lonsub[1]-float(h5file[k[0]][igramList[0]].attrs['X_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['X_STEP']))
                ysub[0]=int((Latsub[1]-float(h5file[k[0]][igramList[0]].attrs['Y_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['Y_STEP']))
                ysub[1]=int((Latsub[0]-float(h5file[k[0]][igramList[0]].attrs['Y_FIRST']))/float(h5file[k[0]][igramList[0]].attrs['Y_STEP']))
                print 'Subseting geocoded',ext,' file with Latitude and Longitude...'
            else:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage() ; sys.exit(1)
        except:
            Geo=0

        # k=h5file.keys()
        if 'interferograms' in k:
            igramList=h5file['interferograms'].keys()
            h5out=h5py.File(outName,'w')
            gg=h5out.create_group('interferograms')
            for igram in igramList:
                print igram
                dset1=h5file['interferograms'][igram].get(igram)
                group=gg.create_group(igram)
                dset=group.create_dataset(igram, data=dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip')
                # copy all original attributes, then override the ones the
                # crop changes
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['FILE_LENGTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[0]
                group.attrs['WIDTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[1]
                group.attrs['subset_x0']=xsub[0]
                group.attrs['subset_x1']=xsub[1]
                group.attrs['subset_y0']=ysub[0]
                group.attrs['subset_y1']=ysub[1]
                if 'X_FIRST' in h5file['interferograms'][igram].attrs.keys():
                    # shift the geocoded origin to the crop corner
                    group.attrs['X_FIRST']=float(h5file['interferograms'][igram].attrs['X_FIRST']) + xsub[0]*float(h5file['interferograms'][igram].attrs['X_STEP'])
                    group.attrs['Y_FIRST']=float(h5file['interferograms'][igram].attrs['Y_FIRST']) + ysub[0]*float(h5file['interferograms'][igram].attrs['Y_STEP'])

            # carry the mask / mean-coherence layers along when present
            gm=h5out.create_group('mask')
            try:
                Mset=h5file['mask'].get('mask')
                dset=gm.create_dataset('mask', data=Mset[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip')
            except:
                print 'No group for mask found! It may cause problem in other processing steps.'
            try:
                Cset=h5file['meanCoherence'].get('meanCoherence')
                gm=h5out.create_group('meanCoherence')
                dset=gm.create_dataset('meanCoherence', data=Cset[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip')
            except:
                print 'No average coherence found in the File'

        elif k[0] in ('coherence','wrapped'):
            corList=h5file[k[0]].keys()
            h5out=h5py.File(outName,'w')
            gg=h5out.create_group(k[0])
            for cor in corList:
                print cor
                dset1=h5file[k[0]][cor].get(cor)
                group=gg.create_group(cor)
                dset=group.create_dataset(cor, data=dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip')
                for key, value in h5file[k[0]][cor].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['FILE_LENGTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[0]
                group.attrs['WIDTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[1]
                group.attrs['subset_x0']=xsub[0]
                group.attrs['subset_x1']=xsub[1]
                group.attrs['subset_y0']=ysub[0]
                group.attrs['subset_y1']=ysub[1]
                if 'X_FIRST' in h5file[k[0]][cor].attrs.keys():
                    group.attrs['X_FIRST']=float(h5file[k[0]][cor].attrs['X_FIRST']) + xsub[0]*float(h5file[k[0]][cor].attrs['X_STEP'])
                    group.attrs['Y_FIRST']=float(h5file[k[0]][cor].attrs['Y_FIRST']) + ysub[0]*float(h5file[k[0]][cor].attrs['Y_STEP'])

        elif 'timeseries' in h5file.keys():
            dateList=h5file['timeseries'].keys()
            h5out=h5py.File(outName,'w')
            group=h5out.create_group('timeseries')
            for d in dateList:
                print d
                dset1=h5file['timeseries'].get(d)
                dset=group.create_dataset(d, data=dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]], compression='gzip')
            # timeseries attributes live on the group, not per-date
            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            # dset1 still references the last date's dataset here
            group.attrs['FILE_LENGTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[0]
            group.attrs['WIDTH']=shape(dset1[ysub[0]:ysub[1],xsub[0]:xsub[1]])[1]
            group.attrs['subset_x0']=xsub[0]
            group.attrs['subset_x1']=xsub[1]
            group.attrs['subset_y0']=ysub[0]
            group.attrs['subset_y1']=ysub[1]
            if 'X_FIRST' in h5file['timeseries'].attrs.keys():
                group.attrs['X_FIRST']=float(h5file['timeseries'].attrs['X_FIRST']) + xsub[0]*float(h5file['timeseries'].attrs['X_STEP'])
                group.attrs['Y_FIRST']=float(h5file['timeseries'].attrs['Y_FIRST']) + ysub[0]*float(h5file['timeseries'].attrs['Y_STEP'])
            h5file.close()
            h5out.close()

        # single-dataset products (velocity, mask, rmse, temporal coherence)
        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys() or 'rmse' in h5file.keys():
            print 'writing >>> ' +outName
            dset=h5file[k[0]].get(k[0])
            data=dset[ysub[0]:ysub[1],xsub[0]:xsub[1]]
            hfout=h5py.File(outName,'w')
            group= hfout.create_group(k[0])
            group.create_dataset(k[0],data=data,compression='gzip')
            for key,value in h5file[k[0]].attrs.iteritems():
                group.attrs[key]=value
            group.attrs['FILE_LENGTH']=data.shape[0]
            group.attrs['WIDTH']=data.shape[1]
            group.attrs['XMIN']=0
            group.attrs['XMAX']=data.shape[1]-1
            group.attrs['YMIN']=0
            group.attrs['YMAX']=data.shape[0]-1
            group.attrs['subset_x0']=xsub[0]
            group.attrs['subset_x1']=xsub[1]
            group.attrs['subset_y0']=ysub[0]
            group.attrs['subset_y1']=ysub[1]
            if 'X_FIRST' in h5file[k[0]].attrs.keys():
                group.attrs['X_FIRST']=float(h5file[k[0]].attrs['X_FIRST']) + xsub[0]*float(h5file[k[0]].attrs['X_STEP'])
                group.attrs['Y_FIRST']=float(h5file[k[0]].attrs['Y_FIRST']) + ysub[0]*float(h5file[k[0]].attrs['Y_STEP'])
            h5file.close()
            hfout.close()

    elif ext in ['.unw','.cor','.hgt']:
        # ROI_PAC float32 products: amplitude + phase bands with a .rsc file
        a,p,r = readfile.read_float32(File)
        try:
            Latsub
            Lonsub
            try:
                r['X_FIRST']
                xsub=[0]*2
                ysub=[0]*2
                xsub[0]=int((Lonsub[0]-float(r['X_FIRST']))/float(r['X_STEP']))
                xsub[1]=int((Lonsub[1]-float(r['X_FIRST']))/float(r['X_STEP']))
                ysub[0]=int((Latsub[1]-float(r['Y_FIRST']))/float(r['Y_STEP']))
                ysub[1]=int((Latsub[0]-float(r['Y_FIRST']))/float(r['Y_STEP']))
                print 'Subseting geocoded',ext,' file with Latitude and Longitude...'
            except:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage() ; sys.exit(1)
        except:
            Geo=0
        a=a[ysub[0]:ysub[1],xsub[0]:xsub[1]]
        p=p[ysub[0]:ysub[1],xsub[0]:xsub[1]]
        print 'writing >>> '+outName
        # NOTE(review): only the phase band `p` is written out here —
        # confirm writefile.write_float32 regenerates amplitude, else `a`
        # is cropped and discarded.
        writefile.write_float32(p,outName)
        r['FILE_LENGTH']=str(p.shape[0])
        r['WIDTH']=str(p.shape[1])
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        r['subset_x0']=str(xsub[0])
        r['subset_x1']=str(xsub[1])
        r['subset_y0']=str(ysub[0])
        r['subset_y1']=str(ysub[1])
        try:
            r['Y_FIRST']=str(float(r['Y_FIRST'])+ysub[0]*float(r['Y_STEP']))
            r['X_FIRST']=str(float(r['X_FIRST'])+xsub[0]*float(r['X_STEP']))
        except:
            Geo=0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+'    '+r[k]+'\n')
        f.close()

    elif ext== '.dem':
        d,r = readfile.read_dem(File)
        try:
            Latsub
            Lonsub
            # print Latsub
            try:
                r['X_FIRST']
                xsub=[0]*2
                ysub=[0]*2
                xsub[0]=int((Lonsub[0]-float(r['X_FIRST']))/float(r['X_STEP']))
                xsub[1]=int((Lonsub[1]-float(r['X_FIRST']))/float(r['X_STEP']))
                ysub[0]=int((Latsub[1]-float(r['Y_FIRST']))/float(r['Y_STEP']))
                ysub[1]=int((Latsub[0]-float(r['Y_FIRST']))/float(r['Y_STEP']))
                print 'Subseting',ext,' file with Latitude and Longitude...'
            except:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage() ; sys.exit(1)
        except:
            Geo=0
        d=d[ysub[0]:ysub[1],xsub[0]:xsub[1]]
        print 'writing >>> '+outName
        writefile.write_dem(d,outName)
        r['FILE_LENGTH']=str(d.shape[0])
        r['WIDTH']=str(d.shape[1])
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        r['subset_x0']=str(xsub[0])
        r['subset_x1']=str(xsub[1])
        r['subset_y0']=str(ysub[0])
        r['subset_y1']=str(ysub[1])
        try:
            r['Y_FIRST']=str(float(r['Y_FIRST'])+ysub[0]*float(r['Y_STEP']))
            r['X_FIRST']=str(float(r['X_FIRST'])+xsub[0]*float(r['X_STEP']))
        except:
            Geo=0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+'    '+r[k]+'\n')
        f.close()

    elif ext in ['.jpeg','jpg','png']:
        # PIL-based crop for image products; requires a matching .rsc
        import Image
        im = Image.open(File)
        try:
            r=readfile.read_rsc_file(File+'.rsc')
        except:
            sys.exit(1)
        try:
            Latsub
            Lonsub
            try:
                r['X_FIRST']
                xsub=[0]*2
                ysub=[0]*2
                xsub[0]=int((Lonsub[0]-float(r['X_FIRST']))/float(r['X_STEP']))
                xsub[1]=int((Lonsub[1]-float(r['X_FIRST']))/float(r['X_STEP']))
                ysub[0]=int((Latsub[1]-float(r['Y_FIRST']))/float(r['Y_STEP']))
                ysub[1]=int((Latsub[0]-float(r['Y_FIRST']))/float(r['Y_STEP']))
                print 'Subseting geocoded',ext,' file with Latitude and Longitude...'
            except:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage() ; sys.exit(1)
        except:
            Geo=0
        # PIL box order is (left, upper, right, lower)
        box = (xsub[0],ysub[0],xsub[1],ysub[1])
        output_img = im.crop(box)
        print 'writing >>> '+outName
        output_img.save(outName)
        # try:
        #     r=readfile.read_rsc_file(File+'.rsc')
        # except:
        #     sys.exit(1)
        r['FILE_LENGTH']=str(ysub[1]-ysub[0])
        r['WIDTH']=str(xsub[1]-xsub[0])
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        r['subset_x0']=str(xsub[0])
        r['subset_x1']=str(xsub[1])
        r['subset_y0']=str(ysub[0])
        r['subset_y1']=str(ysub[1])
        try:
            r['Y_FIRST']=str(float(r['Y_FIRST'])+ysub[0]*float(r['Y_STEP']))
            r['X_FIRST']=str(float(r['X_FIRST'])+xsub[0]*float(r['X_STEP']))
        except:
            Geo=0
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+'    '+r[k]+'\n')
        f.close()
def main(argv):
    """Subset a GAMMA product (.mli/.slc) and render it as a SUN raster.

    Options: -f file, -x/-y pixel ranges, -l/-L lat/lon ranges,
    -r/-a range/azimuth multilook factors for the raster command,
    -o output name, -P yes|no to display the resulting .ras.
    Delegates the actual cropping to the `subset.py` CLI via os.system.
    """
    disRas = 'no'
    if len(sys.argv)>2:
        try:
            opts, args = getopt.getopt(argv,"h:f:x:y:l:L:r:a:o:P:")
        except getopt.GetoptError:
            Usage(); sys.exit(1)
        for opt, arg in opts:
            if opt in ("-h","--help"):
                Usage(); sys.exit()
            elif opt == '-f':
                file = arg
            elif opt == '-x':
                xsub = [int(i) for i in arg.split(':')]; xsub.sort()
            elif opt == '-y':
                ysub = [int(i) for i in arg.split(':')]; ysub.sort()
            elif opt == '-l':
                latsub = [float(i) for i in arg.split(':')]; latsub.sort()
            elif opt == '-L':
                lonsub = [float(i) for i in arg.split(':')]; lonsub.sort()
            elif opt == '-r':
                mli_rg = int(arg)
            elif opt == '-a':
                mli_az = int(arg)
            elif opt == '-o':
                outname = arg
            elif opt == '-P':
                disRas = arg
        # bare-name NameError check: -f is mandatory in multi-option mode
        try:
            file
        except:
            Usage(); sys.exit(1)
    elif len(sys.argv)==2:
        # single positional argument: treat it as the input file
        file = argv[0]
    else:
        Usage(); sys.exit(1)

    ############################################################
    ext = os.path.splitext(file)[1]
    # NOTE(review): this unconditionally overwrites any name given via -o
    # (the '-o' branch above is dead) — confirm and move this behind a
    # `try: outname / except:` default like the sibling subset script.
    outname='subset_'+file
    try:
        # ROI_PAC-style metadata first ...
        parContents = readfile.read_rsc_file(file + '.rsc')
        width = int(parContents['WIDTH'])
        length = int(parContents['FILE_LENGTH'])
    except:
        # ... falling back to a GAMMA .par file
        parContents = readfile.read_par_file(file + '.par')
        width = int(parContents['range_samples:'])
        length = int(parContents['azimuth_lines:'])

    # subset: default each axis to the full extent, clamp the upper bound
    try:
        ysub
        if ysub[1] > length:
            ysub[1]=length; print 'ysub[1] > length! Set ysub[1]=length='+str(length)
    except:
        ysub=[0,length]
        print 'no subset in y direction'
    try:
        xsub
        if xsub[1] > width:
            xsub[1]=width; print 'xsub[1] > width! Set xsub[1]=width='+str(width)
    except:
        xsub=[0,width]
        print 'no subset in x direction'

    # only shell out to subset.py when the box is strictly smaller
    if (ysub[1]-ysub[0])*(xsub[1]-xsub[0]) < length*width:
        subsetCmd='subset.py -f '+file+' -x '+str(xsub[0])+':'+str(xsub[1])+' -y '+str(ysub[0])+':'+str(ysub[1])+' -o '+outname
        print subsetCmd
        os.system(subsetCmd)
    else:
        outname = file
        print 'No subset.'

    # generate .ras file (GAMMA display utilities)
    if ext == '.mli':
        try:
            mli_rg
        except:
            mli_rg=1
        try:
            mli_az
        except:
            mli_az=1
        rasCmd='raspwr '+outname+' '+str(xsub[1]-xsub[0])+' 1 0 '+str(mli_rg)+' '+str(mli_az)+' 1. .35 1 - 0'
        print rasCmd
        os.system(rasCmd)
    elif ext in ('.slc','.SLC'):
        try:
            mli_rg
        except:
            mli_rg=1
        try:
            mli_az
        except:
            mli_az=2
        rasCmd='rasSLC '+outname+' '+str(xsub[1]-xsub[0])+' 1 0 '+str(mli_rg)+' '+str(mli_az)+' 1. .35 1 1'
        print rasCmd
        os.system(rasCmd)
    else:
        print 'Not recognized file extension!'
        Usage(); sys.exit(1)

    # display .ras file
    if disRas in ('yes','Yes','Y','y','YES'):
        disCmd = 'display '+outname+'.ras'
        print disCmd
        os.system(disCmd)
def main(argv):
    """Apply a spatial filter to an InSAR product.

    Options: -f input file, -t filter type, -p filter parameter(s).
    Output name is '<stem>_<filtType><ext>'. For .h5 files each dataset is
    passed through the module-level `filter(data, filtType, par)` helper.

    NOTE(review): the non-.h5 branches below reference `alks`, `rlks` and
    `dlks`, which are never assigned in this function (they appear copied
    from the multilook script) — every one of those branches will raise
    NameError at runtime. Only the .h5 branch is currently functional.
    """
    try:
        opts, args = getopt.getopt(argv, "h:f:t:p:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)
    if opts == []:
        Usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == '-f':
            file = arg
        elif opt == '-t':
            filtType = arg
        elif opt == '-p':
            par = arg

    # try:
    #     file=argv[0]
    #     alks=float(argv[1])
    #     rlks=float(argv[2])
    # except:
    #     Usage();sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split('.')[0] + '_' + filtType + ext
    # default -p to an empty parameter list (NameError existence check)
    try:
        par
    except:
        par = []

    print '+++++++++++++++++++++++++++'
    print 'Filter type : ' + filtType
    print 'parameters : ' + str(par)
    print '+++++++++++++++++++++++++++'
    ###############################################
    if ext == '.int' or ext == '.slc':
        a, p, r = readfile.read_complex64(file)
        # NOTE(review): alks/rlks/dlks undefined here -> NameError; and no
        # data file is ever written in this branch.
        plks = multilook(p, alks, rlks)
        alks = multilook(a, alks, rlks)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        a, p, r = readfile.read_float32(file)
        # NOTE(review): alks/rlks/dlks undefined here -> NameError.
        plks = multilook(p, alks, rlks)
        alks = multilook(a, alks, rlks)

        writefile.write_float32(plks, outName)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == ('.dem'):
        d, r = readfile.read_dem(file)
        # NOTE(review): alks/rlks undefined here -> NameError.
        dlks = multilook(d, alks, rlks)

        print 'writing ' + outName
        writefile.write_dem(dlks, outName)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext in ['.jpeg', 'jpg', 'png']:
        import Image
        im = Image.open(file)

        # NOTE(review): rlks/alks undefined here -> NameError.
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)

        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)

        try:
            r = readfile.read_rsc_file(file + '.rsc')
        except:
            sys.exit(1)

        r['FILE_LENGTH'] = str(height)
        r['WIDTH'] = str(width)
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == ('.h5'):
        h5file = h5py.File(file, 'r')
        # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks = h5py.File(outName, 'w')

        if 'interferograms' in h5file.keys():
            print 'Filtering the interferograms in space'
            gg = h5file_lks.create_group('interferograms')
            igramList = h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unwSet = h5file['interferograms'][igram].get(igram)
                # full-slice read pulls the dataset into memory as ndarray
                unw = unwSet[0:unwSet.shape[0], 0:unwSet.shape[1]]
                unw = filter(unw, filtType, par)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value

            # copy the mask through unfiltered
            dset1 = h5file['mask'].get('mask')
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            group = h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=mask, compression='gzip')

        elif 'timeseries' in h5file.keys():
            print 'Filtering the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList = h5file['timeseries'].keys()
            for d in dateList:
                print d
                dset1 = h5file['timeseries'].get(d)
                data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                data = filter(data, filtType, par)
                dset = group.create_dataset(d, data=data, compression='gzip')
            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value

            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                # Masklks=multilook(Mask,alks,rlks)
                group = h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Mask, compression='gzip')
            except:
                print 'Filterd file does not include the maske'

        elif 'temporal_coherence' in h5file.keys(
        ) or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            k = h5file.keys()
            print 'filtering the ' + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            data = filter(data, filtType, par)
            dset = group.create_dataset(k[0], data=data, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value

        h5file.close()
        h5file_lks.close()
def main(argv):
    """Subset (crop) a PySAR product to a pixel box or a lat/lon box.

    Command-line options (Python 2 script, parsed with getopt):
        -f  input file (.h5 / .unw / .cor / .hgt / .dem / .jpeg / .jpg / .png)
        -x  x0:x1 column range      -y  y0:y1 row range
        -l  lat0:lat1 range         -L  lon0:lon1 range (geocoded files only)
        -o  output file name (default: 'subset_' + input name)

    NOTE(review): this function appears to be a reformatted duplicate of
    the identically-named subset main elsewhere in this file — consider
    consolidating. Bare `except` + bare variable reference is used as a
    NameError-based "was this option given?" test throughout.
    """
    #outName='subsetIgrams.h5'
    try:
        opts, args = getopt.getopt(argv, "h:f:x:y:o:l:L:")
    except getopt.GetoptError:
        print 'Error while getting args'
        Usage()
        sys.exit(1)

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == '-f':
            File = arg
        elif opt == '-y':
            ysub = [int(i) for i in arg.split(':')]
            ysub.sort()
        elif opt == '-x':
            xsub = [int(i) for i in arg.split(':')]
            xsub.sort()
        elif opt == '-o':
            outName = arg
        elif opt == '-l':
            Latsub = [float(i) for i in arg.split(':')]
            Latsub.sort()
        elif opt == '-L':
            Lonsub = [float(i) for i in arg.split(':')]
            Lonsub.sort()

    #####################################################
    # require either (-f,-x,-y) or (-f,-l,-L)
    try:
        File
        xsub
        ysub
    except:
        try:
            File
            Latsub
            Lonsub
        except:
            Usage()
            sys.exit(1)

    # default output name when -o was not given
    try:
        outName
    except:
        outName = 'subset_' + File

    ext = os.path.splitext(File)[1]
    if ext == '.h5':
        try:
            h5file = h5py.File(File, 'r')
        except:
            Usage()
            sys.exit(1)
        k = h5file.keys()

        # convert LatLon to xy for geocoded file; falls through silently
        # (Geo=0) when -l/-L were not given
        try:
            Latsub
            Lonsub
            if 'X_FIRST' in h5file[k[0]].attrs.keys():
                # geocoding attributes stored on the top-level group
                xsub = [0] * 2
                ysub = [0] * 2
                xsub[0] = int((Lonsub[0] - float(h5file[k[0]].attrs['X_FIRST'])) / float(h5file[k[0]].attrs['X_STEP']))
                xsub[1] = int((Lonsub[1] - float(h5file[k[0]].attrs['X_FIRST'])) / float(h5file[k[0]].attrs['X_STEP']))
                # Y_STEP is negative for north-up files, hence the swapped
                # Latsub indices
                ysub[0] = int((Latsub[1] - float(h5file[k[0]].attrs['Y_FIRST'])) / float(h5file[k[0]].attrs['Y_STEP']))
                ysub[1] = int((Latsub[0] - float(h5file[k[0]].attrs['Y_FIRST'])) / float(h5file[k[0]].attrs['Y_STEP']))
                print 'Subseting geocoded', ext, ' file with Latitude and Longitude...'
            elif 'X_FIRST' in h5file[k[0]][h5file[k[0]].keys()[0]].attrs.keys():  # for geocoded interferograms/coherence
                # geocoding attributes stored per-pair one level down
                igramList = h5file[k[0]].keys()
                xsub = [0] * 2
                ysub = [0] * 2
                xsub[0] = int((Lonsub[0] - float(h5file[k[0]][igramList[0]].attrs['X_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['X_STEP']))
                xsub[1] = int((Lonsub[1] - float(h5file[k[0]][igramList[0]].attrs['X_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['X_STEP']))
                ysub[0] = int((Latsub[1] - float(h5file[k[0]][igramList[0]].attrs['Y_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['Y_STEP']))
                ysub[1] = int((Latsub[0] - float(h5file[k[0]][igramList[0]].attrs['Y_FIRST'])) / float(h5file[k[0]][igramList[0]].attrs['Y_STEP']))
                print 'Subseting geocoded', ext, ' file with Latitude and Longitude...'
            else:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage()
                sys.exit(1)
        except:
            Geo = 0

        # k=h5file.keys()
        if 'interferograms' in k:
            igramList = h5file['interferograms'].keys()
            h5out = h5py.File(outName, 'w')
            gg = h5out.create_group('interferograms')
            for igram in igramList:
                print igram
                dset1 = h5file['interferograms'][igram].get(igram)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip')
                # copy all original attributes, then override the ones the
                # crop changes
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['FILE_LENGTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[0]
                group.attrs['WIDTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[1]
                group.attrs['subset_x0'] = xsub[0]
                group.attrs['subset_x1'] = xsub[1]
                group.attrs['subset_y0'] = ysub[0]
                group.attrs['subset_y1'] = ysub[1]
                if 'X_FIRST' in h5file['interferograms'][igram].attrs.keys():
                    # shift the geocoded origin to the crop corner
                    group.attrs['X_FIRST'] = float(h5file['interferograms'][igram].attrs['X_FIRST']) + xsub[0] * float(h5file['interferograms'][igram].attrs['X_STEP'])
                    group.attrs['Y_FIRST'] = float(h5file['interferograms'][igram].attrs['Y_FIRST']) + ysub[0] * float(h5file['interferograms'][igram].attrs['Y_STEP'])

            # carry the mask / mean-coherence layers along when present
            gm = h5out.create_group('mask')
            try:
                Mset = h5file['mask'].get('mask')
                dset = gm.create_dataset('mask', data=Mset[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip')
            except:
                print 'No group for mask found! It may cause problem in other processing steps.'
            try:
                Cset = h5file['meanCoherence'].get('meanCoherence')
                gm = h5out.create_group('meanCoherence')
                dset = gm.create_dataset('meanCoherence', data=Cset[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip')
            except:
                print 'No average coherence found in the File'

        elif k[0] in ('coherence', 'wrapped'):
            corList = h5file[k[0]].keys()
            h5out = h5py.File(outName, 'w')
            gg = h5out.create_group(k[0])
            for cor in corList:
                print cor
                dset1 = h5file[k[0]][cor].get(cor)
                group = gg.create_group(cor)
                dset = group.create_dataset(cor, data=dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip')
                for key, value in h5file[k[0]][cor].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['FILE_LENGTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[0]
                group.attrs['WIDTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[1]
                group.attrs['subset_x0'] = xsub[0]
                group.attrs['subset_x1'] = xsub[1]
                group.attrs['subset_y0'] = ysub[0]
                group.attrs['subset_y1'] = ysub[1]
                if 'X_FIRST' in h5file[k[0]][cor].attrs.keys():
                    group.attrs['X_FIRST'] = float(h5file[k[0]][cor].attrs['X_FIRST']) + xsub[0] * float(h5file[k[0]][cor].attrs['X_STEP'])
                    group.attrs['Y_FIRST'] = float(h5file[k[0]][cor].attrs['Y_FIRST']) + ysub[0] * float(h5file[k[0]][cor].attrs['Y_STEP'])

        elif 'timeseries' in h5file.keys():
            dateList = h5file['timeseries'].keys()
            h5out = h5py.File(outName, 'w')
            group = h5out.create_group('timeseries')
            for d in dateList:
                print d
                dset1 = h5file['timeseries'].get(d)
                dset = group.create_dataset(d, data=dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]], compression='gzip')
            # timeseries attributes live on the group, not per-date
            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            # dset1 still references the last date's dataset here
            group.attrs['FILE_LENGTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[0]
            group.attrs['WIDTH'] = shape(dset1[ysub[0]:ysub[1], xsub[0]:xsub[1]])[1]
            group.attrs['subset_x0'] = xsub[0]
            group.attrs['subset_x1'] = xsub[1]
            group.attrs['subset_y0'] = ysub[0]
            group.attrs['subset_y1'] = ysub[1]
            if 'X_FIRST' in h5file['timeseries'].attrs.keys():
                group.attrs['X_FIRST'] = float(h5file['timeseries'].attrs['X_FIRST']) + xsub[0] * float(h5file['timeseries'].attrs['X_STEP'])
                group.attrs['Y_FIRST'] = float(h5file['timeseries'].attrs['Y_FIRST']) + ysub[0] * float(h5file['timeseries'].attrs['Y_STEP'])
            h5file.close()
            h5out.close()

        # single-dataset products (velocity, mask, rmse, temporal coherence)
        elif 'temporal_coherence' in h5file.keys(
        ) or 'velocity' in h5file.keys() or 'mask' in h5file.keys(
        ) or 'rmse' in h5file.keys():
            print 'writing >>> ' + outName
            dset = h5file[k[0]].get(k[0])
            data = dset[ysub[0]:ysub[1], xsub[0]:xsub[1]]
            hfout = h5py.File(outName, 'w')
            group = hfout.create_group(k[0])
            group.create_dataset(k[0], data=data, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['FILE_LENGTH'] = data.shape[0]
            group.attrs['WIDTH'] = data.shape[1]
            group.attrs['XMIN'] = 0
            group.attrs['XMAX'] = data.shape[1] - 1
            group.attrs['YMIN'] = 0
            group.attrs['YMAX'] = data.shape[0] - 1
            group.attrs['subset_x0'] = xsub[0]
            group.attrs['subset_x1'] = xsub[1]
            group.attrs['subset_y0'] = ysub[0]
            group.attrs['subset_y1'] = ysub[1]
            if 'X_FIRST' in h5file[k[0]].attrs.keys():
                group.attrs['X_FIRST'] = float(h5file[k[0]].attrs['X_FIRST']) + xsub[0] * float(h5file[k[0]].attrs['X_STEP'])
                group.attrs['Y_FIRST'] = float(h5file[k[0]].attrs['Y_FIRST']) + ysub[0] * float(h5file[k[0]].attrs['Y_STEP'])
            h5file.close()
            hfout.close()

    elif ext in ['.unw', '.cor', '.hgt']:
        # ROI_PAC float32 products: amplitude + phase bands with a .rsc file
        a, p, r = readfile.read_float32(File)
        try:
            Latsub
            Lonsub
            try:
                r['X_FIRST']
                xsub = [0] * 2
                ysub = [0] * 2
                xsub[0] = int((Lonsub[0] - float(r['X_FIRST'])) / float(r['X_STEP']))
                xsub[1] = int((Lonsub[1] - float(r['X_FIRST'])) / float(r['X_STEP']))
                ysub[0] = int((Latsub[1] - float(r['Y_FIRST'])) / float(r['Y_STEP']))
                ysub[1] = int((Latsub[0] - float(r['Y_FIRST'])) / float(r['Y_STEP']))
                print 'Subseting geocoded', ext, ' file with Latitude and Longitude...'
            except:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage()
                sys.exit(1)
        except:
            Geo = 0
        a = a[ysub[0]:ysub[1], xsub[0]:xsub[1]]
        p = p[ysub[0]:ysub[1], xsub[0]:xsub[1]]
        print 'writing >>> ' + outName
        # NOTE(review): only the phase band `p` is written out here —
        # confirm writefile.write_float32 regenerates amplitude, else `a`
        # is cropped and discarded.
        writefile.write_float32(p, outName)
        r['FILE_LENGTH'] = str(p.shape[0])
        r['WIDTH'] = str(p.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        r['subset_x0'] = str(xsub[0])
        r['subset_x1'] = str(xsub[1])
        r['subset_y0'] = str(ysub[0])
        r['subset_y1'] = str(ysub[1])
        try:
            r['Y_FIRST'] = str(float(r['Y_FIRST']) + ysub[0] * float(r['Y_STEP']))
            r['X_FIRST'] = str(float(r['X_FIRST']) + xsub[0] * float(r['X_STEP']))
        except:
            Geo = 0
        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext == '.dem':
        d, r = readfile.read_dem(File)
        try:
            Latsub
            Lonsub
            # print Latsub
            try:
                r['X_FIRST']
                xsub = [0] * 2
                ysub = [0] * 2
                xsub[0] = int((Lonsub[0] - float(r['X_FIRST'])) / float(r['X_STEP']))
                xsub[1] = int((Lonsub[1] - float(r['X_FIRST'])) / float(r['X_STEP']))
                ysub[0] = int((Latsub[1] - float(r['Y_FIRST'])) / float(r['Y_STEP']))
                ysub[1] = int((Latsub[0] - float(r['Y_FIRST'])) / float(r['Y_STEP']))
                print 'Subseting', ext, ' file with Latitude and Longitude...'
            except:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage()
                sys.exit(1)
        except:
            Geo = 0
        d = d[ysub[0]:ysub[1], xsub[0]:xsub[1]]
        print 'writing >>> ' + outName
        writefile.write_dem(d, outName)
        r['FILE_LENGTH'] = str(d.shape[0])
        r['WIDTH'] = str(d.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        r['subset_x0'] = str(xsub[0])
        r['subset_x1'] = str(xsub[1])
        r['subset_y0'] = str(ysub[0])
        r['subset_y1'] = str(ysub[1])
        try:
            r['Y_FIRST'] = str(float(r['Y_FIRST']) + ysub[0] * float(r['Y_STEP']))
            r['X_FIRST'] = str(float(r['X_FIRST']) + xsub[0] * float(r['X_STEP']))
        except:
            Geo = 0
        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()

    elif ext in ['.jpeg', 'jpg', 'png']:
        # PIL-based crop for image products; requires a matching .rsc
        import Image
        im = Image.open(File)
        try:
            r = readfile.read_rsc_file(File + '.rsc')
        except:
            sys.exit(1)
        try:
            Latsub
            Lonsub
            try:
                r['X_FIRST']
                xsub = [0] * 2
                ysub = [0] * 2
                xsub[0] = int((Lonsub[0] - float(r['X_FIRST'])) / float(r['X_STEP']))
                xsub[1] = int((Lonsub[1] - float(r['X_FIRST'])) / float(r['X_STEP']))
                ysub[0] = int((Latsub[1] - float(r['Y_FIRST'])) / float(r['Y_STEP']))
                ysub[1] = int((Latsub[0] - float(r['Y_FIRST'])) / float(r['Y_STEP']))
                print 'Subseting geocoded', ext, ' file with Latitude and Longitude...'
            except:
                print 'Not geocoded file, cannot be subseted with LatLon.'
                Usage()
                sys.exit(1)
        except:
            Geo = 0
        # PIL box order is (left, upper, right, lower)
        box = (xsub[0], ysub[0], xsub[1], ysub[1])
        output_img = im.crop(box)
        print 'writing >>> ' + outName
        output_img.save(outName)
        # try:
        #     r=readfile.read_rsc_file(File+'.rsc')
        # except:
        #     sys.exit(1)
        r['FILE_LENGTH'] = str(ysub[1] - ysub[0])
        r['WIDTH'] = str(xsub[1] - xsub[0])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        r['subset_x0'] = str(xsub[0])
        r['subset_x1'] = str(xsub[1])
        r['subset_y0'] = str(ysub[0])
        r['subset_y1'] = str(ysub[1])
        try:
            r['Y_FIRST'] = str(float(r['Y_FIRST']) + ysub[0] * float(r['Y_STEP']))
            r['X_FIRST'] = str(float(r['X_FIRST']) + xsub[0] * float(r['X_STEP']))
        except:
            Geo = 0
        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + '    ' + r[k] + '\n')
        f.close()
def main(argv): try: file=argv[0] alks=float(argv[1]) rlks=float(argv[2]) except: Usage();sys.exit(1) ext = os.path.splitext(file)[1] outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks'+ext if ext == '.int' or ext == '.slc': a,p,r = readfile.read_complex64(file) plks=multilook(p,alks,rlks) alks=multilook(a,alks,rlks) r['FILE_LENGTH']=str(dlks.shape[0]) r['WIDTH']=str(dlks.shape[1]) r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP']=str(float(r['Y_STEP'])*alks) r['X_STEP']=str(float(r['X_STEP'])*rlks) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close() elif ext == '.unw' or ext == '.cor' or ext == '.hgt': a,p,r = readfile.read_float32(file) plks=multilook(p,alks,rlks) alks=multilook(a,alks,rlks) writefile.write_float32(plks,outName) r['FILE_LENGTH']=str(dlks.shape[0]) r['WIDTH']=str(dlks.shape[1]) r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP']=str(float(r['Y_STEP'])*alks) r['X_STEP']=str(float(r['X_STEP'])*rlks) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close() elif ext == ('.dem'): d,r = readfile.read_dem(file) dlks=multilook(d,alks,rlks) print 'writing '+outName writefile.write_dem(dlks,outName) r['FILE_LENGTH']=str(dlks.shape[0]) r['WIDTH']=str(dlks.shape[1]) r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP']=str(float(r['Y_STEP'])*alks) r['X_STEP']=str(float(r['X_STEP'])*rlks) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close() elif ext in ['.jpeg','jpg','png']: import Image im = Image.open(file) width = im.size[0] / int(rlks) height = im.size[1] / int(alks) imlks = im.resize((width, height), Image.NEAREST) print 'writing ' + outName imlks.save(outName) try: r=readfile.read_rsc_file(file+'.rsc') except: sys.exit(1) r['FILE_LENGTH']=str(height) r['WIDTH']=str(width) 
r['XMAX']=str(int(r['WIDTH']) - 1) r['YMAX']=str(int(r['FILE_LENGTH']) - 1) try: r['Y_STEP']=str(float(r['Y_STEP'])*alks) r['X_STEP']=str(float(r['X_STEP'])*rlks) except: Geo=0 f = open(outName+'.rsc','w') for k in r.keys(): f.write(k+' '+r[k]+'\n') f.close() elif ext == ('.h5'): h5file=h5py.File(file,'r') # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5' h5file_lks=h5py.File(outName,'w') if 'interferograms' in h5file.keys(): print 'Multilooking the interferograms' gg = h5file_lks.create_group('interferograms') igramList=h5file['interferograms'].keys() for igram in igramList: print igram unw = h5file['interferograms'][igram].get(igram) unwlks=multilook(unw,alks,rlks) group = gg.create_group(igram) dset = group.create_dataset(igram, data=unwlks, compression='gzip') for key, value in h5file['interferograms'][igram].attrs.iteritems(): group.attrs[key] = value group.attrs['WIDTH']=unwlks.shape[1] group.attrs['FILE_LENGTH']=unwlks.shape[0] try: group.attrs['Y_STEP']=alks*float(group.attrs['Y_STEP']) group.attrs['X_STEP']=rlks*float(group.attrs['X_STEP']) except: group.attrs['AZIMUTH_PIXEL_SIZE']=alks*float(group.attrs['AZIMUTH_PIXEL_SIZE']) group.attrs['RANGE_PIXEL_SIZE']=rlks*float(group.attrs['RANGE_PIXEL_SIZE']) dset1=h5file['mask'].get('mask') mask=dset1[0:dset1.shape[0],0:dset1.shape[1]] masklks=multilook(mask,alks,rlks) group=h5file_lks.create_group('mask') dset = group.create_dataset('mask', data=masklks, compression='gzip') elif 'timeseries' in h5file.keys(): print 'Multilooking the time-series' group = h5file_lks.create_group('timeseries') dateList=h5file['timeseries'].keys() for d in dateList: print d unw = h5file['timeseries'].get(d) unwlks=multilook(unw,alks,rlks) dset = group.create_dataset(d, data=unwlks, compression='gzip') for key,value in h5file['timeseries'].attrs.iteritems(): group.attrs[key] = value group.attrs['WIDTH']=unwlks.shape[1] group.attrs['FILE_LENGTH']=unwlks.shape[0] try: 
group.attrs['Y_STEP']=alks*float(group.attrs['Y_STEP']) group.attrs['X_STEP']=rlks*float(group.attrs['X_STEP']) except: group.attrs['AZIMUTH_PIXEL_SIZE']=alks*float(group.attrs['AZIMUTH_PIXEL_SIZE']) group.attrs['RANGE_PIXEL_SIZE']=rlks*float(group.attrs['RANGE_PIXEL_SIZE']) try: dset1 = h5file['mask'].get('mask') Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]] Masklks=multilook(Mask,alks,rlks) group=h5file_lks.create_group('mask') dset = group.create_dataset('mask', data=Masklks, compression='gzip') except: print 'Multilooked file does not include the maske' elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys(): k=h5file.keys() print 'multi looking the '+ k[0] group=h5file_lks.create_group(k[0]) dset1 = h5file[k[0]].get(k[0]) Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]] Masklks=multilook(Mask,alks,rlks) dset = group.create_dataset(k[0], data=Masklks, compression='gzip') for key , value in h5file[k[0]].attrs.iteritems(): group.attrs[key]=value try: group.attrs['Y_STEP']=alks*float(group.attrs['Y_STEP']) group.attrs['X_STEP']=rlks*float(group.attrs['X_STEP']) except: group.attrs['AZIMUTH_PIXEL_SIZE']=alks*float(group.attrs['AZIMUTH_PIXEL_SIZE']) group.attrs['RANGE_PIXEL_SIZE']=rlks*float(group.attrs['RANGE_PIXEL_SIZE']) group.attrs['WIDTH']=Masklks.shape[1] group.attrs['FILE_LENGTH']=Masklks.shape[0] h5file.close() h5file_lks.close()