# ----------------------------------------------------------------------------
# load_data.py : main()
# Loads unwrapped/wrapped interferograms and coherence files into HDF5 and
# builds a zero-amplitude mask.  Relies on the PySAR readfile module and on
# the helpers check_variable_name / check_number / check_size defined in the
# same script.
# ----------------------------------------------------------------------------
import os
import sys
import glob

import h5py
import numpy as np

import readfile


def main(argv):
    try:
        templateFile = argv[1]
    except:
        print '''
    *******************************************
       loading the processed data for PySAR:
          interferograms (unwrapped and wrapped)
          coherence files (generate Mask at the same time)

       Usage: load_data.py TEMPLATEFILE

    *******************************************
    '''
        sys.exit(1)

    templateContents = readfile.read_template(templateFile)
    projectName = os.path.basename(templateFile.partition('.')[0])

    ################# Assign working directory #################################
    try:
        # use TSSARDIR if the environment variable exists
        tssarProjectDir = os.getenv('TSSARDIR') + '/' + projectName
    except:
        # FA 7/2015: adopted for new directory structure
        tssarProjectDir = os.getenv('SCRATCHDIR') + '/' + projectName + '/TSSAR'

    print 'TSSAR project directory: ' + tssarProjectDir
    if not os.path.isdir(tssarProjectDir):
        os.mkdir(tssarProjectDir)

    ########### Use defaults if paths not given in template file ###############
    try:
        igramPath = templateContents['pysar.inputdata']
        igramPath = check_variable_name(igramPath)
    except:
        igramPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*c10.unw'

    try:
        corPath = templateContents['pysar.CorFiles']
        corPath = check_variable_name(corPath)
    except:
        corPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*.cor'

    try:
        wrapPath = templateContents['pysar.wrapped']
        wrapPath = check_variable_name(wrapPath)
    except:
        wrapPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/I*/filt_*0*sim_HDR_*rlks.int'

    ######################### Unwrapped Interferograms #########################
    # Note: the bare except below also swallows the SystemExit raised when
    # LoadedData.h5 already exists, so the script simply moves on.
    try:
        if os.path.isfile(tssarProjectDir + '/LoadedData.h5'):
            print '\nLoadedData.h5 already exists.\n'
            sys.exit(1)

        print 'loading interferograms ...'
        igramList = glob.glob(igramPath)
        k = 'interferograms'
        check_number(k, igramList)                                     # number check
        igramList, mode_width, mode_length = check_size(k, igramList)  # size check

        h5file = tssarProjectDir + '/LoadedData.h5'
        f = h5py.File(h5file)
        gg = f.create_group('interferograms')
        MaskZero = np.ones([int(mode_length), int(mode_width)])
        for igram in igramList:
            if not os.path.basename(igram) in f:
                print 'Adding ' + igram
                group = gg.create_group(os.path.basename(igram))
                amp, unw, unwrsc = readfile.read_float32(igram)
                MaskZero = amp * MaskZero      # pixels with zero amplitude in any pair get masked out
                dset = group.create_dataset(os.path.basename(igram), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(igram) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + ' already contains ' + os.path.basename(igram)

        Mask = np.ones([int(mode_length), int(mode_width)])
        Mask[MaskZero == 0] = 0
        gm = f.create_group('mask')
        dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()

        #########################################################################
        print 'writing to Mask.h5'
        h5mask = h5py.File('Mask.h5', 'w')
        group = h5mask.create_group('mask')
        dset = group.create_dataset('mask', data=Mask, compression='gzip')
        h5mask.close()

    except:
        print 'No unwrapped interferogram is loaded.\n'

    ############################### Coherence ###################################
    try:
        if os.path.isfile(tssarProjectDir + '/Coherence.h5'):
            print '\nCoherence.h5 already exists.\n'
            sys.exit(1)

        print 'loading correlation files ...'
        corList = glob.glob(corPath)
        k = 'coherence'
        check_number(k, corList)                                   # number check
        corList, mode_width, mode_length = check_size(k, corList)  # size check

        h5file = tssarProjectDir + '/Coherence.h5'
        fcor = h5py.File(h5file)
        gg = fcor.create_group('coherence')
        meanCoherence = np.zeros([int(mode_length), int(mode_width)])
        for cor in corList:
            if not os.path.basename(cor) in fcor:
                print 'Adding ' + cor
                group = gg.create_group(os.path.basename(cor))
                amp, unw, unwrsc = readfile.read_float32(cor)
                meanCoherence = meanCoherence + unw
                dset = group.create_dataset(os.path.basename(cor), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(cor) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + ' already contains ' + os.path.basename(cor)

        meanCoherence = meanCoherence / len(corList)

        print '********************************'
        print 'writing average_spatial_coherence.h5'
        h5file_CorMean = tssarProjectDir + '/average_spatial_coherence.h5'
        fcor_mean = h5py.File(h5file_CorMean, 'w')
        group = fcor_mean.create_group('mask')
        dset = group.create_dataset('mask', data=meanCoherence, compression='gzip')
        fcor_mean.close()

        print '********************************'
        print 'writing meanCoherence group to the interferogram file'
        gc = fcor.create_group('meanCoherence')
        dset = gc.create_dataset('meanCoherence', data=meanCoherence, compression='gzip')
        print '********************************'
        fcor.close()

    except:
        print 'No correlation file is loaded.\n'

    ########################## Wrapped Interferograms ###########################
    try:
        if os.path.isfile(tssarProjectDir + '/Wrapped.h5'):
            print '\nWrapped.h5 already exists.\n'
            sys.exit(1)

        print 'loading wrapped phase ...'
        wrapList = glob.glob(wrapPath)
        k = 'wrapped'
        check_number(k, wrapList)                                    # number check
        wrapList, mode_width, mode_length = check_size(k, wrapList)  # size check

        h5file = tssarProjectDir + '/Wrapped.h5'
        fw = h5py.File(h5file)
        gg = fw.create_group('wrapped')
        for wrap in wrapList:
            if not os.path.basename(wrap) in fw:
                print 'Adding ' + wrap
                group = gg.create_group(os.path.basename(wrap))
                amp, unw, unwrsc = readfile.read_complex64(wrap)
                dset = group.create_dataset(os.path.basename(wrap), data=unw, compression='gzip')
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc['DATE12'].split('-')
                baseline_file = os.path.dirname(wrap) + '/' + d1 + '_' + d2 + '_baseline.rsc'
                baseline = readfile.read_rsc_file(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
            else:
                print os.path.basename(h5file) + ' already contains ' + os.path.basename(wrap)
        fw.close()
        print 'Wrote ' + str(len(wrapList)) + ' wrapped interferograms to ' + h5file

    except:
        print 'No wrapped interferogram is loaded.\n'

    ########################### Geocoding lookup file ###########################
    try:
        geomapFile = templateContents['pysar.geomap']
        geomapFile = check_variable_name(geomapFile)
        cpCmd = 'cp ' + geomapFile + ' ' + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = 'cp ' + geomapFile + '.rsc ' + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
    except:
        print '*********************************'
        print 'Warning: no geomap file given'
        print '*********************************'
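# ----------------------------------------------------------------------------
# check_variable_name() is called above but is not part of this excerpt.  The
# sketch below only illustrates what such a helper could do -- expanding an
# environment variable such as $SCRATCHDIR at the start of a template value --
# and is an assumption, not the actual PySAR implementation.
# ----------------------------------------------------------------------------
import os


def check_variable_name_sketch(path):
    """Expand a leading $VARIABLE in a template path, e.g. '$SCRATCHDIR/...'."""
    path = path.strip()
    if path.startswith('$'):
        path = os.path.expandvars(path)
    return path

# e.g. check_variable_name_sketch('$SCRATCHDIR/MyProject/PROCESS/DONE/IFGRAM*/filt_*.unw')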
# ----------------------------------------------------------------------------
# Spatial filtering script : main()
# Filters ROI_PAC files or PySAR HDF5 products in space.  Relies on the PySAR
# readfile/writefile modules and on the spatial filter() helper defined in the
# same script (which shadows the Python builtin of the same name).
# ----------------------------------------------------------------------------
import os
import sys
import getopt

import h5py

import readfile
import writefile


def main(argv):
    try:
        opts, args = getopt.getopt(argv, "h:f:t:p:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)

    if opts == []:
        Usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            Usage()
            sys.exit()
        elif opt == "-f":
            file = arg
        elif opt == "-t":
            filtType = arg
        elif opt == "-p":
            par = arg

    # try:
    #     file = argv[0]
    #     alks = float(argv[1])
    #     rlks = float(argv[2])
    # except:
    #     Usage(); sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split(".")[0] + "_" + filtType + ext
    try:
        par
    except:
        par = []

    print "+++++++++++++++++++++++++++"
    print "Filter type : " + filtType
    print "parameters : " + str(par)
    print "+++++++++++++++++++++++++++"

    ###############################################
    # NOTE: the non-HDF5 branches below still call multilook() with alks/rlks,
    # which are not parsed from the -f/-t/-p options of this script (they were
    # carried over from the multilook script); only the .h5 branch applies the
    # requested spatial filter.
    if ext == ".int" or ext == ".slc":
        a, p, r = readfile.read_complex64(file)
        plks = multilook(p, alks, rlks)
        amplks = multilook(a, alks, rlks)

        r["FILE_LENGTH"] = str(plks.shape[0])
        r["WIDTH"] = str(plks.shape[1])
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()

    elif ext == ".unw" or ext == ".cor" or ext == ".hgt":
        a, p, r = readfile.read_float32(file)
        plks = multilook(p, alks, rlks)
        amplks = multilook(a, alks, rlks)
        writefile.write_float32(plks, outName)

        r["FILE_LENGTH"] = str(plks.shape[0])
        r["WIDTH"] = str(plks.shape[1])
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()

    elif ext == ".dem":
        d, r = readfile.read_dem(file)
        dlks = multilook(d, alks, rlks)
        print "writing " + outName
        writefile.write_dem(dlks, outName)

        r["FILE_LENGTH"] = str(dlks.shape[0])
        r["WIDTH"] = str(dlks.shape[1])
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()

    elif ext in [".jpeg", ".jpg", ".png"]:
        import Image
        im = Image.open(file)
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print "writing " + outName
        imlks.save(outName)
        try:
            r = readfile.read_rsc_file(file + ".rsc")
        except:
            sys.exit(1)

        r["FILE_LENGTH"] = str(height)
        r["WIDTH"] = str(width)
        r["XMAX"] = str(int(r["WIDTH"]) - 1)
        r["YMAX"] = str(int(r["FILE_LENGTH"]) - 1)
        try:
            r["Y_STEP"] = str(float(r["Y_STEP"]) * alks)
            r["X_STEP"] = str(float(r["X_STEP"]) * rlks)
        except:
            Geo = 0

        f = open(outName + ".rsc", "w")
        for k in r.keys():
            f.write(k + " " + r[k] + "\n")
        f.close()

    elif ext == ".h5":
        h5file = h5py.File(file, "r")
        h5file_lks = h5py.File(outName, "w")

        if "interferograms" in h5file.keys():
            print "Filtering the interferograms in space"
            gg = h5file_lks.create_group("interferograms")
            igramList = h5file["interferograms"].keys()
            for igram in igramList:
                print igram
                unwSet = h5file["interferograms"][igram].get(igram)
                unw = unwSet[0:unwSet.shape[0], 0:unwSet.shape[1]]
                unw = filter(unw, filtType, par)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression="gzip")
                for key, value in h5file["interferograms"][igram].attrs.iteritems():
                    group.attrs[key] = value

            dset1 = h5file["mask"].get("mask")
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            group = h5file_lks.create_group("mask")
            dset = group.create_dataset("mask", data=mask, compression="gzip")

        elif "timeseries" in h5file.keys():
            print "Filtering the time-series"
            group = h5file_lks.create_group("timeseries")
            dateList = h5file["timeseries"].keys()
            for d in dateList:
                print d
                dset1 = h5file["timeseries"].get(d)
                data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                data = filter(data, filtType, par)
                dset = group.create_dataset(d, data=data, compression="gzip")
            for key, value in h5file["timeseries"].attrs.iteritems():
                group.attrs[key] = value

            try:
                dset1 = h5file["mask"].get("mask")
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                group = h5file_lks.create_group("mask")
                dset = group.create_dataset("mask", data=Mask, compression="gzip")
            except:
                print "Filtered file does not include the mask"

        elif "temporal_coherence" in h5file.keys() or "velocity" in h5file.keys() or "mask" in h5file.keys():
            k = h5file.keys()
            print "filtering the " + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            data = filter(data, filtType, par)
            dset = group.create_dataset(k[0], data=data, compression="gzip")
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value

        h5file.close()
        h5file_lks.close()
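# ----------------------------------------------------------------------------
# The spatial filter() helper used in the .h5 branch above is defined elsewhere
# in the script.  The dispatcher below is only a sketch with the same
# (data, filtType, par) call signature; the filter-type names and the meaning
# of `par` are assumptions, and scipy.ndimage stands in for whatever kernels
# the real helper uses.
# ----------------------------------------------------------------------------
import numpy as np
from scipy import ndimage


def filter_sketch(data, filtType, par):
    """Return a spatially filtered copy of a 2-D array."""
    data = np.asarray(data, dtype=np.float64)
    if filtType == 'lowpass_gaussian':
        return ndimage.gaussian_filter(data, sigma=float(par))
    elif filtType == 'highpass_gaussian':
        return data - ndimage.gaussian_filter(data, sigma=float(par))
    elif filtType == 'lowpass_avg':
        return ndimage.uniform_filter(data, size=int(par))
    else:
        raise ValueError('unsupported filter type: ' + str(filtType))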
# ----------------------------------------------------------------------------
# Multilooking script : main()
# Downsamples ROI_PAC files or PySAR HDF5 products by averaging alks x rlks
# pixel blocks.  Relies on the PySAR readfile/writefile modules and on the
# multilook() helper defined in the same script.
# ----------------------------------------------------------------------------
import os
import sys

import h5py

import readfile
import writefile


def main(argv):
    try:
        file = argv[0]
        alks = float(argv[1])
        rlks = float(argv[2])
    except:
        Usage()
        sys.exit(1)

    ext = os.path.splitext(file)[1]
    outName = file.split('.')[0] + '_a' + str(int(alks)) + 'lks_r' + str(int(rlks)) + 'lks' + ext

    if ext == '.int' or ext == '.slc':
        a, p, r = readfile.read_complex64(file)
        plks = multilook(p, alks, rlks)
        amplks = multilook(a, alks, rlks)   # keep alks/rlks intact for the .rsc update below

        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH'] = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()

    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        a, p, r = readfile.read_float32(file)
        plks = multilook(p, alks, rlks)
        amplks = multilook(a, alks, rlks)
        writefile.write_float32(plks, outName)

        r['FILE_LENGTH'] = str(plks.shape[0])
        r['WIDTH'] = str(plks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()

    elif ext == '.dem':
        d, r = readfile.read_dem(file)
        dlks = multilook(d, alks, rlks)
        print 'writing ' + outName
        writefile.write_dem(dlks, outName)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()

    elif ext in ['.jpeg', '.jpg', '.png']:
        import Image
        im = Image.open(file)
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)
        try:
            r = readfile.read_rsc_file(file + '.rsc')
        except:
            sys.exit(1)

        r['FILE_LENGTH'] = str(height)
        r['WIDTH'] = str(width)
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()

    elif ext == '.h5':
        h5file = h5py.File(file, 'r')
        h5file_lks = h5py.File(outName, 'w')

        if 'interferograms' in h5file.keys():
            print 'Multilooking the interferograms'
            gg = h5file_lks.create_group('interferograms')
            igramList = h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unw = h5file['interferograms'][igram].get(igram)
                unwlks = multilook(unw, alks, rlks)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unwlks, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
                group.attrs['WIDTH'] = unwlks.shape[1]
                group.attrs['FILE_LENGTH'] = unwlks.shape[0]
                try:
                    group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP'])
                    group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP'])
                except:
                    group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                    group.attrs['RANGE_PIXEL_SIZE'] = rlks * float(group.attrs['RANGE_PIXEL_SIZE'])

            dset1 = h5file['mask'].get('mask')
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            masklks = multilook(mask, alks, rlks)
            group = h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=masklks, compression='gzip')

        elif 'timeseries' in h5file.keys():
            print 'Multilooking the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList = h5file['timeseries'].keys()
            for d in dateList:
                print d
                unw = h5file['timeseries'].get(d)
                unwlks = multilook(unw, alks, rlks)
                dset = group.create_dataset(d, data=unwlks, compression='gzip')
            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwlks.shape[1]
            group.attrs['FILE_LENGTH'] = unwlks.shape[0]
            try:
                group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP'])
                group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP'])
            except:
                group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                group.attrs['RANGE_PIXEL_SIZE'] = rlks * float(group.attrs['RANGE_PIXEL_SIZE'])

            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                Masklks = multilook(Mask, alks, rlks)
                group = h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Masklks, compression='gzip')
            except:
                print 'Multilooked file does not include the mask'

        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            k = h5file.keys()
            print 'multilooking the ' + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            Masklks = multilook(Mask, alks, rlks)
            dset = group.create_dataset(k[0], data=Masklks, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value
            try:
                group.attrs['Y_STEP'] = alks * float(group.attrs['Y_STEP'])
                group.attrs['X_STEP'] = rlks * float(group.attrs['X_STEP'])
            except:
                group.attrs['AZIMUTH_PIXEL_SIZE'] = alks * float(group.attrs['AZIMUTH_PIXEL_SIZE'])
                group.attrs['RANGE_PIXEL_SIZE'] = rlks * float(group.attrs['RANGE_PIXEL_SIZE'])
            group.attrs['WIDTH'] = Masklks.shape[1]
            group.attrs['FILE_LENGTH'] = Masklks.shape[0]

        h5file.close()
        h5file_lks.close()
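# ----------------------------------------------------------------------------
# multilook() is called above but is not part of this excerpt.  The block-
# averaging sketch below has the same (data, alks, rlks) call signature; the
# real PySAR helper may differ, e.g. in how it handles edge pixels.
# ----------------------------------------------------------------------------
import numpy as np


def multilook_sketch(data, alks, rlks):
    """Average a 2-D array over alks x rlks pixel blocks (azimuth x range looks)."""
    data = np.asarray(data)
    alks, rlks = int(alks), int(rlks)
    rows_lks = data.shape[0] // alks
    cols_lks = data.shape[1] // rlks
    # crop to a whole number of looks, then average each block
    d = data[0:rows_lks * alks, 0:cols_lks * rlks]
    d = d.reshape(rows_lks, alks, cols_lks, rlks)
    return d.mean(axis=(1, 3))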
# ----------------------------------------------------------------------------
# Geocoding script : main()
# Geocodes PySAR HDF5 products by dumping each 2-D dataset to a temporary
# ROI_PAC float32/complex64 file, running geocode.pl with the geomap lookup
# table, and loading the geocoded result back into a new geo_*.h5 file.
# Relies on the PySAR readfile/writefile modules.
# ----------------------------------------------------------------------------
import os
import sys

import h5py
import numpy as np

import readfile
import writefile


def main(argv):
    try:
        file = argv[0]
        geomap = argv[1]
    except:
        Usage()
        sys.exit(1)

    fileName = os.path.basename(file).split('.')[0]
    h5file = h5py.File(file, 'r')
    k = h5file.keys()

    if k[0] in ('velocity', 'temporal_coherence', 'mask', 'rmse') and 'timeseries' not in k:
        dset = h5file[k[0]].get(k[0])
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        outname = fileName + '.unw'
        print 'writing to roi_pac unw file format'
        writefile.write_float32(data, outname)
        f = open(outname + '.rsc', 'w')
        f.write('FILE_LENGTH ' + str(data.shape[0]) + '\n')
        f.write('WIDTH ' + str(data.shape[1]) + '\n')
        f.close()

        geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
        print geoCmd
        os.system(geoCmd)

        print 'reading geocoded file and writing it to h5 format'
        amp, unw, unwrsc = readfile.read_float32('geo_' + outname)

        # clean up the temporary roi_pac files
        rmCmd = 'rm ' + outname;               os.system(rmCmd);  print rmCmd
        rmCmd = 'rm ' + outname + '.rsc';      os.system(rmCmd);  print rmCmd
        rmCmd = 'rm geo_' + outname;           os.system(rmCmd);  print rmCmd
        rmCmd = 'rm geo_' + outname + '.rsc';  os.system(rmCmd);  print rmCmd

        f = h5py.File('geo_' + file, 'w')
        group = f.create_group(k[0])
        dset = group.create_dataset(k[0], data=unw, compression='gzip')
        for key, value in h5file[k[0]].attrs.iteritems():
            group.attrs[key] = value
        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        f.close()
        h5file.close()

    elif 'timeseries' in k:
        print 'geocoding timeseries:'
        outname = 'epoch_temp.unw'
        f = h5py.File('geo_' + file, 'w')
        group = f.create_group('timeseries')
        epochList = h5file['timeseries'].keys()
        for epoch in epochList:
            print 'geocoding ' + epoch
            d = h5file['timeseries'].get(epoch)
            data = d[0:d.shape[0], 0:d.shape[1]]
            writefile.write_float32(data, outname)
            # use a separate handle for the .rsc file so the open HDF5 file `f`
            # is not clobbered (the original code reused the name `f` here)
            f_rsc = open(outname + '.rsc', 'w')
            f_rsc.write('FILE_LENGTH ' + str(data.shape[0]) + '\n')
            f_rsc.write('WIDTH ' + str(data.shape[1]) + '\n')
            f_rsc.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and adding it to geo_' + file
            amp, unw, unwrsc = readfile.read_float32('geo_' + outname)
            dset = group.create_dataset(epoch, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;               os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';      os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;           os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc';  os.system(rmCmd);  print rmCmd

        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        for key, value in h5file['timeseries'].attrs.iteritems():
            group.attrs[key] = value
        group.attrs['WIDTH'] = unwrsc['WIDTH']
        group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']
        f.close()
        h5file.close()

    elif k[0] == 'interferograms':
        print 'geocoding interferograms:'
        outname = 'igram_temp.unw'
        f = h5py.File('geo_' + file, 'w')
        gg = f.create_group('interferograms')
        igramList = h5file[k[0]].keys()
        for igram in igramList:
            print 'geocoding ' + igram
            group = gg.create_group('geo_' + igram)
            d = h5file['interferograms'][igram].get(igram)
            data = d[0:d.shape[0], 0:d.shape[1]]
            writefile.write_float32(data, outname)
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and adding it to geo_' + file
            amp, unw, unwrsc = readfile.read_float32('geo_' + outname)
            if igram == igramList[0]:
                MaskZero = np.ones([unw.shape[0], unw.shape[1]])
            MaskZero = amp * MaskZero   # accumulate zero-amplitude pixels over all pairs
            dset = group.create_dataset('geo_' + igram, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;               os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';      os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;           os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc';  os.system(rmCmd);  print rmCmd

            for key, value in unwrsc.iteritems():
                group.attrs[key] = value
            for key, value in h5file['interferograms'][igram].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwrsc['WIDTH']
            group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']

        Mask = np.ones(MaskZero.shape)
        Mask[MaskZero == 0] = 0
        gm = f.create_group('mask')
        dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()
        h5file.close()

    elif k[0] == 'coherence':
        print 'geocoding coherence:'
        outname = 'cor_temp.unw'
        f = h5py.File('geo_' + file, 'w')
        gg = f.create_group(k[0])
        corList = h5file[k[0]].keys()
        for cor in corList:
            print 'geocoding ' + cor
            group = gg.create_group('geo_' + cor)
            d = h5file[k[0]][cor].get(cor)
            data = d[0:d.shape[0], 0:d.shape[1]]
            writefile.write_float32(data, outname)
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and adding it to geo_' + file
            amp, unw, unwrsc = readfile.read_float32('geo_' + outname)
            dset = group.create_dataset('geo_' + cor, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;               os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';      os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;           os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc';  os.system(rmCmd);  print rmCmd

            for key, value in unwrsc.iteritems():
                group.attrs[key] = value
            for key, value in h5file[k[0]][cor].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwrsc['WIDTH']
            group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']

        f.close()
        h5file.close()

    elif k[0] == 'wrapped':
        print 'geocoding wrapped interferograms:'
        outname = 'wrap_temp.int'
        f = h5py.File('geo_' + file, 'w')
        gg = f.create_group(k[0])
        wrapList = h5file[k[0]].keys()
        for wrap in wrapList:
            print 'geocoding ' + wrap
            group = gg.create_group('geo_' + wrap)
            d = h5file[k[0]][wrap].get(wrap)
            data = d[0:d.shape[0], 0:d.shape[1]]
            writefile.write_complex64(data, outname)
            f_temp = open(outname + '.rsc', 'w')
            f_temp.write('FILE_LENGTH ' + str(data.shape[0]) + '\n')
            f_temp.write('WIDTH ' + str(data.shape[1]) + '\n')
            f_temp.close()

            geoCmd = 'geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname
            print geoCmd
            os.system(geoCmd)

            print 'reading geocoded file and adding it to geo_' + file
            amp, unw, unwrsc = readfile.read_complex64('geo_' + outname)
            dset = group.create_dataset('geo_' + wrap, data=unw, compression='gzip')

            rmCmd = 'rm ' + outname;               os.system(rmCmd);  print rmCmd
            rmCmd = 'rm ' + outname + '.rsc';      os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname;           os.system(rmCmd);  print rmCmd
            rmCmd = 'rm geo_' + outname + '.rsc';  os.system(rmCmd);  print rmCmd

            for key, value in unwrsc.iteritems():
                group.attrs[key] = value
            for key, value in h5file[k[0]][wrap].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['WIDTH'] = unwrsc['WIDTH']
            group.attrs['FILE_LENGTH'] = unwrsc['FILE_LENGTH']

        f.close()
        h5file.close()
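# ----------------------------------------------------------------------------
# Every branch above repeats the same pattern: dump one 2-D dataset to a
# temporary ROI_PAC file with a minimal .rsc, run geocode.pl with the geomap
# lookup table, read the geocoded result back, and delete the temporaries.
# The helper below is a sketch of that pattern (it is not part of the original
# script); it assumes the same PySAR readfile/writefile modules imported above.
# ----------------------------------------------------------------------------
import os

import readfile
import writefile


def geocode_one_sketch(data, geomap, outname):
    """Geocode one float32 array via geocode.pl; return (amp, unw, rsc_dict)."""
    writefile.write_float32(data, outname)
    with open(outname + '.rsc', 'w') as f_rsc:
        f_rsc.write('FILE_LENGTH ' + str(data.shape[0]) + '\n')
        f_rsc.write('WIDTH ' + str(data.shape[1]) + '\n')
    os.system('geocode.pl ' + geomap + ' ' + outname + ' geo_' + outname)
    amp, unw, rsc = readfile.read_float32('geo_' + outname)
    for name in (outname, outname + '.rsc', 'geo_' + outname, 'geo_' + outname + '.rsc'):
        os.system('rm ' + name)
    return amp, unw, rsc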