def main(argv):
    """Command-line driver: spatially filter a ROI_PAC / PySAR file.

    Options (Python 2 getopt):
        -f  input file name
        -t  filter type, passed through to the module-level `filter` helper
        -p  filter parameters (optional; defaults to [])
        -h  print usage and exit

    Side effects only: writes the filtered product next to the input as
    <base>_<filtType><ext> (plus a .rsc file for binary formats).
    """
    try:
        opts, args = getopt.getopt(argv, "h:f:t:p:")
    except getopt.GetoptError:
        usage()
        sys.exit(1)
    if opts == []:
        usage()
        sys.exit(1)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt == '-f':
            # NOTE(review): `file` shadows the Python 2 builtin of the same name.
            file = arg
        elif opt == '-t':
            filtType = arg
        elif opt == '-p':
            par = arg

    # Output name: <basename>_<filtType> with the original extension preserved.
    ext = os.path.splitext(file)[1]
    outName = file.split('.')[0] + '_' + filtType + ext
    # NameError probe: if -p was not given, `par` is unbound -> default to [].
    try:
        par
    except:
        par = []

    #print '+++++++++++++++++++++++++++'
    print 'Filter type : ' + filtType
    print 'parameters : ' + str(par)
    #print '+++++++++++++++++++++++++++'
    ###############################################
    if ext == '.int' or ext == '.slc':
        # Complex interferogram / SLC: amplitude a, phase p, rsc attributes r.
        a, p, r = readfile.read_complex_float32(file)
        # NOTE(review): `alks` and `rlks` are never assigned anywhere in this
        # function (only -h/-f/-t/-p are parsed), so this branch raises
        # NameError. These multilook calls look like leftovers from a
        # multilook script rather than a filtering step — TODO confirm.
        plks = multilook(p, alks, rlks)
        # NOTE(review): this overwrites the looks factor `alks` with an array.
        alks = multilook(a, alks, rlks)

        # NOTE(review): `dlks` is never assigned in this branch -> NameError.
        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        # Geocoded files carry X/Y_STEP; radar-coordinate files don't, in
        # which case the KeyError is swallowed and a (unused) flag is set.
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        # Write the updated ROI_PAC attribute file for the output.
        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()
    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        # Real float32 products (unwrapped phase, coherence, height).
        a, p, r = readfile.read_float32(file)
        # NOTE(review): same undefined `alks`/`rlks` problem as the branch above.
        plks = multilook(p, alks, rlks)
        alks = multilook(a, alks, rlks)
        writefile.write_float32(plks, outName)

        # NOTE(review): `dlks` undefined here as well -> NameError.
        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()
    elif ext == ('.dem'):
        # Int16 DEM: here the looked array IS named dlks, consistent with the
        # rsc update below (unlike the two branches above).
        d, r = readfile.read_real_int16(file)
        dlks = multilook(d, alks, rlks)
        print 'writing ' + outName
        writefile.write_real_int16(dlks, outName)

        r['FILE_LENGTH'] = str(dlks.shape[0])
        r['WIDTH'] = str(dlks.shape[1])
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()
    elif ext in ['.jpeg', 'jpg', 'png']:
        # NOTE(review): '.jpg' here lacks the leading dot, so real .jpg files
        # never match (os.path.splitext keeps the dot) — confirm intent.
        im = Image.open(file)
        # Python 2 integer division: downsampled size is floor(size/looks).
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)

        try:
            r = readfile.read_roipac_rsc(file + '.rsc')
        except:
            # No .rsc alongside the image: give up silently.
            sys.exit(1)

        r['FILE_LENGTH'] = str(height)
        r['WIDTH'] = str(width)
        r['XMAX'] = str(int(r['WIDTH']) - 1)
        r['YMAX'] = str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP'] = str(float(r['Y_STEP']) * alks)
            r['X_STEP'] = str(float(r['X_STEP']) * rlks)
        except:
            Geo = 0

        f = open(outName + '.rsc', 'w')
        for k in r.keys():
            f.write(k + ' ' + r[k] + '\n')
        f.close()
    elif ext == ('.h5'):
        # PySAR HDF5 products: filter every dataset, copy attributes and mask.
        h5file = h5py.File(file, 'r')
        # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks = h5py.File(outName, 'w')
        if 'interferograms' in h5file.keys():
            print 'Filtering the interferograms in space'
            gg = h5file_lks.create_group('interferograms')
            igramList = h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unwSet = h5file['interferograms'][igram].get(igram)
                # Full-slice read pulls the dataset into memory as ndarray.
                unw = unwSet[0:unwSet.shape[0], 0:unwSet.shape[1]]
                # NOTE(review): `filter` presumably resolves to a module-level
                # spatial-filter helper that shadows the builtin — confirm.
                unw = filter(unw, filtType, par)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
            # Copy the mask through unfiltered.
            # NOTE(review): raises KeyError if the input has no 'mask' group.
            dset1 = h5file['mask'].get('mask')
            mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            group = h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=mask, compression='gzip')
        elif 'timeseries' in h5file.keys():
            print 'Filtering the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList = h5file['timeseries'].keys()
            for d in dateList:
                print d
                dset1 = h5file['timeseries'].get(d)
                data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                data = filter(data, filtType, par)
                dset = group.create_dataset(d, data=data, compression='gzip')
            # Time-series attributes live on the group, not per-date.
            for key, value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            # Mask is optional for time-series files.
            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
                # Masklks=multilook(Mask,alks,rlks)
                group = h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Mask, compression='gzip')
            except:
                print 'Filterd file does not include the maske'
        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            # Single-dataset files: group name == dataset name == k[0].
            k = h5file.keys()
            print 'filtering the ' + k[0]
            group = h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            data = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
            data = filter(data, filtType, par)
            dset = group.create_dataset(k[0], data=data, compression='gzip')
            for key, value in h5file[k[0]].attrs.iteritems():
                group.attrs[key] = value
        h5file.close()
        h5file_lks.close()
        print 'writing >>> ' + outName
def main(argv):
    """Command-line driver: spatially filter a ROI_PAC / PySAR file.

    NOTE(review): this is a near-duplicate of the `main` defined earlier in
    this file (differences: `Usage` vs `usage`, a local `import Image`); as
    the later top-level definition it SHADOWS the earlier one — confirm which
    copy is intended to survive.

    Options: -f input file, -t filter type, -p parameters (optional), -h help.
    Side effects only: writes <base>_<filtType><ext> (plus .rsc where needed).
    """
    try:
        opts, args = getopt.getopt(argv,"h:f:t:p:")
    except getopt.GetoptError:
        Usage() ; sys.exit(1)
    if opts==[]:
        Usage() ; sys.exit(1)
    for opt,arg in opts:
        if opt in ("-h","--help"):
            Usage(); sys.exit()
        elif opt == '-f':
            # NOTE(review): `file` shadows the Python 2 builtin.
            file = arg
        elif opt == '-t':
            filtType = arg
        elif opt == '-p':
            par = arg

    ext = os.path.splitext(file)[1]
    outName=file.split('.')[0]+'_'+filtType+ext
    # NameError probe: default `par` to [] when -p was not supplied.
    try:
        par
    except:
        par=[]

    #print '+++++++++++++++++++++++++++'
    print 'Filter type : '+filtType
    print 'parameters : ' + str(par)
    #print '+++++++++++++++++++++++++++'
    ###############################################
    if ext == '.int' or ext == '.slc':
        a,p,r = readfile.read_complex_float32(file)
        # NOTE(review): `alks`/`rlks` are never assigned in this function and
        # `dlks` below is never assigned in this branch -> NameError. Looks
        # like leftovers from a multilook script — confirm. Also note
        # `alks=multilook(...)` clobbers the looks factor with an array.
        plks=multilook(p,alks,rlks)
        alks=multilook(a,alks,rlks)

        r['FILE_LENGTH']=str(dlks.shape[0])
        r['WIDTH']=str(dlks.shape[1])
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        # X/Y_STEP only exist for geocoded files; KeyError means radar coords.
        try:
            r['Y_STEP']=str(float(r['Y_STEP'])*alks)
            r['X_STEP']=str(float(r['X_STEP'])*rlks)
        except:
            Geo=0

        # Write the updated ROI_PAC attribute file.
        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()
    elif ext == '.unw' or ext == '.cor' or ext == '.hgt':
        a,p,r = readfile.read_float32(file)
        # NOTE(review): same undefined `alks`/`rlks`/`dlks` problem as above.
        plks=multilook(p,alks,rlks)
        alks=multilook(a,alks,rlks)
        writefile.write_float32(plks,outName)

        r['FILE_LENGTH']=str(dlks.shape[0])
        r['WIDTH']=str(dlks.shape[1])
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP']=str(float(r['Y_STEP'])*alks)
            r['X_STEP']=str(float(r['X_STEP'])*rlks)
        except:
            Geo=0

        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()
    elif ext == ('.dem'):
        # Int16 DEM: here the looked array IS `dlks`, matching the rsc update.
        d,r = readfile.read_real_int16(file)
        dlks=multilook(d,alks,rlks)
        print 'writing '+outName
        writefile.write_real_int16(dlks,outName)

        r['FILE_LENGTH']=str(dlks.shape[0])
        r['WIDTH']=str(dlks.shape[1])
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP']=str(float(r['Y_STEP'])*alks)
            r['X_STEP']=str(float(r['X_STEP'])*rlks)
        except:
            Geo=0

        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()
    elif ext in ['.jpeg','jpg','png']:
        # NOTE(review): 'jpg'/'png' lack the leading dot, so real .jpg/.png
        # files never match this branch — confirm intent.
        import Image
        im = Image.open(file)
        # Python 2 integer division: floor(size/looks).
        width = im.size[0] / int(rlks)
        height = im.size[1] / int(alks)
        imlks = im.resize((width, height), Image.NEAREST)
        print 'writing ' + outName
        imlks.save(outName)

        try:
            r=readfile.read_roipac_rsc(file+'.rsc')
        except:
            # No .rsc alongside the image: give up silently.
            sys.exit(1)

        r['FILE_LENGTH']=str(height)
        r['WIDTH']=str(width)
        r['XMAX']=str(int(r['WIDTH']) - 1)
        r['YMAX']=str(int(r['FILE_LENGTH']) - 1)
        try:
            r['Y_STEP']=str(float(r['Y_STEP'])*alks)
            r['X_STEP']=str(float(r['X_STEP'])*rlks)
        except:
            Geo=0

        f = open(outName+'.rsc','w')
        for k in r.keys():
            f.write(k+' '+r[k]+'\n')
        f.close()
    elif ext == ('.h5'):
        # PySAR HDF5 products: filter each dataset, copy attrs and mask.
        h5file=h5py.File(file,'r')
        # outName=file.split('.')[0]+'_a'+str(int(alks))+'lks_r'+str(int(rlks))+'lks.h5'
        h5file_lks=h5py.File(outName,'w')
        if 'interferograms' in h5file.keys():
            print 'Filtering the interferograms in space'
            gg = h5file_lks.create_group('interferograms')
            igramList=h5file['interferograms'].keys()
            for igram in igramList:
                print igram
                unwSet = h5file['interferograms'][igram].get(igram)
                # Full-slice read materializes the dataset as an ndarray.
                unw=unwSet[0:unwSet.shape[0],0:unwSet.shape[1]]
                # NOTE(review): `filter` presumably resolves to a module-level
                # spatial-filter helper shadowing the builtin — confirm.
                unw=filter(unw,filtType,par)
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=unw, compression='gzip')
                for key, value in h5file['interferograms'][igram].attrs.iteritems():
                    group.attrs[key] = value
            # Copy the mask through unfiltered (KeyError if absent).
            dset1=h5file['mask'].get('mask')
            mask=dset1[0:dset1.shape[0],0:dset1.shape[1]]
            group=h5file_lks.create_group('mask')
            dset = group.create_dataset('mask', data=mask, compression='gzip')
        elif 'timeseries' in h5file.keys():
            print 'Filtering the time-series'
            group = h5file_lks.create_group('timeseries')
            dateList=h5file['timeseries'].keys()
            for d in dateList:
                print d
                dset1 = h5file['timeseries'].get(d)
                data=dset1[0:dset1.shape[0],0:dset1.shape[1]]
                data=filter(data,filtType,par)
                dset = group.create_dataset(d, data=data, compression='gzip')
            # Time-series attributes live on the group, not per-date.
            for key,value in h5file['timeseries'].attrs.iteritems():
                group.attrs[key] = value
            # Mask is optional for time-series files.
            try:
                dset1 = h5file['mask'].get('mask')
                Mask = dset1[0:dset1.shape[0],0:dset1.shape[1]]
                # Masklks=multilook(Mask,alks,rlks)
                group=h5file_lks.create_group('mask')
                dset = group.create_dataset('mask', data=Mask, compression='gzip')
            except:
                print 'Filterd file does not include the maske'
        elif 'temporal_coherence' in h5file.keys() or 'velocity' in h5file.keys() or 'mask' in h5file.keys():
            # Single-dataset files: group name == dataset name == k[0].
            k=h5file.keys()
            print 'filtering the '+ k[0]
            group=h5file_lks.create_group(k[0])
            dset1 = h5file[k[0]].get(k[0])
            data = dset1[0:dset1.shape[0],0:dset1.shape[1]]
            data = filter(data,filtType,par)
            dset = group.create_dataset(k[0], data=data, compression='gzip')
            for key , value in h5file[k[0]].attrs.iteritems():
                group.attrs[key]=value
        h5file.close()
        h5file_lks.close()
        print 'writing >>> '+outName
def main(argv):
    """Load processed interferometry products into the PySAR TSSAR directory.

    argv[1] is the template file; argv[2]/argv[3] optionally override the
    PROCESS and TSSAR directories (otherwise derived from environment vars).
    Side effects only: creates LoadedData.h5, Mask.h5, Coherence.h5,
    average_spatial_coherence.h5, Wrapped.h5 and copies geomap/DEM files
    into the TSSAR directory.

    NOTE(review): each loading section is wrapped in a bare `except:` that
    also swallows SystemExit — so the `sys.exit(1)` taken on the
    "... already exists." paths does NOT terminate the program; it just falls
    through to the section's "No ... is loaded." message — confirm intent.
    """
    try:
        templateFile = argv[1]
    except:
        Usage()
        sys.exit(1)

    from pysar._pysar_utilities import check_variable_name
    templateContents = readfile.read_template(templateFile)
    # Project name = template basename up to the first dot.
    projectName = os.path.basename(templateFile).partition(".")[0]

    # Directories from argv if given, else from environment.
    # NOTE(review): the guard checks PARENTDIR but then reads SCRATCHDIR —
    # looks inconsistent; confirm which variable is authoritative.
    try:
        processProjectDir = argv[2]
        tssarProjectDir = argv[3]
    except:
        if os.getenv("PARENTDIR"):
            processProjectDir = os.getenv("SCRATCHDIR") + "/" + projectName + "/PROCESS"
            tssarProjectDir = os.getenv("SCRATCHDIR") + "/" + projectName + "/TSSAR"
        else:
            processProjectDir = os.getenv("PROCESSDIR") + "/" + projectName
            tssarProjectDir = os.getenv("TSSARDIR") + "/" + projectName
    print "\n*************** Loading Data into PySAR ****************"
    print "PROCESS directory: " + processProjectDir
    print "TSSAR directory: " + tssarProjectDir
    if not os.path.isdir(tssarProjectDir):
        os.mkdir(tssarProjectDir)

    ########### Use defaults if paths not given in template file #########
    import h5py
    import numpy as np
    # Map product kind -> template option name (used by check_number).
    optionName = {}
    optionName["interferograms"] = "pysar.inputFiles"
    optionName["coherence"] = "pysar.corFiles"
    optionName["wrapped"] = "pysar.wrappedFiles"
    optionName["geomap"] = "pysar.geomap"
    optionName["demGeo"] = "pysar.dem.geoCoord"
    optionName["demRdr"] = "pysar.dem.radarCoord"

    # Glob pattern for unwrapped interferograms (template or default).
    try:
        igramPath = templateContents["pysar.inputFiles"]
        igramPath = check_variable_name(igramPath)
    except:
        igramPath = processProjectDir + "/DONE/IFGRAM*/filt_*.unw"
    print "Path pattern for unwrapped interferogram: " + igramPath
    # except: igramPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*.unw'

    # Glob pattern for coherence files.
    try:
        corPath = templateContents["pysar.corFiles"]
        corPath = check_variable_name(corPath)
    except:
        corPath = processProjectDir + "/DONE/IFGRAM*/filt_*rlks.cor"
    print "Path pattern for coherence: " + corPath

    # Glob pattern for wrapped interferograms.
    try:
        wrapPath = templateContents["pysar.wrappedFiles"]
        wrapPath = check_variable_name(wrapPath)
    except:
        wrapPath = processProjectDir + "/DONE/IFGRAM*/filt_*rlks.int"
    print "Path pattern for wrapped interferogram: " + wrapPath

    # try: demRdrPath = templateContents['pysar.dem.radarCoord']; demRdrPath = check_variable_name(demRdrPath)
    # except:
    # demRdrList=glob.glob(demRdrPath)

    ###########################################################################
    ######################### Unwrapped Interferograms ########################
    try:
        if os.path.isfile(tssarProjectDir + "/LoadedData.h5"):
            print "\nLoadedData.h5" + " already exists."
            sys.exit(1)
        igramList = glob.glob(igramPath)
        igramList = sorted(igramList)
        k = "interferograms"
        check_number(k, optionName[k], igramList)  # number check
        print "loading interferograms ..."
        igramList, mode_width, mode_length = check_size(k, igramList)  # size check
        igramList = sorted(igramList)

        h5file = tssarProjectDir + "/LoadedData.h5"
        f = h5py.File(h5file, "w")
        gg = f.create_group("interferograms")
        # MaskZero accumulates zero-amplitude pixels across all interferograms.
        MaskZero = np.ones([int(mode_length), int(mode_width)])
        for igram in igramList:
            if not os.path.basename(igram) in f:
                print "Adding " + igram
                group = gg.create_group(os.path.basename(igram))
                amp, unw, unwrsc = readfile.read_float32(igram)
                MaskZero *= amp
                dset = group.create_dataset(os.path.basename(igram), data=unw, compression="gzip")
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                # Attach the pair's baseline attributes, located next to the
                # interferogram as <d1>_<d2>_baseline.rsc.
                d1, d2 = unwrsc["DATE12"].split("-")
                baseline_file = os.path.dirname(igram) + "/" + d1 + "_" + d2 + "_baseline.rsc"
                baseline = readfile.read_roipac_rsc(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
                group.attrs["PROJECT_NAME"] = projectName
                group.attrs["UNIT"] = "radian"
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(igram)
        # Mask: 1 everywhere except pixels whose amplitude was 0 in ANY igram.
        Mask = np.ones([int(mode_length), int(mode_width)])
        Mask[MaskZero == 0] = 0
        # gm = f.create_group('mask')
        # dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()

        ############## Mask file ###############
        print "writing to Mask.h5\n"
        # Mask=np.ones([int(mode_length),int(mode_width)])
        # Mask[MaskZero==0]=0
        h5file = tssarProjectDir + "/Mask.h5"
        h5mask = h5py.File(h5file, "w")
        group = h5mask.create_group("mask")
        # NOTE(review): os.path.basename("mask") is just "mask".
        dset = group.create_dataset(os.path.basename("mask"), data=Mask, compression="gzip")
        # `unwrsc` here is whatever the LAST loop iteration left behind.
        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        h5mask.close()
    except:
        # NOTE(review): bare except hides real failures (and the early
        # sys.exit above) behind this message.
        print "No unwrapped interferogram is loaded.\n"

    ########################################################################
    ############################# Coherence ################################
    try:
        if os.path.isfile(tssarProjectDir + "/Coherence.h5"):
            print "\nCoherence.h5" + " already exists."
            sys.exit(1)
        corList = glob.glob(corPath)
        corList = sorted(corList)
        k = "coherence"
        check_number(k, optionName[k], corList)  # number check
        print "loading coherence files ..."
        corList, mode_width, mode_length = check_size(k, corList)  # size check
        corList = sorted(corList)

        h5file = tssarProjectDir + "/Coherence.h5"
        fcor = h5py.File(h5file, "w")
        gg = fcor.create_group("coherence")
        # Running sum for the average spatial coherence map.
        meanCoherence = np.zeros([int(mode_length), int(mode_width)])
        for cor in corList:
            if not os.path.basename(cor) in fcor:
                print "Adding " + cor
                group = gg.create_group(os.path.basename(cor))
                amp, unw, unwrsc = readfile.read_float32(cor)
                meanCoherence += unw
                dset = group.create_dataset(os.path.basename(cor), data=unw, compression="gzip")
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                d1, d2 = unwrsc["DATE12"].split("-")
                baseline_file = os.path.dirname(cor) + "/" + d1 + "_" + d2 + "_baseline.rsc"
                baseline = readfile.read_roipac_rsc(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
                group.attrs["PROJECT_NAME"] = projectName
                group.attrs["UNIT"] = "1"
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(cor)
        # fcor.close()

        ########### mean coherence file ###############
        # NOTE(review): ZeroDivisionError if corList is empty — caught by the
        # bare except below, which then reports "No correlation file".
        meanCoherence = meanCoherence / (len(corList))
        print "writing meanCoherence group to the coherence h5 file"
        gc = fcor.create_group("meanCoherence")
        dset = gc.create_dataset("meanCoherence", data=meanCoherence, compression="gzip")

        print "writing average_spatial_coherence.h5\n"
        h5file_CorMean = tssarProjectDir + "/average_spatial_coherence.h5"
        fcor_mean = h5py.File(h5file_CorMean, "w")
        group = fcor_mean.create_group("mask")
        dset = group.create_dataset(os.path.basename("mask"), data=meanCoherence, compression="gzip")
        # `unwrsc` is from the last coherence file read in the loop.
        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        fcor_mean.close()

        fcor.close()
    except:
        print "No correlation file is loaded.\n"

    ##############################################################################
    ########################## Wrapped Interferograms ############################
    try:
        if os.path.isfile(tssarProjectDir + "/Wrapped.h5"):
            print "\nWrapped.h5" + " already exists."
            sys.exit(1)
        wrapList = glob.glob(wrapPath)
        wrapList = sorted(wrapList)
        k = "wrapped"
        check_number(k, optionName[k], wrapList)  # number check
        print "loading wrapped phase ..."
        wrapList, mode_width, mode_length = check_size(k, wrapList)  # size check
        wrapList = sorted(wrapList)

        h5file = tssarProjectDir + "/Wrapped.h5"
        fw = h5py.File(h5file, "w")
        gg = fw.create_group("wrapped")
        for wrap in wrapList:
            if not os.path.basename(wrap) in fw:
                print "Adding " + wrap
                group = gg.create_group(os.path.basename(wrap))
                amp, unw, unwrsc = readfile.read_complex_float32(wrap)
                dset = group.create_dataset(os.path.basename(wrap), data=unw, compression="gzip")
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value
                d1, d2 = unwrsc["DATE12"].split("-")
                baseline_file = os.path.dirname(wrap) + "/" + d1 + "_" + d2 + "_baseline.rsc"
                baseline = readfile.read_roipac_rsc(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
                group.attrs["PROJECT_NAME"] = projectName
                group.attrs["UNIT"] = "radian"
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(wrap)
        fw.close()
        print "Writed " + str(len(wrapList)) + " wrapped interferograms to " + h5file + "\n"
    except:
        print "No wrapped interferogram is loaded.\n"

    ##############################################################################
    ################################# geomap.trans ###############################
    # Copy geomap*.trans (+ .rsc) into TSSAR unless already present.
    # NOTE(review): os.system with concatenated paths breaks on spaces and is
    # shell-injection-prone; consider subprocess with a list — flagged only.
    try:
        geomapPath = tssarProjectDir + "/geomap*.trans"
        geomapList = glob.glob(geomapPath)
        if len(geomapList) > 0:
            print "\ngeomap*.trans" + " already exists."
            sys.exit(1)
        geomapPath = templateContents["pysar.geomap"]
        geomapPath = check_variable_name(geomapPath)
        geomapList = glob.glob(geomapPath)
        cpCmd = "cp " + geomapList[0] + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + geomapList[0] + ".rsc " + tssarProjectDir
        print cpCmd + "\n"
        os.system(cpCmd)
    except:
        # print "*********************************"
        print "no geomap file is loaded.\n"
        # print "*********************************\n"

    ##############################################################################
    ################################## DEM #####################################
    # DEM in radar coordinates (radar*.hgt).
    try:
        demRdrPath = tssarProjectDir + "/radar*.hgt"
        demRdrList = glob.glob(demRdrPath)
        if len(demRdrList) > 0:
            print "\nradar*.hgt" + " already exists."
            sys.exit(1)
        demRdrPath = templateContents["pysar.dem.radarCoord"]
        demRdrPath = check_variable_name(demRdrPath)
        demRdrList = glob.glob(demRdrPath)
        cpCmd = "cp " + demRdrList[0] + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + demRdrList[0] + ".rsc " + tssarProjectDir
        print cpCmd + "\n"
        os.system(cpCmd)
    except:
        # print "*********************************"
        print "no DEM (radar coordinate) file is loaded.\n"
        # print "*********************************"

    # DEM in geo coordinates (*.dem).
    try:
        demGeoPath = tssarProjectDir + "/*.dem"
        demGeoList = glob.glob(demGeoPath)
        if len(demGeoList) > 0:
            print "\n*.dem" + " already exists."
            sys.exit(1)
        demGeoPath = templateContents["pysar.dem.geoCoord"]
        demGeoPath = check_variable_name(demGeoPath)
        demGeoList = glob.glob(demGeoPath)
        cpCmd = "cp " + demGeoList[0] + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + demGeoList[0] + ".rsc " + tssarProjectDir
        print cpCmd + "\n"
        os.system(cpCmd)
    except:
        # print "*********************************"
        print "no DEM (geo coordinate) file is loaded.\n"