def main(iargs=None):
    '''Create overlap interferograms.

    For every swath common to master and slave, forms burst-by-burst
    interferograms (master x conj(slave), via `multiply`) and saves the
    resulting product XML. With ``inps.overlap`` set, both the top and
    bottom overlap products are processed (used later for ESD);
    otherwise only the full-burst products are used.
    '''
    inps = cmdLineParse(iargs)

    # Swath lists come from the 'overlap' subdirectories in overlap mode.
    if inps.overlap:
        masterSwathList = ut.getSwathList(os.path.join(inps.master, 'overlap'))
        slaveSwathList = ut.getSwathList(os.path.join(inps.slave, 'overlap'))
    else:
        masterSwathList = ut.getSwathList(inps.master)
        slaveSwathList = ut.getSwathList(inps.slave)
    # Union of swaths present in either acquisition.
    swathList = list(sorted(set(masterSwathList + slaveSwathList)))

    for swath in swathList:
        IWstr = 'IW{0}'.format(swath)
        # Output directory for this swath's interferogram bursts.
        if inps.overlap:
            ifgdir = os.path.join(inps.interferogram, 'overlap', IWstr)
        else:
            ifgdir = os.path.join(inps.interferogram, IWstr)
        os.makedirs(ifgdir, exist_ok=True)

        ####Load relevant products
        if inps.overlap:
            topMaster = ut.loadProduct(os.path.join(inps.master, 'overlap', 'IW{0}_top.xml'.format(swath)))
            botMaster = ut.loadProduct(os.path.join(inps.master, 'overlap', 'IW{0}_bottom.xml'.format(swath)))
            topCoreg = ut.loadProduct(os.path.join(inps.slave, 'overlap', 'IW{0}_top.xml'.format(swath)))
            botCoreg = ut.loadProduct(os.path.join(inps.slave, 'overlap', 'IW{0}_bottom.xml'.format(swath)))
        else:
            # Non-overlap mode only needs the full-burst products; 'top'
            # names are reused so the loop below works for both modes.
            topMaster = ut.loadProduct(os.path.join(inps.master, 'IW{0}.xml'.format(swath)))
            topCoreg = ut.loadProduct(os.path.join(inps.slave, 'IW{0}.xml'.format(swath)))

        # NOTE(review): coregdir is computed but not referenced anywhere in
        # this visible block — offsets are resolved from inps.master /
        # inps.slave paths below. Possibly leftover; confirm before removing.
        if inps.overlap:
            coregdir = os.path.join(inps.slave, 'overlap', 'IW{0}'.format(swath))
        else:
            coregdir = os.path.join(inps.slave, 'IW{0}'.format(swath))

        topIfg = ut.coregSwathSLCProduct()
        topIfg.configure()

        if inps.overlap:
            botIfg = ut.coregSwathSLCProduct()
            botIfg.configure()

        # Burst numbering bounds for each product.
        minMaster = topMaster.bursts[0].burstNumber
        maxMaster = topMaster.bursts[-1].burstNumber
        minSlave = topCoreg.bursts[0].burstNumber
        maxSlave = topCoreg.bursts[-1].burstNumber

        # Only the bursts common to both acquisitions are interfered.
        minBurst = max(minSlave, minMaster)
        maxBurst = min(maxSlave, maxMaster)
        print('minSlave,maxSlave', minSlave, maxSlave)
        print('minMaster,maxMaster', minMaster, maxMaster)
        print('minBurst, maxBurst: ', minBurst, maxBurst)

        for ii in range(minBurst, maxBurst + 1):
            ####Process the top bursts
            master = topMaster.bursts[ii - minMaster]
            slave = topCoreg.bursts[ii - minSlave]
            print('matching burst numbers: ', master.burstNumber, slave.burstNumber)

            mastername = master.image.filename
            slavename = slave.image.filename
            if inps.overlap:
                rdict = {'rangeOff1': os.path.join(inps.master, 'overlap', IWstr, 'range_top_%02d_%02d.off' % (ii, ii + 1)),
                         'rangeOff2': os.path.join(inps.slave, 'overlap', IWstr, 'range_top_%02d_%02d.off' % (ii, ii + 1)),
                         'azimuthOff': os.path.join(inps.slave, 'overlap', IWstr, 'azimuth_top_%02d_%02d.off' % (ii, ii + 1))}
                intname = os.path.join(ifgdir, '%s_top_%02d_%02d.int' % (inps.intprefix, ii, ii + 1))
            else:
                # NOTE(review): this branch uses key 'azimuthOff1' while the
                # overlap branch uses 'azimuthOff'. Neither key is read by
                # the multiply() call below in this block, so the mismatch
                # appears harmless here — confirm no other consumer of rdict.
                rdict = {'rangeOff1': os.path.join(inps.master, IWstr, 'range_%02d.off' % (ii)),
                         'rangeOff2': os.path.join(inps.slave, IWstr, 'range_%02d.off' % (ii)),
                         'azimuthOff1': os.path.join(inps.slave, IWstr, 'azimuth_%02d.off' % (ii))}
                intname = os.path.join(ifgdir, '%s_%02d.int' % (inps.intprefix, ii))

            ut.adjustCommonValidRegion(master, slave)
            # Phase-per-meter factor used for flattening: 4*pi*dR/lambda.
            fact = 4 * np.pi * slave.rangePixelSize / slave.radarWavelength
            intimage = multiply(mastername, slavename, intname,
                                rdict['rangeOff1'], rdict['rangeOff2'],
                                fact, master, flatten=inps.flatten)

            # Record the interferogram burst using the master burst metadata.
            burst = copy.deepcopy(master)
            burst.image = intimage
            burst.burstNumber = ii
            topIfg.bursts.append(burst)

            if inps.overlap:
                ####Process the bottom bursts
                master = botMaster.bursts[ii - minMaster]
                slave = botCoreg.bursts[ii - minSlave]

                mastername = master.image.filename
                slavename = slave.image.filename
                # rdict = {'rangeOff' : os.path.join(coregdir, 'range_bot_%02d_%02d.off'%(ii,ii+1)),
                #         'azimuthOff': os.path.join(coregdir, 'azimuth_bot_%02d_%02d.off'%(ii,ii+1))}
                rdict = {'rangeOff1': os.path.join(inps.master, 'overlap', IWstr, 'range_bot_%02d_%02d.off' % (ii, ii + 1)),
                         'rangeOff2': os.path.join(inps.slave, 'overlap', IWstr, 'range_bot_%02d_%02d.off' % (ii, ii + 1)),
                         'azimuthOff': os.path.join(inps.slave, 'overlap', IWstr, 'azimuth_bot_%02d_%02d.off' % (ii, ii + 1))}

                print('rdict: ', rdict)

                ut.adjustCommonValidRegion(master, slave)
                intname = os.path.join(ifgdir, '%s_bot_%02d_%02d.int' % (inps.intprefix, ii, ii + 1))
                fact = 4 * np.pi * slave.rangePixelSize / slave.radarWavelength

                #intimage = multiply(mastername, slavename, intname,
                #        rdict['rangeOff'], fact, master, flatten=True)
                intimage = multiply(mastername, slavename, intname,
                                    rdict['rangeOff1'], rdict['rangeOff2'],
                                    fact, master, flatten=inps.flatten)

                burst = copy.deepcopy(master)
                burst.burstNumber = ii
                burst.image = intimage
                botIfg.bursts.append(burst)

        topIfg.numberOfBursts = len(topIfg.bursts)
        # Newer products carry a 'reference' attribute; fall back to the
        # master product's reference for older ones.
        if hasattr(topCoreg, 'reference'):
            topIfg.reference = topCoreg.reference
        else:
            topIfg.reference = topMaster.reference
        print('Type: ', type(topIfg.reference))

        if inps.overlap:
            ut.saveProduct(topIfg, ifgdir + '_top.xml')
            botIfg.numberOfBursts = len(botIfg.bursts)
            botIfg.reference = botCoreg.reference
            print(botIfg.reference)
            ut.saveProduct(botIfg, ifgdir + '_bottom.xml')
        else:
            ut.saveProduct(topIfg, ifgdir + '.xml')
def main(iargs=None):
    '''Compute baseline.

    For every swath present in both acquisitions, computes the parallel
    (Bpar) and perpendicular (Bperp) baseline at mid-range for each common
    burst and writes the per-swath averages to ``inps.baselineFile``.

    Fixes relative to previous revision:
    * 'Bprep' console label corrected to 'Bperp'.
    * the baseline file is opened via a context manager so it is closed
      even if a product fails to load;
    * ``os.makedirs`` is only invoked when the output path actually has a
      directory component (``os.makedirs('')`` raises).
    '''
    inps = cmdLineParse(iargs)
    from isceobj.Planet.Planet import Planet
    import numpy as np

    masterSwathList = ut.getSwathList(inps.master)
    slaveSwathList = ut.getSwathList(inps.slave)
    swathList = list(sorted(set(masterSwathList + slaveSwathList)))

    baselineDir = os.path.dirname(inps.baselineFile)
    if baselineDir:
        os.makedirs(baselineDir, exist_ok=True)

    with open(inps.baselineFile, 'w') as f:
        for swath in swathList:
            masterxml = os.path.join(inps.master, 'IW{0}.xml'.format(swath))
            slavexml = os.path.join(inps.slave, 'IW{0}.xml'.format(swath))

            # Skip swaths missing from either acquisition.
            if os.path.exists(masterxml) and os.path.exists(slavexml):
                master = ut.loadProduct(masterxml)
                slave = ut.loadProduct(slavexml)

                minMaster = master.bursts[0].burstNumber
                maxMaster = master.bursts[-1].burstNumber
                minSlave = slave.bursts[0].burstNumber
                maxSlave = slave.bursts[-1].burstNumber

                # Only bursts common to both products contribute.
                minBurst = max(minSlave, minMaster)
                maxBurst = min(maxSlave, maxMaster)
                print('minSlave,maxSlave', minSlave, maxSlave)
                print('minMaster,maxMaster', minMaster, maxMaster)
                print('minBurst, maxBurst: ', minBurst, maxBurst)

                refElp = Planet(pname='Earth').ellipsoid
                Bpar = []
                Bperp = []

                for ii in range(minBurst, maxBurst + 1):
                    mBurst = master.bursts[ii - minMaster]
                    sBurst = slave.bursts[ii - minSlave]

                    ###Target at mid range of the master burst
                    tmid = mBurst.sensingMid
                    rng = mBurst.midRange
                    masterSV = mBurst.orbit.interpolate(tmid, method='hermite')
                    target = mBurst.orbit.rdr2geo(tmid, rng)

                    # Locate the same ground target in the slave geometry.
                    slvTime, slvrng = sBurst.orbit.geo2rdr(target)
                    slaveSV = sBurst.orbit.interpolateOrbit(slvTime, method='hermite')

                    targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
                    mxyz = np.array(masterSV.getPosition())
                    mvel = np.array(masterSV.getVelocity())
                    sxyz = np.array(slaveSV.getPosition())

                    # Law of cosines on the (master, slave, target) triangle.
                    aa = np.linalg.norm(sxyz - mxyz)
                    costheta = (rng * rng + aa * aa - slvrng * slvrng) / (2. * rng * aa)

                    Bpar.append(aa * costheta)

                    perp = aa * np.sqrt(1 - costheta * costheta)
                    # Sign from the orientation of the baseline w.r.t. velocity.
                    direction = np.sign(np.dot(np.cross(targxyz - mxyz, sxyz - mxyz), mvel))
                    Bperp.append(direction * perp)

                # Label fixed from the old misspelling 'Bprep'.
                print('Bperp: ', Bperp)
                print('Bpar: ', Bpar)

                f.write('swath: IW{0}'.format(swath) + '\n')
                f.write('Bperp (average): ' + str(np.mean(Bperp)) + '\n')
                f.write('Bpar (average): ' + str(np.mean(Bpar)) + '\n')
def runGeocode(inps, prodlist, bbox, demfilename, is_offset_mode=False):
    '''Generalized geocoding of all the files listed above.

    Parameters
    ----------
    inps : namespace with look/skip settings (numberRangeLooks,
        numberAzimuthLooks, skipwidth, skiphgt, offset_left, offset_top,
        master, slave).
    prodlist : str or sequence of str
        Product file names to geocode; a string is split via StringUtils.
    bbox : sequence of 4 floats or None
        S/N/W/E bounds; computed from the frames when None.
    demfilename : str
        DEM file (its '.xml' metadata must sit next to it).
    is_offset_mode : bool
        When True, look settings come from the dense-offsets skips.

    Changes relative to previous revision: removed the unused local
    ``first`` flag and dead commented-out code; existence filtering done
    with a comprehension.
    '''
    from isceobj.Catalog import recordInputsAndOutputs
    logger.info("Geocoding Image")

    if isinstance(prodlist, str):
        from isceobj.Util.StringUtils import StringUtils as SU
        tobeGeocoded = SU.listify(prodlist)
    else:
        tobeGeocoded = prodlist

    # Remove files that have not been processed.
    tobeGeocoded = [toGeo for toGeo in tobeGeocoded if os.path.exists(toGeo)]

    print('Number of products to geocode: ', len(tobeGeocoded))
    if len(tobeGeocoded) == 0:
        print('No products found to geocode')
        return

    masterSwathList = ut.getSwathList(inps.master)
    slaveSwathList = ut.getSwathList(inps.slave)
    swathList = list(sorted(set(masterSwathList + slaveSwathList)))

    # Geometry is taken from the coregistered slave products.
    frames = []
    for swath in swathList:
        referenceProduct = ut.loadProduct(os.path.join(inps.slave, 'IW{0}.xml'.format(swath)))
        frames.append(referenceProduct)

    orb = getMergedOrbit(frames)

    if bbox is None:
        # Union of the per-frame bounding boxes.
        bboxes = [frame.getBbox() for frame in frames]
        snwe = [min(x[0] for x in bboxes),
                max(x[1] for x in bboxes),
                min(x[2] for x in bboxes),
                max(x[3] for x in bboxes)]
    else:
        snwe = list(bbox)
        if len(snwe) != 4:
            raise ValueError('Bounding box should be a list/tuple of length 4')

    ###Identify the 4 corners and dimensions
    topSwath = min(frames, key=lambda x: x.sensingStart)
    leftSwath = min(frames, key=lambda x: x.startingRange)

    ####Get required values from product
    burst = frames[0].bursts[0]
    t0 = topSwath.sensingStart
    dtaz = burst.azimuthTimeInterval
    r0 = leftSwath.startingRange
    dr = burst.rangePixelSize
    wvl = burst.radarWavelength
    planet = Planet(pname='Earth')

    ###Setup DEM
    demImage = isceobj.createDemImage()
    demImage.load(demfilename + '.xml')

    #####Geocode one by one
    ge = Geocodable()
    for prod in tobeGeocoded:
        objGeo = createGeozero()
        objGeo.configure()

        objGeo.snwe = snwe
        objGeo.demImage = demImage
        objGeo.demCropFilename = os.path.join(os.path.dirname(demfilename), "dem.crop")

        if is_offset_mode:
            # If using topsOffsetApp, image has been "pre-looked" by the
            # skips in runDenseOffsets.
            objGeo.numberRangeLooks = inps.skipwidth
            objGeo.numberAzimuthLooks = inps.skiphgt
        else:
            objGeo.numberRangeLooks = inps.numberRangeLooks
            objGeo.numberAzimuthLooks = inps.numberAzimuthLooks

        objGeo.lookSide = -1  # S1A is currently right looking only

        # Create the instance of the input image and the appropriate
        # geocode method.
        inImage, method = ge.create(prod)
        objGeo.method = method

        objGeo.slantRangePixelSpacing = dr
        objGeo.prf = 1.0 / dtaz
        objGeo.orbit = orb
        objGeo.width = inImage.getWidth()
        objGeo.length = inImage.getLength()
        objGeo.dopplerCentroidCoeffs = [0.]
        objGeo.radarWavelength = wvl

        if is_offset_mode:
            # "Pre-looking" adjusts the range/time start by whole skips.
            objGeo.rangeFirstSample = r0 + (inps.offset_left - 1) * dr
            objGeo.setSensingStart(t0 + datetime.timedelta(seconds=((inps.offset_top - 1) * dtaz)))
        else:
            # Center of the first multilooked pixel.
            objGeo.rangeFirstSample = r0 + ((inps.numberRangeLooks - 1) / 2.0) * dr
            objGeo.setSensingStart(t0 + datetime.timedelta(seconds=(((inps.numberAzimuthLooks - 1) / 2.0) * dtaz)))

        objGeo.wireInputPort(name='dem', object=demImage)
        objGeo.wireInputPort(name='planet', object=planet)
        objGeo.wireInputPort(name='tobegeocoded', object=inImage)

        objGeo.geocode()

        print('Geocoding: ', inImage.filename, 'geocode')
        print('Output file: ', inImage.filename + '.geo', 'geocode')
        print('Width', inImage.width, 'geocode')
        print('Length', inImage.length, 'geocode')
        print('Range looks', inps.numberRangeLooks, 'geocode')
        print('Azimuth looks', inps.numberAzimuthLooks, 'geocode')
        print('South', objGeo.minimumGeoLatitude, 'geocode')
        print('North', objGeo.maximumGeoLatitude, 'geocode')
        print('West', objGeo.minimumGeoLongitude, 'geocode')
        print('East', objGeo.maximumGeoLongitude, 'geocode')
def main(iargs=None):
    '''
    Create coregistered overlap secondarys.

    For each common swath, splits every common secondary burst into lower
    and upper range subbands and resamples both onto the reference grid
    (used for ionospheric estimation). Bursts whose subband outputs
    already exist are skipped.

    Fixes relative to previous revision: the bare ``except:`` guarding the
    relative-shift lookup is narrowed to ``(KeyError, IndexError)`` so that
    unrelated errors (including KeyboardInterrupt) are no longer converted
    into the "may not overlap" message.
    '''
    inps = cmdLineParse(iargs)
    referenceSwathList = ut.getSwathList(inps.reference)
    secondarySwathList = ut.getSwathList(inps.secondary)
    swathList = list(sorted(set(referenceSwathList + secondarySwathList)))

    for swath in swathList:
        ####Load secondary metadata
        reference = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath)))
        secondary = ut.loadProduct(os.path.join(inps.secondary, 'IW{0}.xml'.format(swath)))

        # Optional known misregistration values read from files; default 0.
        # NOTE(review): misreg_az / misreg_rg are read but not used in this
        # visible block — presumably consumed by a later revision; confirm.
        if os.path.exists(str(inps.misreg_az)):
            with open(inps.misreg_az, 'r') as f:
                misreg_az = float(f.readline())
        else:
            misreg_az = 0.0

        if os.path.exists(str(inps.misreg_rng)):
            with open(inps.misreg_rng, 'r') as f:
                misreg_rg = float(f.readline())
        else:
            misreg_rg = 0.0

        ###Output directory for coregistered SLCs
        outdir = os.path.join(inps.coreg, 'IW{0}'.format(swath))
        offdir = os.path.join(inps.coreg, 'IW{0}'.format(swath))
        os.makedirs(outdir, exist_ok=True)

        ####Indices w.r.t reference
        burstoffset, minBurst, maxBurst = reference.getCommonBurstLimits(secondary)
        secondaryBurstStart = minBurst + burstoffset
        secondaryBurstEnd = maxBurst

        relShifts = ut.getRelativeShifts(reference, secondary, minBurst,
                                         maxBurst, secondaryBurstStart)
        print('Shifts: ', relShifts)

        ####Can incorporate known misregistration here
        apoly = Poly2D()
        apoly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]])
        rpoly = Poly2D()
        rpoly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]])

        slvCoreg = ut.coregSwathSLCProduct()
        slvCoreg.configure()

        for ii in range(minBurst, maxBurst):
            outname = os.path.join(outdir, 'burst_%02d.slc' % (ii + 1))
            outnameLower = os.path.splitext(outname)[0] + '_lower.slc'
            outnameUpper = os.path.splitext(outname)[0] + '_upper.slc'

            # Skip bursts whose subband SLCs (plus .vrt/.xml) already exist.
            if os.path.exists(outnameLower) and os.path.exists(outnameLower + '.vrt') and \
               os.path.exists(outnameLower + '.xml') and \
               os.path.exists(outnameUpper) and os.path.exists(outnameUpper + '.vrt') and \
               os.path.exists(outnameUpper + '.xml'):
                print('burst %02d already processed, skip...' % (ii + 1))
                continue

            jj = secondaryBurstStart + ii - minBurst
            masBurst = reference.bursts[ii]
            slvBurst = secondary.bursts[jj]

            #####Top burst processing
            try:
                offset = relShifts[jj]
            except (KeyError, IndexError):
                # Narrowed from a bare except: only a missing shift entry
                # should produce this message.
                raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference'.format(jj))

            ####Setup initial polynomials
            ### If no misregs are given, these are zero
            ### If provided, can be used for resampling without running
            ### geo2rdr again for fast results
            rdict = {'azpoly': apoly,
                     'rgpoly': rpoly,
                     'rangeOff': os.path.join(offdir, 'range_%02d.off' % (ii + 1)),
                     'azimuthOff': os.path.join(offdir, 'azimuth_%02d.off' % (ii + 1))}

            ###For future - should account for azimuth and range misreg
            ###here .. ignoring for now.
            azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset=-1.0 * offset)
            rdict['carrPoly'] = azCarrPoly
            rdict['doppPoly'] = dpoly

            # Subband filtering: split the secondary burst into lower and
            # upper range subbands around the reference range frequency.
            from Stack import ionParam
            from isceobj.Constants import SPEED_OF_LIGHT
            rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * slvBurst.rangePixelSize)

            ionParamObj = ionParam()
            ionParamObj.configure()

            lower_tmpfile = os.path.splitext(slvBurst.image.filename)[0] + '_lower_tmp.slc'
            upper_tmpfile = os.path.splitext(slvBurst.image.filename)[0] + '_upper_tmp.slc'
            outputfile = [lower_tmpfile, upper_tmpfile]
            bw = [ionParamObj.rgBandwidthSub / rangeSamplingRate,
                  ionParamObj.rgBandwidthSub / rangeSamplingRate]
            bc = [-ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate,
                  ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate]
            rgRef = ionParamObj.rgRef
            subband(slvBurst, 2, outputfile, bw, bc, rgRef, True)

            # Resample the lower subband onto the reference burst grid.
            slvBurst.radarWavelength = ionParamObj.radarWavelengthLower
            slvBurst.image.filename = lower_tmpfile
            outnameSubband = outnameLower
            outimg = resampSecondary(masBurst, slvBurst, rdict, outnameSubband, (not inps.noflat))

            # Resample the upper subband.
            slvBurst.radarWavelength = ionParamObj.radarWavelengthUpper
            slvBurst.image.filename = upper_tmpfile
            outnameSubband = outnameUpper
            outimg = resampSecondary(masBurst, slvBurst, rdict, outnameSubband, (not inps.noflat))

            # Remove original (temporary) subband images and their metadata.
            for tmpfile in (lower_tmpfile, upper_tmpfile):
                os.remove(tmpfile)
                os.remove(tmpfile + '.vrt')
                os.remove(tmpfile + '.xml')
def main(iargs=None):
    """Create ortho- and geo-rectifying run jobs and submit them.

    Builds geo-rectified lat/lon layers for the master, merges and
    multilooks them, generates the amplitude ortho/geo run files, and
    submits each batch job with memory/walltime taken from the step
    config (falling back to DEFAULT). Exits early after submitting
    itself when ``inps.submit_flag`` is set.

    Fixes relative to previous revision: bare ``except:`` clauses
    narrowed (``IndexError`` for the DEM glob, ``KeyError`` for config
    lookups) and ``not iargs is None`` rewritten as ``iargs is not None``.
    """
    inps = putils.cmd_line_parse(iargs)

    inps.geom_masterDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.master = os.path.join(inps.work_dir, pathObj.masterdir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        # glob returned no match: no DEM available.
        print('DEM not exists!')
        sys.exit(1)

    if not os.path.exists(inps.geom_masterDir):
        os.mkdir(inps.geom_masterDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        js.submit_script(job_name, job_file_name, sys.argv[:], inps.work_dir)
        sys.exit(0)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    # Log the exact command line that was run.
    if iargs is not None:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(iargs[:]))
    else:
        message_rsmas.log(inps.work_dir,
                          os.path.basename(__file__) + ' ' + ' '.join(sys.argv[1::]))

    demZero = create_demZero(inps.dem, inps.geom_masterDir)
    swathList = ut.getSwathList(inps.master)
    create_georectified_lat_lon(swathList, inps.master, inps.geom_masterDir, demZero)
    merge_burst_lat_lon(inps)
    multilook_images(inps)
    run_file_list = make_run_list(inps)

    for item in run_file_list:
        step_name = 'amplitude_ortho_geo'
        # Per-step resources with fall-back to the DEFAULT section.
        try:
            memorymax = config[step_name]['memory']
        except KeyError:
            memorymax = config['DEFAULT']['memory']

        try:
            if config[step_name]['adjust'] == 'True':
                walltimelimit = putils.walltime_adjust(config[step_name]['walltime'])
            else:
                walltimelimit = config[step_name]['walltime']
        except KeyError:
            walltimelimit = config['DEFAULT']['walltime']

        queuename = os.getenv('QUEUENAME')

        putils.remove_last_job_running_products(run_file=item)

        jobs = js.submit_batch_jobs(batch_file=item,
                                    out_dir=os.path.join(inps.work_dir, 'run_files'),
                                    work_dir=inps.work_dir,
                                    memory=memorymax,
                                    walltime=walltimelimit,
                                    queue=queuename)

        putils.remove_zero_size_or_length_error_files(run_file=item)
        putils.raise_exception_if_job_exited(run_file=item)
        putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
        putils.move_out_job_files_to_stdout(run_file=item)

    #upload_to_s3(pic_dir)
    minsar.upload_data_products.main([inps.custom_template_file, '--image_products'])

    return
def main(iargs=None):
    """Create ortho- and geo-rectifying run jobs and submit them.

    Variant that selects the stripmap or tops ISCE stack tools from
    ``inps.prefix``, builds geo-rectified lat/lon layers for the
    reference, merges and multilooks them, then writes and submits the
    batch jobs via ``JOB_SUBMIT``, running post-processing checks only
    when submission succeeded.

    Fixes relative to previous revision: bare ``except:`` narrowed to
    ``IndexError`` for the DEM glob, and ``not iargs is None`` rewritten
    as ``iargs is not None``.
    """
    inps = putils.cmd_line_parse(iargs)

    # Pick the matching ISCE stack processor before importing its helpers.
    if 'stripmap' in inps.prefix:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'stripmapStack'))
    else:
        sys.path.append(os.path.join(os.getenv('ISCE_STACK'), 'topsStack'))

    from s1a_isce_utils import loadProduct, getSwathList
    import mergeBursts

    if iargs is not None:
        input_arguments = iargs
    else:
        input_arguments = sys.argv[1::]

    message_rsmas.log(inps.work_dir,
                      os.path.basename(__file__) + ' ' + ' '.join(input_arguments))

    inps.geom_referenceDir = os.path.join(inps.work_dir, pathObj.geomlatlondir)
    inps.reference = os.path.join(inps.work_dir, pathObj.referencedir)

    try:
        inps.dem = glob.glob('{}/DEM/*.wgs84'.format(inps.work_dir))[0]
    except IndexError:
        # glob returned no match: no DEM available.
        print('DEM not exists!')
        sys.exit(1)

    if not os.path.exists(inps.geom_referenceDir):
        os.mkdir(inps.geom_referenceDir)

    time.sleep(putils.pause_seconds(inps.wait_time))

    inps.out_dir = os.path.join(inps.work_dir, 'run_files')
    job_obj = JOB_SUBMIT(inps)

    #########################################
    # Submit job
    #########################################
    if inps.submit_flag:
        job_name = 'export_ortho_geo'
        job_file_name = job_name
        # Resubmit this script itself without the --submit flag.
        if '--submit' in input_arguments:
            input_arguments.remove('--submit')
        command = [os.path.abspath(__file__)] + input_arguments
        job_obj.submit_script(job_name, job_file_name, command)

    pic_dir = os.path.join(inps.work_dir, pathObj.tiffdir)
    if not os.path.exists(pic_dir):
        os.mkdir(pic_dir)

    demZero = create_demZero(inps.dem, inps.geom_referenceDir)
    swathList = getSwathList(inps.reference)
    create_georectified_lat_lon(swathList, inps.reference,
                                inps.geom_referenceDir, demZero, loadProduct)
    merge_burst_lat_lon(inps, mergeBursts)
    multilook_images(inps, mergeBursts)
    run_file_list = make_run_list(inps)

    for item in run_file_list:
        putils.remove_last_job_running_products(run_file=item)
        job_obj.write_batch_jobs(batch_file=item)
        job_status = job_obj.submit_batch_jobs(batch_file=item)

        # Post-processing only makes sense when the jobs actually ran.
        if job_status:
            putils.remove_zero_size_or_length_error_files(run_file=item)
            putils.rerun_job_if_exit_code_140(run_file=item, inps_dict=inps)
            putils.raise_exception_if_job_exited(run_file=item)
            putils.concatenate_error_files(run_file=item, work_dir=inps.work_dir)
            putils.move_out_job_files_to_stdout(run_file=item)

    return
def main(iargs=None):
    '''
    Create additional layers for performing ESD.

    For each swath, builds the double-difference interferograms between
    the top and bottom burst-overlap interferograms, multilooks them,
    estimates their coherence, and writes the per-overlap spectral
    (frequency) separation converted to phase-per-pixel — the inputs
    needed for Enhanced Spectral Diversity azimuth misregistration
    estimation.
    '''
    inps = cmdLineParse(iargs)
    # All inputs/outputs live under the 'overlap' subdirectories.
    inps.interferogram = os.path.join(inps.interferogram, 'overlap')
    inps.master = os.path.join(inps.master, 'overlap')
    inps.slave = os.path.join(inps.slave, 'overlap')
    masterSwathList = ut.getSwathList(inps.master)
    slaveSwathList = ut.getSwathList(inps.slave)
    swathList = list(sorted(set(masterSwathList + slaveSwathList)))

    for swath in swathList:
        IWstr = 'IW{0}'.format(swath)
        # Top/bottom overlap SLC products for master and coregistered slave.
        masterTop = ut.loadProduct(os.path.join(inps.master, IWstr + '_top.xml'))
        masterBot = ut.loadProduct(os.path.join(inps.master, IWstr + '_bottom.xml'))

        slaveTop = ut.loadProduct(os.path.join(inps.slave, IWstr + '_top.xml'))
        slaveBot = ut.loadProduct(os.path.join(inps.slave, IWstr + '_bottom.xml'))

        ####Load metadata for burst IFGs
        ifgTop = ut.loadProduct(os.path.join(inps.interferogram, IWstr + '_top.xml'))
        ifgBottom = ut.loadProduct(os.path.join(inps.interferogram, IWstr + '_bottom.xml'))

        ####Create ESD output directory
        esddir = os.path.join(inps.overlap, IWstr)
        if not os.path.isdir(esddir):
            os.makedirs(esddir)

        ####Overlap offsets directory
        masterOffdir = os.path.join(inps.master, IWstr)
        slaveOffdir = os.path.join(inps.slave, IWstr)

        #########
        # Burst numbering bounds for each product; the IFG product defines
        # the overlaps actually processed below.
        minMaster = masterTop.bursts[0].burstNumber
        maxMaster = masterTop.bursts[-1].burstNumber

        minSlave = slaveTop.bursts[0].burstNumber
        maxSlave = slaveTop.bursts[-1].burstNumber

        minBurst = ifgTop.bursts[0].burstNumber
        maxBurst = ifgTop.bursts[-1].burstNumber
        print('minSlave,maxSlave', minSlave, maxSlave)
        print('minMaster,maxMaster', minMaster, maxMaster)
        print('minBurst, maxBurst: ', minBurst, maxBurst)

        #########

        ifglist = []
        factorlist = []
        offsetlist = []
        cohlist = []

        for ii in range(minBurst, maxBurst + 1):
            ind = ii - minBurst    ###Index into overlaps
            mind = ii - minMaster  ### Index into master
            sind = ii - minSlave   ###Index into slave

            topBurstIfg = ifgTop.bursts[ind]
            botBurstIfg = ifgBottom.bursts[ind]

            ###############
            '''stackMasterTop = ifgTop.source.bursts[mind]
            stackMasterBot = ifgBottom.source.bursts[mind]

            dt = stackMasterTop.azimuthTimeInterval
            topStart = int(np.round((stackMasterBot.sensingStart - stackMasterTop.sensingStart).total_seconds() / dt))
            #overlapLen = .numberOfLines
            botStart = stackMasterBot.firstValidLine #int(np.round((.sensingStart - masterBot.sensingStart).total_seconds() / dt))
            print('+++++++++++++++++++')
            print(topStart, botStart)
            print('+++++++++++++++++++')
            '''
            ###############

            ####Double difference interferograms
            # Memory-map the two overlap interferograms (complex64 rasters).
            topInt = np.memmap(topBurstIfg.image.filename,
                               dtype=np.complex64,
                               mode='r',
                               shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples))

            botInt = np.memmap(botBurstIfg.image.filename,
                               dtype=np.complex64,
                               mode='r',
                               shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples))

            intName = os.path.join(esddir, 'overlap_%02d.int' % (ii))
            freqName = os.path.join(esddir, 'freq_%02d.bin' % (ii))

            # Double-difference: top x conj(bottom), written as raw binary.
            with open(intName, 'wb') as fid:
                fid.write(topInt * np.conj(botInt))

            img = isceobj.createIntImage()
            img.setFilename(intName)
            img.setWidth(topBurstIfg.numberOfSamples)
            img.setLength(topBurstIfg.numberOfLines)
            img.setAccessMode('READ')
            img.renderHdr()
            img.renderVRT()
            img.createImage()
            img.finalizeImage()

            multIntName = multilook(intName, alks=inps.esdAzimuthLooks,
                                    rlks=inps.esdRangeLooks)
            ifglist.append(multIntName)

            ####Estimate coherence of double different interferograms
            multCor = createCoherence(multIntName)
            cohlist.append(multCor)

            ####Estimate the frequency difference
            # Offset files for the top/bottom overlap of this burst pair.
            azMasTop = os.path.join(masterOffdir, 'azimuth_top_%02d_%02d.off' % (ii, ii + 1))
            rgMasTop = os.path.join(masterOffdir, 'range_top_%02d_%02d.off' % (ii, ii + 1))
            azMasBot = os.path.join(masterOffdir, 'azimuth_bot_%02d_%02d.off' % (ii, ii + 1))
            rgMasBot = os.path.join(masterOffdir, 'range_bot_%02d_%02d.off' % (ii, ii + 1))

            azSlvTop = os.path.join(slaveOffdir, 'azimuth_top_%02d_%02d.off' % (ii, ii + 1))
            rgSlvTop = os.path.join(slaveOffdir, 'range_top_%02d_%02d.off' % (ii, ii + 1))
            azSlvBot = os.path.join(slaveOffdir, 'azimuth_bot_%02d_%02d.off' % (ii, ii + 1))
            rgSlvBot = os.path.join(slaveOffdir, 'range_bot_%02d_%02d.off' % (ii, ii + 1))

            # Full (source) bursts bracketing this overlap: burst ii on top,
            # burst ii+1 on the bottom.
            mFullTop = masterTop.source.bursts[mind]
            mFullBot = masterBot.source.bursts[mind + 1]
            sFullTop = slaveTop.source.bursts[sind]
            sFullBot = slaveBot.source.bursts[sind + 1]

            freqdiff = overlapSpectralSeparation(topBurstIfg, botBurstIfg,
                                                 mFullTop, mFullBot,
                                                 sFullTop, sFullBot,
                                                 azMasTop, rgMasTop,
                                                 azMasBot, rgMasBot,
                                                 azSlvTop, rgSlvTop,
                                                 azSlvBot, rgSlvBot)

            # Convert frequency separation (Hz) to radians per azimuth line.
            with open(freqName, 'wb') as fid:
                (freqdiff * 2 * np.pi * mFullTop.azimuthTimeInterval).astype(np.float32).tofile(fid)

            img = isceobj.createImage()
            img.setFilename(freqName)
            img.setWidth(topBurstIfg.numberOfSamples)
            img.setLength(topBurstIfg.numberOfLines)
            img.setAccessMode('READ')
            img.bands = 1
            img.dataType = 'FLOAT'
            # img.createImage()
            img.renderHdr()
            img.renderVRT()
            img.createImage()
            img.finalizeImage()

            multConstName = multilook(freqName, alks=inps.esdAzimuthLooks,
                                      rlks=inps.esdRangeLooks)
            factorlist.append(multConstName)
def main(iargs=None):
    '''
    Merge burst products to make it look like stripmap.
    Currently will merge interferogram, lat, lon, z and los.
    '''
    inps = cmdLineParse(iargs)
    virtual = inps.useVirtualFiles

    swathList = ut.getSwathList(inps.reference)
    referenceFrames = []
    frames = []
    fileList = []
    # namePattern like 'burst*int' -> prefix 'burst', extension 'int'.
    namePattern = inps.namePattern.split('*')

    for swath in swathList:
        ifg = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath)))

        if inps.stack:
            stack = ut.loadProduct(os.path.join(inps.stack, 'IW{0}.xml'.format(swath)))
        if inps.isaligned:
            reference = ifg.reference
            # checking inconsistent number of bursts in the secondary acquisitions
            if reference.numberOfBursts != ifg.numberOfBursts:
                raise ValueError('{} has different number of bursts ({}) than the reference ({})'.format(
                    inps.reference, ifg.numberOfBursts, reference.numberOfBursts))
        else:
            reference = ifg

        minBurst = ifg.bursts[0].burstNumber
        maxBurst = ifg.bursts[-1].burstNumber

        # A single-burst swath cannot be merged; skip it.
        if minBurst == maxBurst:
            print('Skipping processing of swath {0}'.format(swath))
            continue

        if inps.stack:
            minStack = stack.bursts[0].burstNumber
            print('Updating the valid region of each burst to the common valid region of the stack')
            # Copy the stack-wide valid region onto each burst so all
            # acquisitions merge with identical valid extents.
            for ii in range(minBurst, maxBurst + 1):
                ifg.bursts[ii - minBurst].firstValidLine = stack.bursts[ii - minStack].firstValidLine
                ifg.bursts[ii - minBurst].firstValidSample = stack.bursts[ii - minStack].firstValidSample
                ifg.bursts[ii - minBurst].numValidLines = stack.bursts[ii - minStack].numValidLines
                ifg.bursts[ii - minBurst].numValidSamples = stack.bursts[ii - minStack].numValidSamples

        frames.append(ifg)
        referenceFrames.append(reference)
        print('bursts: ', minBurst, maxBurst)
        # Per-swath list of burst files matching the name pattern.
        fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath),
                                      namePattern[0] + '_%02d.%s' % (x, namePattern[1]))
                         for x in range(minBurst, maxBurst + 1)])

    mergedir = os.path.dirname(inps.outfile)
    os.makedirs(mergedir, exist_ok=True)

    # '.full' marks the full-resolution product when multilooking follows.
    suffix = '.full'
    if (inps.numberRangeLooks == 1) and (inps.numberAzimuthLooks == 1):
        suffix = ''

    ####Virtual flag is ignored for multi-swath data
    if (not virtual):
        print('User requested for multi-swath stitching.')
        print('Virtual files are the only option for this.')
        print('Proceeding with virtual files.')

    # Merge always happens virtually (VRT); disk output is materialized
    # below only when virtual files were not requested.
    mergeBurstsVirtual(frames, referenceFrames, fileList,
                       inps.outfile + suffix, validOnly=inps.validOnly)

    if (not virtual):
        print('writing merged file to disk ...')
        cmd = 'gdal_translate -of ENVI -co INTERLEAVE=BIL ' + inps.outfile + suffix + '.vrt ' + inps.outfile + suffix
        os.system(cmd)

    print(inps.multilook)
    if inps.multilook:
        multilook(inps.outfile + suffix,
                  outname=inps.outfile,
                  alks=inps.numberAzimuthLooks,
                  rlks=inps.numberRangeLooks,
                  multilook_tool=inps.multilookTool,
                  no_data=inps.noData)
    else:
        print('Skipping multi-looking ....')
def main(iargs=None):
    '''
    Estimate azimuth misregistration from ESD burst-overlap products.

    Reads per-burst overlap interferograms, frequency maps and coherence for
    every swath under the ESD directory, concatenates them into per-swath
    'combined.*' files, converts overlap phase to azimuth offsets (pixels),
    and writes median/mean/std statistics of the coherent offsets to
    ``inps.output``.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments to parse; when None the parser reads sys.argv.
    '''
    inps = cmdLineParse(iargs)

    esdPath = inps.esdDirname
    swathList = ut.getSwathList(esdPath)

    alks = inps.esdAzimuthLooks
    rlks = inps.esdRangeLooks

    # FIX: start from an empty float array (not a Python list) so the
    # `val.size == 0` check below also works when no burst data is found.
    val = np.array([], dtype=np.float32)

    for swath in swathList:
        esddir = os.path.join(esdPath, 'IW{0}'.format(swath))
        freqFiles = glob.glob(os.path.join(esddir, 'freq_??.bin'))
        freqFiles.sort()

        # Burst indices are encoded in the last two characters of the stem
        # (e.g. 'freq_03.bin' -> 3).
        minBurst = int(os.path.basename(freqFiles[0]).split('.')[0][-2:])
        maxBurst = int(os.path.basename(freqFiles[-1]).split('.')[0][-2:])
        maxBurst = maxBurst + 1  # make the range end exclusive

        combIntName = os.path.join(esddir, 'combined.int')
        combFreqName = os.path.join(esddir, 'combined_freq.bin')
        combCorName = os.path.join(esddir, 'combined.cor')
        combOffName = os.path.join(esddir, 'combined.off')

        # Remove stale outputs: data is appended ('ab') below.
        for ff in [combIntName, combFreqName, combCorName, combOffName]:
            if os.path.exists(ff):
                os.remove(ff)

        lineCount = 0
        for ii in range(minBurst, maxBurst):
            intname = os.path.join(
                esddir, 'overlap_%02d.%dalks_%drlks.int' % (ii, alks, rlks))
            freqname = os.path.join(
                esddir, 'freq_%02d.%dalks_%drlks.bin' % (ii, alks, rlks))
            corname = os.path.join(
                esddir, 'overlap_%02d.%dalks_%drlks.cor' % (ii, alks, rlks))

            # Image metadata (width/length) comes from the ISCE XML header.
            img = isceobj.createImage()
            img.load(intname + '.xml')
            width = img.getWidth()
            length = img.getLength()

            ifg = np.fromfile(intname, dtype=np.complex64).reshape((-1, width))
            freq = np.fromfile(freqname, dtype=np.float32).reshape((-1, width))
            cor = np.fromfile(corname, dtype=np.float32).reshape((-1, width))

            with open(combIntName, 'ab') as fid:
                ifg.tofile(fid)
            with open(combFreqName, 'ab') as fid:
                freq.tofile(fid)
            with open(combCorName, 'ab') as fid:
                cor.tofile(fid)

            # Azimuth offset in pixels: overlap phase divided by the overlap
            # frequency. Zero-frequency pixels yield inf/nan here; they are
            # excluded by the amplitude/coherence mask below, so silence the
            # spurious runtime warnings.
            with np.errstate(divide='ignore', invalid='ignore'):
                off = np.angle(ifg) / freq

            with open(combOffName, 'ab') as fid:
                off.astype(np.float32).tofile(fid)

            lineCount += length

            # Keep only pixels with signal and sufficient coherence.
            mask = (np.abs(ifg) > 0) * (cor > inps.esdCoherenceThreshold)
            vali = off[mask]
            val = np.hstack((val, vali))

        # Render ISCE headers for the combined products of this swath.
        img = isceobj.createIntImage()
        img.filename = combIntName
        img.setWidth(width)
        img.setAccessMode('READ')
        img.renderHdr()

        for fname in [combFreqName, combCorName, combOffName]:
            img = isceobj.createImage()
            img.bands = 1
            img.scheme = 'BIP'
            img.dataType = 'FLOAT'
            img.filename = fname
            img.setWidth(width)
            img.setAccessMode('READ')
            img.renderHdr()

    if val.size == 0:
        raise Exception(
            'Coherence threshold too strict. No points left for reliable ESD estimate'
        )

    medianval = np.median(val)
    meanval = np.mean(val)
    stdval = np.std(val)

    # FIX: np.histogram's `normed` argument was deprecated and removed in
    # NumPy 1.24; `density=True` is the equivalent replacement.
    hist, bins = np.histogram(val, 50, density=True)
    center = 0.5 * (bins[:-1] + bins[1:])

    debugplot = True
    try:
        import matplotlib as mpl
        mpl.use('Agg')  # headless backend; must be selected before pyplot
        import matplotlib.pyplot as plt
    except ImportError:  # FIX: was a bare except that hid unrelated errors
        print('Matplotlib could not be imported. Skipping debug plot...')
        debugplot = False

    if debugplot:
        ####Plotting
        # NOTE(review): the figure is written into the LAST swath's esddir —
        # preserved from the original behavior; confirm this is intended.
        plt.figure()
        plt.bar(center, hist, align='center', width=0.7 * (bins[1] - bins[0]))
        plt.xlabel('Azimuth shift in pixels')
        plt.savefig(os.path.join(esddir, 'ESDmisregistration.png'))
        plt.close()

    outputDir = os.path.dirname(inps.output)
    # FIX: guard against an empty dirname when the output is a bare filename
    # (os.makedirs('') raises FileNotFoundError).
    if outputDir:
        os.makedirs(outputDir, exist_ok=True)

    with open(inps.output, 'w') as f:
        f.write('median : ' + str(medianval) + '\n')
        f.write('mean : ' + str(meanval) + '\n')
        f.write('std : ' + str(stdval) + '\n')
        f.write('coherence threshold : ' + str(inps.esdCoherenceThreshold) +
                '\n')
        # FIX: corrected 'mumber' -> 'number' typo in the report line.
        f.write('number of coherent points : ' + str(len(val)) + '\n')
def main(iargs=None):
    '''
    Estimate offsets for the overlap regions of the bursts.

    For each swath, maps geometry products (lat/lon/hgt) back to the slave
    radar geometry with geo2rdr to produce range/azimuth offset fields —
    either per burst-overlap pair (``inps.overlap``) or per full burst.

    Parameters
    ----------
    iargs : list of str, optional
        Command-line arguments to parse; when None the parser reads sys.argv.
    '''
    inps = cmdLineParse(iargs)

    # see if the user compiled isce with GPU enabled
    run_GPU = False
    try:
        from zerodop.GPUtopozero.GPUtopozero import PyTopozero
        from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr
        run_GPU = True
    # FIX: was a bare `except:` which also swallowed KeyboardInterrupt /
    # SystemExit and unrelated failures; only a missing GPU build is expected.
    except ImportError:
        pass

    if inps.useGPU and not run_GPU:
        print("GPU mode requested but no GPU ISCE code found")

    # setting the respective version of geo2rdr for CPU and GPU
    if run_GPU and inps.useGPU:
        print('GPU mode')
        runGeo2rdr = runGeo2rdrGPU
    else:
        print('CPU mode')
        runGeo2rdr = runGeo2rdrCPU

    masterSwathList = ut.getSwathList(inps.master)
    slaveSwathList = ut.getSwathList(inps.slave)
    swathList = list(sorted(set(masterSwathList + slaveSwathList)))

    for swath in swathList:
        ##Load slave metadata
        slave = ut.loadProduct(
            os.path.join(inps.slave, 'IW{0}.xml'.format(swath)))
        master = ut.loadProduct(
            os.path.join(inps.master, 'IW{0}.xml'.format(swath)))

        ### output directory
        if inps.overlap:
            outdir = os.path.join(inps.coregdir, inps.overlapDir,
                                  'IW{0}'.format(swath))
        else:
            outdir = os.path.join(inps.coregdir, 'IW{0}'.format(swath))
        os.makedirs(outdir, exist_ok=True)

        # Prior misregistration estimates are optional: read a single float
        # from each file if present, otherwise assume zero.
        if os.path.exists(str(inps.misreg_az)):
            with open(inps.misreg_az, 'r') as f:
                misreg_az = float(f.readline())
        else:
            misreg_az = 0.0

        if os.path.exists(str(inps.misreg_rng)):
            with open(inps.misreg_rng, 'r') as f:
                misreg_rg = float(f.readline())
        else:
            misreg_rg = 0.0

        burstoffset, minBurst, maxBurst = master.getCommonBurstLimits(slave)

        ###Burst indices w.r.t master
        if inps.overlap:
            # Overlap regions exist only between consecutive bursts, so the
            # last burst has no following overlap.
            maxBurst = maxBurst - 1
            geomDir = os.path.join(inps.geom_masterDir, inps.overlapDir,
                                   'IW{0}'.format(swath))
        else:
            geomDir = os.path.join(inps.geom_masterDir, 'IW{0}'.format(swath))

        slaveBurstStart = minBurst + burstoffset

        for mBurst in range(minBurst, maxBurst):
            ###Corresponding slave burst
            sBurst = slaveBurstStart + (mBurst - minBurst)
            burstTop = slave.bursts[sBurst]

            if inps.overlap:
                burstBot = slave.bursts[sBurst + 1]

                print(
                    'Overlap pair {0}: Burst {1} of master matched with Burst {2} of slave'
                    .format(mBurst - minBurst, mBurst, sBurst))
                ####Generate offsets for top burst
                rdict = {
                    'lat':
                    os.path.join(
                        geomDir,
                        'lat_%02d_%02d.rdr' % (mBurst + 1, mBurst + 2)),
                    'lon':
                    os.path.join(
                        geomDir,
                        'lon_%02d_%02d.rdr' % (mBurst + 1, mBurst + 2)),
                    'hgt':
                    os.path.join(
                        geomDir,
                        'hgt_%02d_%02d.rdr' % (mBurst + 1, mBurst + 2)),
                    'rangeOffName':
                    os.path.join(
                        outdir,
                        'range_top_%02d_%02d.off' % (mBurst + 1, mBurst + 2)),
                    'azOffName':
                    os.path.join(
                        outdir,
                        'azimuth_top_%02d_%02d.off' % (mBurst + 1, mBurst + 2))
                }
                runGeo2rdr(burstTop,
                           rdict,
                           misreg_az=misreg_az,
                           misreg_rg=misreg_rg)

                print(
                    'Overlap pair {0}: Burst {1} of master matched with Burst {2} of slave'
                    .format(mBurst - minBurst, mBurst + 1, sBurst + 1))
                ####Generate offsets for bottom burst
                rdict = {
                    'lat':
                    os.path.join(
                        geomDir,
                        'lat_%02d_%02d.rdr' % (mBurst + 1, mBurst + 2)),
                    'lon':
                    os.path.join(
                        geomDir,
                        'lon_%02d_%02d.rdr' % (mBurst + 1, mBurst + 2)),
                    'hgt':
                    os.path.join(
                        geomDir,
                        'hgt_%02d_%02d.rdr' % (mBurst + 1, mBurst + 2)),
                    'rangeOffName':
                    os.path.join(
                        outdir,
                        'range_bot_%02d_%02d.off' % (mBurst + 1, mBurst + 2)),
                    'azOffName':
                    os.path.join(
                        outdir,
                        'azimuth_bot_%02d_%02d.off' % (mBurst + 1, mBurst + 2))
                }
                runGeo2rdr(burstBot,
                           rdict,
                           misreg_az=misreg_az,
                           misreg_rg=misreg_rg)
            else:
                print('Burst {1} of master matched with Burst {2} of slave'.
                      format(mBurst - minBurst, mBurst, sBurst))
                ####Generate offsets for top burst
                rdict = {
                    'lat':
                    os.path.join(geomDir, 'lat_%02d.rdr' % (mBurst + 1)),
                    'lon':
                    os.path.join(geomDir, 'lon_%02d.rdr' % (mBurst + 1)),
                    'hgt':
                    os.path.join(geomDir, 'hgt_%02d.rdr' % (mBurst + 1)),
                    'rangeOffName':
                    os.path.join(outdir, 'range_%02d.off' % (mBurst + 1)),
                    'azOffName':
                    os.path.join(outdir, 'azimuth_%02d.off' % (mBurst + 1))
                }
                runGeo2rdr(burstTop,
                           rdict,
                           misreg_az=misreg_az,
                           misreg_rg=misreg_rg)