def read_inps2dict(inps): """Read input Namespace object info into inpsDict It grab the following contents into inpsDict 1. inps & all template files 2. configurations: processor, autoPath, updateMode, compression 3. extra metadata: PLATFORM, PROJECT_NAME, 4. translate autoPath """ # Read input info into inpsDict inpsDict = vars(inps) inpsDict['PLATFORM'] = None # Read template file template = {} for fname in inps.template_file: temp = readfile.read_template(fname) temp = ut.check_template_auto_value(temp) template.update(temp) for key, value in template.items(): inpsDict[key] = value if 'processor' in template.keys(): template['mintpy.load.processor'] = template['processor'] prefix = 'mintpy.load.' key_list = [i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)] for key in key_list: value = template[prefix+key] if key in ['processor', 'autoPath', 'updateMode', 'compression']: inpsDict[key] = template[prefix+key] elif value: inpsDict[prefix+key] = template[prefix+key] print('processor : {}'.format(inpsDict['processor'])) if inpsDict['compression'] == False: inpsDict['compression'] = None # PROJECT_NAME --> PLATFORM if not inpsDict['PROJECT_NAME']: cfile = [i for i in list(inps.template_file) if os.path.basename(i) != 'smallbaselineApp.cfg'] inpsDict['PROJECT_NAME'] = sensor.project_name2sensor_name(cfile)[1] msg = 'SAR platform/sensor : ' sensor_name = sensor.project_name2sensor_name(str(inpsDict['PROJECT_NAME']))[0] if sensor_name: msg += str(sensor_name) inpsDict['PLATFORM'] = str(sensor_name) else: msg += 'unknown from project name "{}"'.format(inpsDict['PROJECT_NAME']) print(msg) # update file path with auto if inpsDict.get('autoPath', False): print('use auto path defined in mintpy.defaults.auto_path for options in auto') inpsDict = auto_path.get_auto_path(processor=inpsDict['processor'], work_dir=os.path.dirname(inpsDict['outdir']), template=inpsDict) return inpsDict
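# A minimal usage sketch (not from the source) of the convention every snippet
# in this section relies on: sensor.project_name2sensor_name() returns a
# (sensor_name, project_name) tuple, with None entries when no match is found.
# The project name 'GalapagosSenDT128' is a made-up example.
from mintpy.objects import sensor

sensor_name, project_name = sensor.project_name2sensor_name('GalapagosSenDT128')
# sensor_name  -> sensor abbreviation parsed from the name (None if unknown)
# project_name -> the matched project name string (None if no match)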
def read_inps2dict(inps): """Read input Namespace object info into inpsDict""" # Read input info into inpsDict inpsDict = vars(inps) inpsDict['PLATFORM'] = None # Read template file template = {} for fname in inps.template_file: temp = readfile.read_template(fname) temp = ut.check_template_auto_value(temp) template.update(temp) for key, value in template.items(): inpsDict[key] = value if 'processor' in template.keys(): template['mintpy.load.processor'] = template['processor'] prefix = 'mintpy.load.' key_list = [ i.split(prefix)[1] for i in template.keys() if i.startswith(prefix) ] for key in key_list: value = template[prefix + key] if key in ['processor', 'updateMode', 'compression']: inpsDict[key] = template[prefix + key] elif value: inpsDict[prefix + key] = template[prefix + key] if inpsDict['compression'] == False: inpsDict['compression'] = None # PROJECT_NAME --> PLATFORM if not inpsDict['PROJECT_NAME']: cfile = [ i for i in list(inps.template_file) if os.path.basename(i) != 'smallbaselineApp.cfg' ] inpsDict['PROJECT_NAME'] = sensor.project_name2sensor_name(cfile)[1] inpsDict['PLATFORM'] = str( sensor.project_name2sensor_name(str(inpsDict['PROJECT_NAME']))[0]) if inpsDict['PLATFORM']: print('SAR platform/sensor : {}'.format(inpsDict['PLATFORM'])) print('processor: {}'.format(inpsDict['processor'])) # Here to insert code to check default file path for miami user if (auto_path.autoPath and 'SCRATCHDIR' in os.environ and inpsDict['PROJECT_NAME'] is not None and inpsDict['mintpy.load.unwFile']) == 'auto': print(('check auto path setting for Univ of Miami users' ' for processor: {}'.format(inpsDict['processor']))) inpsDict = auto_path.get_auto_path( processor=inpsDict['processor'], project_name=inpsDict['PROJECT_NAME'], template=inpsDict) return inpsDict
def read_inps2dict(inps): """Read input Namespace object info into inpsDict""" # Read input info into inpsDict inpsDict = vars(inps) inpsDict['PLATFORM'] = None # Read template file template = {} for fname in inps.template_file: temp = readfile.read_template(fname) temp = ut.check_template_auto_value(temp) template.update(temp) for key, value in template.items(): inpsDict[key] = value if 'processor' in template.keys(): template['mintpy.load.processor'] = template['processor'] prefix = 'mintpy.load.' key_list = [ i.split(prefix)[1] for i in template.keys() if i.startswith(prefix) ] for key in key_list: value = template[prefix + key] if key in ['processor', 'autoPath', 'updateMode', 'compression']: inpsDict[key] = template[prefix + key] elif value: inpsDict[prefix + key] = template[prefix + key] if inpsDict['compression'] == False: inpsDict['compression'] = None # PROJECT_NAME --> PLATFORM if not inpsDict['PROJECT_NAME']: cfile = [ i for i in list(inps.template_file) if os.path.basename(i) != 'smallbaselineApp.cfg' ] inpsDict['PROJECT_NAME'] = sensor.project_name2sensor_name(cfile)[1] inpsDict['PLATFORM'] = str( sensor.project_name2sensor_name(str(inpsDict['PROJECT_NAME']))[0]) print('SAR platform/sensor : {}'.format(inpsDict['PLATFORM'])) print('processor: {}'.format(inpsDict['processor'])) # update file path with auto if inpsDict['autoPath']: print( 'use auto path defined in mintpy.defaults.auto_path for options in auto' ) inpsDict = auto_path.get_auto_path(processor=inpsDict['processor'], work_dir=os.path.dirname( inpsDict['outdir']), template=inpsDict) return inpsDict
def read_inps2dict(inps): """Read input Namespace object info into inpsDict""" # Read input info into inpsDict inpsDict = vars(inps) inpsDict['PLATFORM'] = None # Read template file template = {} for fname in inps.template_file: temp = readfile.read_template(fname) temp = ut.check_template_auto_value(temp) template.update(temp) for key, value in template.items(): inpsDict[key] = value if 'processor' in template.keys(): template['mintpy.load.processor'] = template['processor'] prefix = 'mintpy.load.' key_list = [i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)] for key in key_list: value = template[prefix+key] if key in ['processor', 'updateMode', 'compression']: inpsDict[key] = template[prefix+key] elif value: inpsDict[prefix+key] = template[prefix+key] if inpsDict['compression'] == False: inpsDict['compression'] = None # PROJECT_NAME --> PLATFORM if not inpsDict['PROJECT_NAME']: cfile = [i for i in list(inps.template_file) if os.path.basename(i) != 'smallbaselineApp.cfg'] inpsDict['PROJECT_NAME'] = sensor.project_name2sensor_name(cfile)[1] inpsDict['PLATFORM'] = str(sensor.project_name2sensor_name(str(inpsDict['PROJECT_NAME']))[0]) if inpsDict['PLATFORM']: print('platform : {}'.format(inpsDict['PLATFORM'])) print('processor: {}'.format(inpsDict['processor'])) # Here to insert code to check default file path for miami user if (auto_path.autoPath and 'SCRATCHDIR' in os.environ and inpsDict['PROJECT_NAME'] is not None): print(('check auto path setting for Univ of Miami users' ' for processor: {}'.format(inpsDict['processor']))) inpsDict = auto_path.get_auto_path(processor=inpsDict['processor'], project_name=inpsDict['PROJECT_NAME'], template=inpsDict) return inpsDict
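# Standalone sketch of the prefix-stripping pattern shared by the
# read_inps2dict() variants above; the template contents below are invented
# for illustration.
template = {
    'mintpy.load.processor': 'isce',
    'mintpy.load.unwFile': 'auto',
    'mintpy.troposphericDelay.method': 'pyaps',
}
prefix = 'mintpy.load.'
key_list = [i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)]
print(key_list)  # ['processor', 'unwFile'] -- keys outside the prefix are skipped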
def write_job_file(iDict):
    """Write job file to submit process_isce_stack.py as a job"""
    # command line
    cmd = 'process_isce_stack.py -t {}'.format(iDict['templateFile'])
    if iDict['startNum']:
        cmd += ' --start {} '.format(iDict['startNum'])
    if iDict['endNum']:
        cmd += ' --end {} '.format(iDict['endNum'])
    print('run the following command in bsub mode')
    print(cmd)

    # write job file
    job_dir = os.getcwd()
    job_file = os.path.join(job_dir, 'z_input_process_isce_stack.job')
    job_name = sensor.project_name2sensor_name(iDict['templateFile'])[1]
    with open(job_file, 'w') as f:
        f.write('#! /bin/tcsh\n')
        f.write('#BSUB -J {}\n'.format(job_name))
        f.write('#BSUB -P insarlab\n')
        f.write('#BSUB -o z_output_{}.%J.o\n'.format(job_name))
        f.write('#BSUB -e z_output_{}.%J.e\n'.format(job_name))
        f.write('#BSUB -W {}\n'.format(iDict['walltime']))
        f.write('#BSUB -q general\n')
        f.write('#BSUB -n 1\n')
        f.write('#BSUB -R "rusage[mem={}]"\n'.format(iDict['memory']))
        if iDict['email']:
            f.write('#BSUB -u {}\n'.format(iDict['email']))
            f.write('#BSUB -N\n')

        # write cd work directory
        f.write('\n')
        f.write('cd {}\n'.format(job_dir))
        f.write('{}\n'.format(cmd))
    print('finished writing job file: {}'.format(job_file))
    return job_file
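# Hypothetical call of write_job_file() (all iDict values are invented) and
# the #BSUB header it would emit, reconstructed from the f.write() calls
# above, assuming the project name is recovered as 'AlosAT424':
iDict = {'templateFile': 'AlosAT424.template', 'startNum': None, 'endNum': None,
         'walltime': '6:00', 'memory': 4000, 'email': None}
write_job_file(iDict)
# z_input_process_isce_stack.job then begins with:
#   #! /bin/tcsh
#   #BSUB -J AlosAT424
#   #BSUB -P insarlab
#   #BSUB -o z_output_AlosAT424.%J.o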
def write_to_one_file(outfile, dH, dV, atr, dLOS, atr_list, ref_file=None):
    """Write all datasets into one HDF5 file"""
    from mintpy.objects import sensor
    print('write all datasets into {}'.format(outfile))
    length, width = dH.shape

    dsDict = {}
    for i in range(len(atr_list)):
        # auto dataset name
        atr = atr_list[i]
        dsName = sensor.project_name2sensor_name(atr['FILE_PATH'])[0]
        if atr['ORBIT_DIRECTION'].lower().startswith('asc'):
            dsName += 'A'
        else:
            dsName += 'D'
        if 'trackNumber' in atr.keys():
            dsName += 'T{}'.format(atr['trackNumber'])
        dsName += '_{}'.format(atr['DATE12'])

        dsDict[dsName] = dLOS[i, :].reshape(length, width)
    dsDict['vertical'] = dV
    dsDict['horizontal'] = dH

    writefile.write(dsDict, out_file=outfile, metadata=atr, ref_file=ref_file)
    return outfile
def write_to_one_file(outfile, dH, dV, atr, dLOS_list, atr_list, ref_file=None):
    """Write all datasets into one HDF5 file"""
    print('write all datasets into {}'.format(outfile))

    dsDict = {}
    for i in range(len(atr_list)):
        # auto dataset name
        atr_i = atr_list[i]
        dsName = sensor.project_name2sensor_name(atr_i['PROJECT_NAME'])[0]
        # use the per-file metadata (atr_i) for the orbit direction, so that
        # ascending and descending inputs are labeled individually
        if atr_i['ORBIT_DIRECTION'].lower().startswith('asc'):
            dsName += 'A'
        else:
            dsName += 'D'
        if 'trackNumber' in atr_i.keys():
            dsName += 'T{}'.format(atr_i['trackNumber'])
        dsName += '_{}'.format(atr_i['DATE12'])

        dsDict[dsName] = dLOS_list[i]
    dsDict['vertical'] = dV
    dsDict['horizontal'] = dH

    writefile.write(dsDict, out_file=outfile, metadata=atr, ref_file=ref_file)
    return outfile
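# Standalone walk-through of the auto dataset naming above; the metadata and
# the 'Alos' sensor name are invented for illustration:
atr_i = {'ORBIT_DIRECTION': 'ASCENDING', 'trackNumber': '422',
         'DATE12': '20080101_20100101'}
ds_name = 'Alos'  # assumed result of sensor.project_name2sensor_name(...)[0]
ds_name += 'A' if atr_i['ORBIT_DIRECTION'].lower().startswith('asc') else 'D'
if 'trackNumber' in atr_i.keys():
    ds_name += 'T{}'.format(atr_i['trackNumber'])
ds_name += '_{}'.format(atr_i['DATE12'])
print(ds_name)  # AlosAT422_20080101_20100101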
def wait4jobs2finish(run_file, num_job):
    job_name = os.path.basename(run_file)
    file_pattern = 'z_output_{}.*.o'.format(job_name)
    proj_name = sensor.project_name2sensor_name(os.getcwd())[1]

    print('-' * 50)
    print('sleeping until {} jobs are done for {}'.format(num_job, job_name))
    t_sec = 0
    num_file = len(glob.glob(file_pattern))
    while num_file < num_job:
        time.sleep(1)  # wait one second

        msg = '# of '
        if proj_name:
            msg += '{}/'.format(proj_name)
        msg += '{} files: {} / {} after {} mins'.format(file_pattern, num_file, num_job, int(t_sec / 60))
        # print progress every minute for the first 10 mins, then every 5 mins
        if t_sec <= 600:
            if t_sec % 60 == 0:
                print(msg)
        else:
            if t_sec % 300 == 0:
                print(msg)

        num_file = len(glob.glob(file_pattern))
        t_sec += 1

    print('-' * 50)
    print('ALL {} jobs are done for {}'.format(num_job, job_name))
    m, s = divmod(t_sec, 60)
    h, m = divmod(m, 60)
    print('Total used time: {:02d} hours {:02d} mins {:02d} secs'.format(h, m, s))
    return
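# Quick check of the divmod-based elapsed-time formatting used above:
t_sec = 7384
m, s = divmod(t_sec, 60)
h, m = divmod(m, 60)
print('Total used time: {:02d} hours {:02d} mins {:02d} secs'.format(h, m, s))
# Total used time: 02 hours 03 mins 04 secs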
def read_inps2dict(inps):
    print('read options from template file: ' + os.path.basename(inps.templateFile))
    template = readfile.read_template(inps.templateFile)
    template = ut.check_template_auto_value(template)

    iDict = vars(inps)
    key_prefix = 'isce.'
    key_list = [i.split(key_prefix)[1] for i in template.keys() if i.startswith(key_prefix)]
    for key in key_list:
        iDict[key] = template[key_prefix + key]

    iDict['sensor'], iDict['projectName'] = sensor.project_name2sensor_name(iDict['templateFile'])
    return iDict
def prepare_metadata4giant(fname, meta_files=None):
    """Extract metadata from xml files for GIAnT time-series file."""
    # check xml files
    if not meta_files:
        meta_files = auto_xml_file4giant(fname)
    if not meta_files:
        raise FileNotFoundError("no xml file found.")

    # extract metadata from xml files
    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
    xml_files = [i for i in meta_files if i.endswith('.xml')]
    xml_dict = {}
    for rsc_file in rsc_files:
        print('reading {}'.format(rsc_file))
        rsc_dict = readfile.read_roipac_rsc(rsc_file)
        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
            try:
                rsc_dict.pop(key)
            except KeyError:
                pass
        xml_dict.update(rsc_dict)
    for xml_file in xml_files:
        print('reading {}'.format(xml_file))
        xml_dict.update(read_giant_xml(xml_file))

    if not xml_dict:
        raise ValueError('No metadata found in file: ' + xml_file)

    # standardize metadata names
    xml_dict = readfile.standardize_metadata(xml_dict)

    # project name
    sensor_name, project_name = sensor.project_name2sensor_name(os.path.abspath(fname))
    if sensor_name:
        xml_dict['PLATFORM'] = sensor_name
    if project_name:
        xml_dict['PROJECT_NAME'] = project_name
        if sensor_name in project_name:
            tmp = project_name.split(sensor_name)[1][0]
            if tmp == 'A':
                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
            else:
                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'

    # update GIAnT HDF5 file
    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
    return fname
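# Sketch of the orbit-direction inference in prepare_metadata4giant(): the
# character right after the sensor name in the project name encodes
# A(scending) / D(escending). The names below are invented examples.
sensor_name, project_name = 'Sen', 'GalapagosSenDT128'
tmp = project_name.split(sensor_name)[1][0]  # -> 'D'
orbit_direction = 'ASCENDING' if tmp == 'A' else 'DESCENDING'
print(orbit_direction)  # DESCENDING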
def read_inps2dict(inps):
    print('read options from template file: ' + os.path.basename(inps.templateFile))
    template = readfile.read_template(inps.templateFile)
    template = ut.check_template_auto_value(template)

    iDict = vars(inps)
    key_prefix = 'isce.'  # assumed module-level constant in the original file (see the variant above)
    key_list = [i.split(key_prefix)[1] for i in template.keys() if i.startswith(key_prefix)]
    for key in key_list:
        iDict[key] = template[key_prefix + key]

    # default values if options are not specified in the template file
    # (AUTO_DICT is assumed to be a module-level dict of defaults)
    for key in AUTO_DICT.keys():
        if key not in iDict.keys():
            iDict[key] = AUTO_DICT[key]

    iDict['sensor'], iDict['projectName'] = sensor.project_name2sensor_name(iDict['templateFile'])

    # check
    if iDict['processor'] not in ['topsStack', 'stripmapStack']:
        msg = 'un-recognized ISCE-2 stack processor: {}'.format(iDict['processor'])
        msg += '\nsupported processors: [topsStack, stripmapStack]'
        raise ValueError(msg)

    # expand all paths to abspath
    for key in iDict.keys():
        if key.endswith(('File', 'Dir')) and iDict[key]:
            iDict[key] = os.path.expanduser(iDict[key])
            iDict[key] = os.path.expandvars(iDict[key])
            iDict[key] = os.path.abspath(iDict[key])

    # --text_cmd
    if iDict['processor'] == 'topsStack':
        iDict['text_cmd'] = 'export PATH=${PATH}:${ISCE_STACK}/topsStack'
    else:
        iDict['text_cmd'] = 'export PATH=${PATH}:${ISCE_STACK}/stripmapStack'
    return iDict
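# Demo of the expanduser / expandvars / abspath chain used above to normalize
# every *File / *Dir option (the path and env var below are made up):
import os

os.environ['STACK_DIR'] = '/data/stacks'   # made-up env var
path = '$STACK_DIR/AlosAT424.template'
path = os.path.expanduser(path)   # expand a leading '~' if present
path = os.path.expandvars(path)   # -> '/data/stacks/AlosAT424.template'
path = os.path.abspath(path)      # already absolute here, returned unchanged
print(path)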
def run_asc_desc2horz_vert(inps):
    """Decompose asc / desc LOS files into horz / vert file(s).
    Parameters: inps         - namespace, input parameters
    Returns:    inps.outfile - str(s) output file(s)
    """
    ## 1. calculate the overlapping area in lat/lon
    atr_list = [readfile.read_attribute(fname, datasetName=inps.ds_name) for fname in inps.file]
    S, N, W, E = get_overlap_lalo(atr_list)
    lat_step = float(atr_list[0]['Y_STEP'])
    lon_step = float(atr_list[0]['X_STEP'])
    length = int(round((S - N) / lat_step))
    width = int(round((E - W) / lon_step))
    print('overlapping area in SNWE: {}'.format((S, N, W, E)))

    ## 2. read LOS data and geometry
    num_file = len(inps.file)
    num_pixel = length * width
    dlos = np.zeros((num_file, length, width), dtype=np.float32)
    if inps.geom_file:
        los_inc_angle = np.zeros((num_file, length, width), dtype=np.float32)
        los_az_angle = np.zeros((num_file, length, width), dtype=np.float32)
    else:
        los_inc_angle = np.zeros(num_file, dtype=np.float32)
        los_az_angle = np.zeros(num_file, dtype=np.float32)

    for i, (atr, fname) in enumerate(zip(atr_list, inps.file)):
        # overlap SNWE --> box to read for each specific file
        coord = ut.coordinate(atr)
        x0 = coord.lalo2yx(W, coord_type='lon')
        y0 = coord.lalo2yx(N, coord_type='lat')
        box = (x0, y0, x0 + width, y0 + length)

        # read data
        dlos[i, :] = readfile.read(fname, box=box, datasetName=inps.ds_name)[0]
        msg = f'{inps.ds_name} ' if inps.ds_name else ''
        print(f'read {msg}from file: {fname}')

        # read geometry
        if inps.geom_file:
            los_inc_angle[i, :] = readfile.read(inps.geom_file[i], box=box, datasetName='incidenceAngle')[0]
            los_az_angle[i, :] = readfile.read(inps.geom_file[i], box=box, datasetName='azimuthAngle')[0]
            print(f'read 2D LOS incidence / azimuth angles from file: {inps.geom_file[i]}')
        else:
            los_inc_angle[i] = ut.incidence_angle(atr, dimension=0, print_msg=False)
            los_az_angle[i] = ut.heading2azimuth_angle(float(atr['HEADING']))
            print('calculate the constant LOS incidence / azimuth angles from metadata as:')
            print(f'LOS incidence angle: {los_inc_angle[i]:.1f} deg')
            print(f'LOS azimuth angle: {los_az_angle[i]:.1f} deg')

    ## 3. decompose LOS displacements into horizontal / vertical displacements
    print('---------------------')
    dhorz, dvert = asc_desc2horz_vert(dlos, los_inc_angle, los_az_angle, inps.horz_az_angle)

    ## 4. write outputs
    print('---------------------')
    # Update attributes
    atr = atr_list[0].copy()
    if inps.ds_name and atr['FILE_TYPE'] in ['ifgramStack', 'timeseries', 'HDFEOS']:
        atr['FILE_TYPE'] = 'displacement'
    atr['WIDTH'] = str(width)
    atr['LENGTH'] = str(length)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)
    atr['X_FIRST'] = str(W)
    atr['Y_FIRST'] = str(N)

    # update REF_X/Y
    ref_lat, ref_lon = float(atr['REF_LAT']), float(atr['REF_LON'])
    [ref_y, ref_x] = ut.coordinate(atr).geo2radar(ref_lat, ref_lon)[0:2]
    atr['REF_Y'] = int(ref_y)
    atr['REF_X'] = int(ref_x)

    # use ref_file for time-series file writing
    ref_file = inps.file[0] if atr_list[0]['FILE_TYPE'] == 'timeseries' else None

    if inps.one_outfile:
        print('write asc/desc/horz/vert datasets into {}'.format(inps.one_outfile))
        dsDict = {}
        for i, atr in enumerate(atr_list):
            # dataset name for LOS data
            track_num = atr.get('trackNumber', None)
            proj_name = atr.get('PROJECT_NAME', None)
            if proj_name in ['none', 'None', None]:
                proj_name = atr.get('FILE_PATH', None)
            proj_name = sensor.project_name2sensor_name(proj_name)[0]

            ds_name = proj_name if proj_name else ''
            ds_name += 'A' if atr['ORBIT_DIRECTION'].lower().startswith('asc') else 'D'
            ds_name += f'T{track_num}' if track_num else ''
            ds_name += '_{}'.format(atr['DATE12'])

            # assign dataset value
            dsDict[ds_name] = dlos[i]
        dsDict['horizontal'] = dhorz
        dsDict['vertical'] = dvert
        writefile.write(dsDict, out_file=inps.one_outfile, metadata=atr, ref_file=ref_file)

    else:
        print('writing horizontal component to file: ' + inps.outfile[0])
        writefile.write(dhorz, out_file=inps.outfile[0], metadata=atr, ref_file=ref_file)
        print('writing vertical component to file: ' + inps.outfile[1])
        writefile.write(dvert, out_file=inps.outfile[1], metadata=atr, ref_file=ref_file)

    return inps.outfile
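# Heavily simplified sketch (not MintPy's exact sign/angle convention) of the
# least-squares idea behind asc_desc2horz_vert(): model each LOS observation
# as a linear combination of vertical and horizontal motion, then invert.
# All angle and displacement values below are invented.
import numpy as np

inc = np.deg2rad([34.0, 34.0])    # asc/desc LOS incidence angles, assumed
az = np.deg2rad([-100.0, 100.0])  # asc/desc LOS azimuth angles, assumed
horz_az = np.deg2rad(-90.0)       # azimuth of the horizontal component, assumed

# one row per acquisition geometry: [vertical term, horizontal term]
G = np.stack([np.cos(inc), np.sin(inc) * np.cos(az - horz_az)], axis=1)
dlos = np.array([0.01, -0.02])    # toy asc/desc LOS displacements [m]
dvert, dhorz = np.linalg.lstsq(G, dlos, rcond=None)[0]
print(f'vertical: {dvert:.4f} m, horizontal: {dhorz:.4f} m')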
def read_data(inps):
    """
    Returns: defo: 2D np.array with invalid/masked-out pixels in NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones((int(inps.metadata['LENGTH']),
                             int(inps.metadata['WIDTH'])), dtype=np.bool_)

    # data
    if k in ['.unw', 'velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixel with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked out pixel to NaN
        inps.phase[inps.mask == 0] = np.nan
    else:
        raise ValueError("input file not supported yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    inps.height = readfile.read(inps.geom_file, datasetName='height')[0]

    # convert the height of ellipsoid to geoid (mean sea level)
    # ref: https://github.com/vandry/geoidheight
    if inps.ellipsoid2geoid:
        # import geoid module
        try:
            import geoid
        except ImportError:
            raise ImportError('Can not import geoidheight!')

        # calculate offset and correct height
        egm_file = os.path.join(os.path.dirname(geoid.__file__), 'geoids/egm2008-1.pgm')
        gh_obj = geoid.GeoidHeight(egm_file)
        h_offset = gh_obj.get(lat=np.nanmean(inps.lat), lon=np.nanmean(inps.lon))
        inps.height -= h_offset

        # print message
        msg = 'convert height from ellipsoid to geoid'
        msg += '\n\tby subtracting a constant offset of {:.2f} m'.format(h_offset)
        print(msg)

    inps.lat[inps.mask == 0] = np.nan
    inps.lon[inps.mask == 0] = np.nan
    inps.inc_angle[inps.mask == 0] = np.nan
    inps.head_angle[inps.mask == 0] = np.nan
    inps.height[inps.mask == 0] = np.nan

    # output filename
    if not inps.outfile:
        proj_name = sensor.project_name2sensor_name(inps.file)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])
    if not inps.outdir:
        inps.outdir = os.path.dirname(inps.file)
    inps.outfile = os.path.join(inps.outdir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return
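# Arithmetic sketch of the velocity -> displacement -> phase conversion in
# read_data() above; all values (wavelength, dates, velocity) are invented:
import datetime as dt
import numpy as np

wavelength = 0.0555                     # C-band radar wavelength [m], assumed
range2phase = -4. * np.pi / wavelength
vel = 0.01                              # LOS velocity [m/yr]
dt1, dt2 = dt.datetime(2015, 1, 1), dt.datetime(2016, 1, 1)
disp = vel * (dt2 - dt1).days / 365.25  # displacement over the DATE12 span [m]
phase = disp * range2phase              # displacement -> phase [rad]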
def read_data(inps):
    """
    Returns: defo: 2D np.array with invalid/masked-out pixels in NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])
    ext = os.path.splitext(inps.file)[1]

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones((int(inps.metadata['LENGTH']),
                             int(inps.metadata['WIDTH'])), dtype=np.bool_)

    # data
    if k in ['.unw', 'velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixel with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked out pixel to NaN
        inps.phase[inps.mask == 0] = np.nan
    else:
        raise ValueError("input file not supported yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])

    inps.lat[inps.mask == 0] = np.nan
    inps.lon[inps.mask == 0] = np.nan
    inps.inc_angle[inps.mask == 0] = np.nan
    inps.head_angle[inps.mask == 0] = np.nan

    # output filename
    if not inps.outfile:
        out_dir = os.path.dirname(inps.file)
        proj_name = sensor.project_name2sensor_name(out_dir)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])
        inps.outfile = os.path.join(out_dir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return
        #ax.set_xlim(dt.date(2015, 1, 1), dt.date(2019, 12, 31))
        ax.set_ylim([-20, 20])
        ax.grid(linewidth=0.25, alpha=0.5)

        ofile = odir + '/' + sname + '.png'
        print(f'{i}: Saving file: {ofile}')
        fig.savefig(ofile, dpi=300, transparent=False, bbox_inches='tight')
    except:
        print(f'No GPS data for well {sname} or something went wrong.\n')
    # plt.show()

if plot_temporal_corehence:
    work_dir = os.path.expanduser('/scratch/hpham/insar/insar_pahrump/P173_F_470/TOPSTACK/mintpy/')
    os.chdir(work_dir)
    print('Go to directory', work_dir)
    proj_name = sensor.project_name2sensor_name(work_dir)[1]

    # spatialCoh vs tempCoh
    spatial_coh_file = 'avgSpatialCoherence.h5'
    # 'temporalCoherence.h5' with unw err cor
    temp_coh_file = 'UNW_COR/tempCoh_unwrapPhase.h5'
    water_mask_file = 'waterMask.h5'

    # lava flow
    #ts_file = 'geo/geo_timeseries_ECMWF_ramp_demErr.h5'
    #vel_file = 'geo/geo_velocity.h5'
    ts_file = 'geo/geo_timeseries_ERA5_ramp_demErr.h5'
    vel_file = 'geo/geo_velocity.h5'

    dem_file = '/scratch/hpham/insar/insar_pahrump/P173_F_470/ISCE/demLat_N35_N38_Lon_W117_W113.dem.wgs84'
    ifgram_file = 'inputs/ifgramStack.h5'
def read_template2inps(templateFile, inps=None):
    """Read network options from template file into Namespace variable inps"""
    if not inps:
        inps = cmd_line_parse()
    inpsDict = vars(inps)

    # Read template file
    template = readfile.read_template(templateFile)
    auto_file = os.path.join(os.path.dirname(mintpy.__file__), 'defaults/selectNetwork.cfg')
    template = ut.check_template_auto_value(template, auto_file=auto_file)
    if not template:
        log('Empty template: ' + templateFile)
        return None

    prefix = 'selectNetwork.'
    # Check obsolete option prefix
    for i in ['selectPairs.', 'select.network.']:
        if any(i in key for key in template.keys()):
            msg = 'obsolete option prefix detected: {}\n'.format(i)
            msg += 'Use {} instead'.format(prefix)
            raise Exception(msg)
    if all(prefix not in key for key in template.keys()):
        msg = 'no valid input option detected in template file!\n'
        msg += 'Check the template below for supported options:\n'
        msg += TEMPLATE
        raise Exception(msg)

    # convert template into inpsDict
    keyList = [i for i in list(inpsDict.keys()) if prefix + i in template.keys()]
    for key in keyList:
        value = template[prefix + key]
        # bool
        if key in ['keepSeasonal']:
            inpsDict[key] = value
        elif value:
            # str
            if key in ['method', 'referenceFile', 'tempPerpList']:
                inpsDict[key] = value
            # date in YYYYMMDD
            elif key in ['referenceDate', 'startDate', 'endDate']:
                inpsDict[key] = ptime.yyyymmdd(value)
            # list of dates in YYYYMMDD
            elif key in ['excludeDate']:
                inps.excludeDate = ptime.yyyymmdd([i.strip() for i in value.split(',')])
            # float
            elif key in ['perpBaseMax', 'tempBaseMax', 'tempBaseMin', 'dopOverlapMin']:
                inpsDict[key] = float(value)
            # int
            elif key in ['connNum']:
                inpsDict[key] = int(value)

    # read tempPerpList from str
    if isinstance(inps.tempPerpList, str):
        inps.tempPerpList = [[float(j) for j in i.split(',')] for i in inps.tempPerpList.split(';')]

    # Initial network using input methods
    inps.method = inps.method.lower().replace('-', '_')
    if inps.method in ['star', 'ps']:
        inps.method = 'star'
    elif inps.method.startswith('seq'):
        inps.method = 'sequential'
    elif inps.method.startswith('hierar'):
        inps.method = 'hierarchical'
    elif inps.method in ['mst', 'min_spanning_tree', 'minimum_spanning_tree']:
        inps.method = 'mst'
    elif inps.method in ['all', 'sb']:
        inps.method = 'all'

    # for coherence prediction
    key = 'PLATFORM'
    if key in template.keys() and not inps.sensor:
        inps.sensor = template[key]

    key = 'COH_COLOR_JUMP'
    if key in template.keys():
        inps.coh_thres = float(template[key])

    # project name and sensor
    project_name = os.path.splitext(os.path.basename(inps.template_file))[0]
    log('project name: ' + project_name)
    if not inps.sensor:
        inps.sensor = sensor.project_name2sensor_name(project_name)[0]

    # Output directory/filename
    if not inps.outfile:
        if 'SCRATCHDIR' in os.environ:
            inps.out_dir = os.getenv('SCRATCHDIR') + '/' + project_name + '/PROCESS'
        else:
            try:
                inps.out_dir = os.path.dirname(os.path.abspath(inps.referenceFile))
            except:
                inps.out_dir = os.path.dirname(os.path.abspath(inps.baseline_file))
        inps.outfile = inps.out_dir + '/ifgram_list.txt'

    # Auto path of bl_list.txt file (for Miami user)
    if not inps.baseline_file and 'SCRATCHDIR' in os.environ:
        bl_file = os.path.join(os.getenv('SCRATCHDIR'), '{}/SLC/bl_list.txt'.format(project_name))
        if os.path.isfile(bl_file):
            inps.baseline_file = bl_file

    if not inps.referenceFile and not inps.baseline_file:
        raise Exception('No baseline file or reference file found! At least one is required.')

    return inps
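# Sketch of the tempPerpList parsing above: a 'temp1,perp1;temp2,perp2' string
# becomes a nested list of floats (the value below is a made-up example):
tempPerpList = '365,80;730,120'
tempPerpList = [[float(j) for j in i.split(',')] for i in tempPerpList.split(';')]
print(tempPerpList)  # [[365.0, 80.0], [730.0, 120.0]]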
def read_inps2dict(inps): """Read input Namespace object info into inpsDict""" # Read input info into inpsDict inpsDict = vars(inps) inpsDict['PLATFORM'] = None auto_template = os.path.join(os.path.dirname(__file__), 'defaults/minopyApp_auto.cfg') # Read template file template = {} for fname in list(inps.template_file): temp = readfile.read_template(fname) temp = check_template_auto_value(temp, auto_file=auto_template) template.update(temp) for key, value in template.items(): inpsDict[key] = value if 'processor' in template.keys(): template['minopy.load.processor'] = template['processor'] prefix = 'minopy.load.' key_list = [ i.split(prefix)[1] for i in template.keys() if i.startswith(prefix) ] for key in key_list: value = template[prefix + key] if key in ['processor', 'updateMode', 'compression', 'autoPath']: inpsDict[key] = template[prefix + key] elif key in ['xstep', 'ystep']: inpsDict[key] = int(template[prefix + key]) elif value: inpsDict[prefix + key] = template[prefix + key] if not 'compression' in inpsDict or inpsDict['compression'] == False: inpsDict['compression'] = None inpsDict['xstep'] = inpsDict.get('xstep', 1) inpsDict['ystep'] = inpsDict.get('ystep', 1) # PROJECT_NAME --> PLATFORM if not 'PROJECT_NAME' in inpsDict: cfile = [ i for i in list(inps.template_file) if os.path.basename(i) != 'minopyApp.cfg' ] inpsDict['PROJECT_NAME'] = sensor.project_name2sensor_name(cfile)[1] msg = 'SAR platform/sensor : ' sensor_name = sensor.project_name2sensor_name(str( inpsDict['PROJECT_NAME']))[0] if sensor_name: msg += str(sensor_name) inpsDict['PLATFORM'] = str(sensor_name) else: msg += 'unknown from project name "{}"'.format( inpsDict['PROJECT_NAME']) print(msg) # Here to insert code to check default file path for miami user work_dir = os.path.dirname( os.path.dirname(os.path.dirname(inpsDict['outfile'][0]))) if inpsDict.get('autoPath', False): print(('check auto path setting for Univ of Miami users' ' for processor: {}'.format(inpsDict['processor']))) inpsDict = auto_path.get_auto_path(processor=inpsDict['processor'], work_dir=work_dir, template=inpsDict) return inpsDict