def export_traces(self, path, prefix='traces/obs'):
    """ Copies this event's traces into the shared output location. """
    # HACK: noexit works around mkdir failures seen on parallel filesystems
    unix.mkdir(join(path, 'traces'), noexit=True)
    unix.cp(join(self.getpath, prefix),
            join(path, 'traces', basename(self.getpath)))
def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
    """ Performs meshing and database generation """
    assert(model_name)
    assert(model_type)

    self.initialize_solver_directories()
    unix.cd(self.getpath)

    if model_type in ['gll']:
        # warn if the Par_file does not already request a GLL model
        par = getpar('MODEL').strip()
        if par != 'gll':
            if self.getnode == 0:
                # print() call form works under both Python 2 and 3
                # (the original used Python-2-only print statements)
                print('WARNING: Unexpected Par_file setting:')
                print('MODEL = %s' % par)

        assert(exists(model_path))
        self.check_mesh_properties(model_path)

        # stage model files into the solver databases directory
        src = glob(model_path +'/'+ '*')
        dst = self.model_databases
        unix.cp(src, dst)

        # run mesher and database generation executables
        self.call('bin/xmeshfem3D')
        self.call('bin/xgenerate_databases')
        self.export_model(PATH.OUTPUT +'/'+ model_name)
    else:
        raise NotImplementedError
def setup(self):
    """ Prepares solver for inversion or migration """
    # start from a clean scratch directory
    unix.rm(self.getpath)

    # Observations come either from user-supplied data (PATH.DATA) or are
    # synthesized on the fly from a target model (PATH.MODEL_TRUE).
    if PATH.DATA:
        # user supplied data: copy into the scratch directory
        self.initialize_solver_directories()
        unix.cp(
            glob(PATH.DATA +'/'+ basename(self.getpath) +'/'+ '*'),
            'traces/obs/')
    else:
        # generate data on the fly from the true model
        self.generate_data(
            model_path=PATH.MODEL_TRUE,
            model_name='model_true',
            model_type='gll')

    # prepare initial model
    self.generate_mesh(
        model_path=PATH.MODEL_INIT,
        model_name='model_init',
        model_type='gll')

    self.initialize_adjoint_traces()
def evaluate_gradient(self, model_dir=''):
    """ Compute event gradient by running adjoint simulation """
    # which event this task handles
    task_id = system.getnode()

    # trace directories for this event
    path_syn = join(self.getpath, 'traces', 'syn')
    path_adj = join(self.getpath, 'traces', 'adj')

    # configure solver for an adjoint run
    self.set_par_cfg(external_model_dir=model_dir,
                     output_dir=path_syn,
                     save_forward_wavefield=False,
                     adjoint_sim=True,
                     adjoint_dir=path_adj)

    # configure source position for this event
    xs = self.sources[task_id][0]
    zs = self.sources[task_id][1]
    self.set_src_cfg(xs=float(xs), zs=float(zs))

    # keep a copy of the configuration alongside the adjoint traces
    for cfg_name in ('par.cfg', 'src.cfg'):
        unix.cp(join(self.getpath, 'INPUT', cfg_name), path_adj)

    # run adjoint simulation
    self.adjoint()

    # remove saved boundary wavefield files
    unix.rm(glob(join(path_syn, 'proc*')))
def write_model(self, path='', suffix=''):
    """ Writes model in format used by solver """
    unix.mkdir(path)
    model_dir = path +'/'+ 'model'
    unix.mkdir(model_dir)
    # seed the destination with the reference binary files, then split
    # the optimization vector m_<suffix> into per-parameter files
    unix.cp(glob(join(PATH.MODEL, '*.bin')), model_dir)
    solver.split(PATH.OPTIMIZE +'/'+ 'm_' + suffix, model_dir, '.bin')
def setup(self):
    """ Lays groundwork for inversion """
    if PAR.BEGIN == 1:
        # seed the working directory with the initial gradient
        unix.cp(PATH.GRAD_INIT, PATH.GRAD_AGG)

    preprocess.setup()
    postprocess.setup()
    optimize.setup()

    # set up one solver instance per source
    for source_index in range(PAR.NSRC):
        solver.setup(subset=[source_index])
def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
    """ Performs meshing and database generation """
    assert(model_name)
    assert(model_type)

    self.initialize_solver_directories()
    unix.cd(self.getpath)

    assert(exists(model_path))
    self.check_mesh_properties(model_path)

    # stage the model file where the solver expects it
    unix.cp(model_path,
            join(self.getpath, 'DATA/proc000000_rho_vp_vs.dat'))

    self.export_model(PATH.OUTPUT +'/'+ model_name)
def setup(self):
    """ Perform setup. Generates synthetic observed data. """
    # start from clean solver directories
    unix.rm(self.getpath)
    self.initialize_solver_directories()

    if PATH.DATA:
        # user supplied data: copy into scratch
        unix.cp(
            glob(PATH.DATA +'/'+ basename(self.getpath) +'/'+ '*'),
            'traces/obs/')
    else:
        # synthesize observed data from the true model
        self.generate_data(
            model_dir=PATH.MODEL_TRUE,
            output_dir=join(self.getpath, 'traces', 'obs'))
def initialize_adjoint_traces(self):
    """ Sets up adjoint traces, applying SPECFEM3D naming workarounds. """
    super(specfem3d, self).initialize_adjoint_traces()

    # workaround for the solver's use of different name conventions for
    # regular traces and 'adjoint' traces
    if PAR.FORMAT in ['SU', 'su']:
        su_files = glob(self.cwd +'/'+ 'traces/adj/*SU')
        unix.rename('_SU', '_SU.adj', su_files)

    # workaround for SPECFEM3D's requirement that all components exist,
    # even ones not in use: copy the active channel to any missing ones
    unix.cd(self.cwd +'/'+ 'traces/adj')
    for iproc in range(PAR.NPROC):
        src = '%d_d%s_SU.adj' % (iproc, PAR.CHANNELS[0])
        for channel in ['x', 'y', 'z']:
            dst = '%d_d%s_SU.adj' % (iproc, channel)
            if not exists(dst):
                unix.cp(src, dst)
def initialize_adjoint_traces(self):
    """ Sets up adjoint traces, applying SPECFEM2D naming workarounds. """
    super(specfem2d, self).initialize_adjoint_traces()

    if PAR.FORMAT in ['SU', 'su']:
        # work around SPECFEM2D's use of different name conventions for
        # regular traces and 'adjoint' traces
        su_files = glob('traces/adj/*.su')
        unix.rename('.su', '.su.adj', su_files)

        # work around SPECFEM2D's requirement that all components exist,
        # even ones not in use: copy the active channel to missing ones
        unix.cd(self.cwd +'/'+ 'traces/adj')
        template = 'U%s_file_single.su.adj'
        src = template % PAR.CHANNELS[0]
        for channel in ['x', 'y', 'z', 'p']:
            dst = template % channel
            if not exists(dst):
                unix.cp(src, dst)
def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
    """ Performs meshing and database generation """
    assert(model_name)
    assert(model_type)

    self.initialize_solver_directories()
    unix.cd(self.cwd)

    assert(exists(model_path))
    self.check_mesh_properties(model_path)

    # copy model files into the solver DATA directory
    unix.cp(glob(join(model_path, '*')), join(self.cwd, 'DATA'))

    # only the first task exports the model
    if self.taskid == 0:
        self.export_model(PATH.OUTPUT +'/'+ model_name)
def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
    """ Performs meshing and database generation """
    assert(model_name)
    assert(model_type)

    self.initialize_solver_directories()
    unix.cd(self.getpath)

    # only GLL models are supported
    if model_type != 'gll':
        raise NotImplementedError

    assert(exists(model_path))
    self.check_mesh_properties(model_path)

    # stage model files, run the mesher, then export
    unix.cp(glob(model_path +'/'+ '*'), self.model_databases)
    self.mpirun('bin/xmeshfem3D')
    self.export_model(PATH.OUTPUT +'/'+ model_name)
def generate_data(self, model_dir=PATH.MODEL_TRUE, output_dir='',
                  save_wavefield=False):
    """ Generate dataset. Defaults to generating synthetic data for
      true model.
    """
    # which event this task handles
    task_id = system.getnode()

    # configure solver for a forward run
    self.set_par_cfg(external_model_dir=model_dir,
                     output_dir=output_dir,
                     save_forward_wavefield=save_wavefield)

    # configure source position for this event
    xs = self.sources[task_id][0]
    zs = self.sources[task_id][1]
    self.set_src_cfg(xs=float(xs), zs=float(zs))

    # keep a copy of the configuration with the output
    for cfg_name in ('par.cfg', 'src.cfg'):
        unix.cp(join(self.getpath, 'INPUT', cfg_name), output_dir)

    # run forward simulation
    self.forward()
def initialize_solver_directories(self):
    """ Initialize solver directories. """
    unix.mkdir(self.getpath)
    unix.cd(self.getpath)

    # create directory structure
    for subdir in ['INPUT', 'bin',
                   'traces/obs', 'traces/syn', 'traces/adj']:
        unix.mkdir(subdir)

    # copy executables
    unix.cp(glob(join(PATH.SOLVER_BIN, '*')), 'bin/')

    # copy input files
    unix.cp(glob(join(PATH.SOLVER_INPUT, '*')), 'INPUT/')
def generate_precond(self, process_traces=None, model_path=None,
                     model_name=None, model_type='gll'):
    """ Builds a preconditioner by running forward and adjoint
        simulations on the supplied model.

        process_traces : callable invoked on the solver directory to turn
            synthetic traces into adjoint sources
    """
    assert(model_name)
    assert(model_type)
    # original asserted exists(model_path) twice; once is sufficient
    assert(exists(model_path))

    self.initialize_solver_directories()
    unix.cd(self.getpath)
    self.check_mesh_properties(model_path)

    # stage model where the solver expects it, and export a copy
    src = model_path
    dst = join(self.getpath, 'DATA/proc000000_rho_vp_vs.dat')
    unix.cp(src, dst)
    self.export_model(PATH.OUTPUT +'/'+ model_name)

    # forward simulation, trace processing, then adjoint simulation
    self.forward()
    unix.mv(self.data_wildcard, 'traces/syn')
    self.initialize_adjoint_traces('traces/syn')
    process_traces(self.getpath)
    self.adjoint()
    self.export_kernels(PATH.GLOBAL)
def initialize_solver_directories(self):
    """ Creates directory structure expected by SPECFEM3D, copies
      executables, and prepares input files. Executables must be supplied
      by user as there is currently no mechanism for automatically
      compiling from source.
    """
    unix.mkdir(self.getpath)
    unix.cd(self.getpath)

    # create directory structure
    for subdir in ['bin', 'DATA', 'OUTPUT_FILES',
                   'traces/obs', 'traces/syn', 'traces/adj']:
        unix.mkdir(subdir)
    unix.mkdir(self.model_databases)
    unix.mkdir(self.kernel_databases)

    # copy executables
    unix.cp(glob(PATH.SPECFEM_BIN +'/'+ '*'), 'bin/')

    # copy input files
    unix.cp(glob(PATH.SPECFEM_DATA +'/'+ '*'), 'DATA/')

    # select the source file belonging to this event
    unix.cp('DATA/' + self.source_prefix +'_'+ basename(self.getpath),
            'DATA/' + self.source_prefix)

    self.check_solver_parameter_files()
def export_model(self, path):
    """ Copies binary model files to path; first task only. """
    if self.taskid != 0:
        return
    unix.mkdir(path)
    unix.cp(glob(join(self.cwd, 'DATA/*.bin')), path)
def save_kernels_sum(self):
    """ Copies summed kernels to the output directory """
    kernels_out = PATH.OUTPUT +'/'+ 'kernels'
    unix.mkdir(kernels_out)
    unix.cp(PATH.GLOBAL +'/'+ 'kernels/sum', kernels_out)
def save_traces(self):
    """ Copies traces to the output directory """
    unix.cp(PATH.GLOBAL +'/'+ 'traces', PATH.OUTPUT)
def export_model(self, path):
    """ Copies binary model files to path; first task only. """
    if self.getnode != 0:
        return
    unix.mkdir(path)
    unix.cp(glob(join(self.getpath, 'DATA/*.bin')), path)
def export_model(self, path):
    """ Copies binary model files to the given path """
    unix.mkdir(path)
    model_files = glob(join(self.cwd, 'DATA/*.bin'))
    unix.cp(model_files, path)
def export_model(self, path):
    """ Copies velocity model file to path; first task only. """
    if system.getnode() != 0:
        return
    unix.cp(join(self.getpath, 'DATA/model_velocity.dat_input'), path)
def export_traces(self, path, prefix='traces/obs'):
    """ Copies this event's traces into the shared output location. """
    unix.mkdir_gpfs(join(path, 'traces'))
    unix.cp(join(self.getpath, prefix),
            join(path, 'traces', self.getname))
def export_model(self, path, solver_parameters=('rho', 'vp', 'vs')):
    """ Copies model files for the given parameters to path; first
        task only.

        solver_parameters : parameter names whose *.bin files are copied
            (tuple default replaces the original mutable list default —
            backward compatible, avoids the mutable-default pitfall)
    """
    if self.getnode == 0:
        unix.mkdir(path)
        for key in solver_parameters:
            files = glob(join(self.model_databases, '*'+key+'.bin'))
            unix.cp(files, path)
def export_model(self, path):
    """ Copies model file to path; first task only. """
    if system.getnode() != 0:
        return
    model_file = join(self.getpath, 'DATA/proc000000_rho_vp_vs.dat')
    unix.cp(model_file, path)
def export_kernels(self, path):
    """ Copies this event's kernel file into the shared kernels dir. """
    unix.mkdir(join(path, 'kernels'), noexit=True)
    kernel_file = join(
        self.getpath, 'OUTPUT_FILES/proc000000_rhop_alpha_beta_kernel.dat')
    # destination named after this event's directory
    unix.cp(kernel_file, join(path, 'kernels', basename(self.getpath)))
def import_traces(self, path):
    """ Copies observed traces for this event into scratch. """
    trace_files = glob(join(path, 'traces', basename(self.getpath), '*'))
    unix.cp(trace_files, join(self.getpath, 'traces/obs'))
def export_traces(self, path, prefix='traces/obs'):
    """ Copies this event's traces into the output location. """
    # join(path) with a single argument is just path
    unix.mkdir(path)
    unix.cp(join(self.cwd, prefix), join(path, self.source_name))
def import_model(self, path):
    """ Copies model files into the solver DATA directory. """
    model_files = glob(path + '/' + 'model/*')
    unix.cp(model_files, join(self.getpath, 'DATA/'))
def import_model(self, path):
    """ Copies model into the solver's velocity input file. """
    # join with one argument is an identity; spell the path directly
    unix.cp(path + '/' + 'model',
            join(self.getpath, 'DATA/model_velocity.dat_input'))
def import_model(self, path):
    """ Copies model into the solver's expected input file. """
    unix.cp(join(path +'/'+ 'model'),
            join(self.getpath, 'DATA/proc000000_rho_vp_vs.dat'))
def import_traces(self, path):
    """ Copies observed traces for this event into scratch. """
    obs_dir = join(self.getpath, 'traces/obs')
    unix.cp(glob(join(path, 'traces', self.getname, '*')), obs_dir)
def prepare_model(self):
    """ Copies initial model into the scratch directory. """
    model_init = PATH.OUTPUT + '/' + 'model_init'
    assert exists(model_init)
    unix.cp(model_init, PATH.SCRATCH + '/' + 'model')
def export_kernels(self, path):
    """ Copies this event's kernel file into the shared kernels dir. """
    unix.mkdir_gpfs(join(path, 'kernels'))
    kernel_file = join(
        self.getpath, 'OUTPUT_FILES/proc000000_rhop_alpha_beta_kernel.dat')
    # destination named after the zero-padded task number
    unix.cp(kernel_file, join(path, 'kernels', '%06d' % system.getnode()))
def save_kernels_sum(self):
    """ Copies summed kernels to the output directory """
    kernels_out = PATH.OUTPUT + '/' + 'kernels'
    unix.mkdir(kernels_out)
    unix.cp(PATH.SCRATCH + '/' + 'kernels/sum', kernels_out)
def update(self, chi, kk, tflag):
    """ Updates SRVM algorithm history.

        Applies the stored rank-one corrections to chi. tflag selects the
        traversal order of the history window (forward for tflag == 1,
        backward for tflag == 0); any other tflag returns chi unchanged,
        matching the original control flow.
    """
    unix.cd(self.path)
    mm = 500  # history window length

    # the original duplicated the whole loop body in both branches;
    # only the index sequence differed, so compute it once up front
    if tflag == 1:
        indices = [ii + 1 + kk - mm for ii in range(mm)]
    elif tflag == 0:
        indices = [kk - ii for ii in range(mm)]
    else:
        indices = []

    Shat_chi = chi
    for jj in indices:
        if jj > 0:
            # load scalar a, scalar nu, and vector w for step jj
            unix.cp('a_%04d' % jj, 'A')
            a = self.loadtxt('A')
            unix.cp('nu_%04d' % jj, 'Nu')
            nu = self.loadtxt('Nu')
            unix.cp('w_%04d' % jj, 'W')
            wtemp = self.load('W')
            # apply rank-one correction
            xtemp = dot(wtemp, Shat_chi)
            Shat_chi = Shat_chi - xtemp * nu / a * wtemp
    return Shat_chi
def save_kernels(self):
    """ Copies kernels to the output directory """
    unix.mkdir(PATH.OUTPUT)
    unix.cp(PATH.SCRATCH + '/' + 'kernels', PATH.OUTPUT)
def import_model(self, path):
    """ Copies model files into the solver DATA directory. """
    model_files = glob(path +'/'+ 'model/*')
    unix.cp(model_files, join(self.cwd, 'DATA/'))
def save_traces(self):
    """ Copies traces to the output directory """
    unix.cp(PATH.SCRATCH +'/'+ 'traces', PATH.OUTPUT)
def prepare_model(self):
    """ Copies initial model into the global scratch directory. """
    model_init = PATH.OUTPUT +'/'+ 'model_init'
    assert exists(model_init)
    unix.cp(model_init, PATH.GLOBAL +'/'+ 'model')
def export_model(self, path, solver_parameters=('rho', 'vp', 'vs')):
    """ Copies model files for the given parameters to path; first
        task only.

        solver_parameters : parameter names whose *.bin files are copied
            (tuple default replaces the original mutable list default —
            backward compatible, avoids the mutable-default pitfall)
    """
    if self.getnode == 0:
        unix.mkdir(path)
        for key in solver_parameters:
            files = glob(join(self.model_databases, '*' + key + '.bin'))
            unix.cp(files, path)
def save_kernels(self):
    """ Copies kernels to the output directory """
    unix.mkdir(PATH.OUTPUT)
    unix.cp(PATH.GLOBAL +'/'+ 'kernels', PATH.OUTPUT)
def export_traces(self, path, prefix='traces/obs'):
    """ Copies this event's traces into the shared output location. """
    # noexit tolerates pre-existing directories on shared filesystems
    unix.mkdir(join(path, 'traces'), noexit=True)
    unix.cp(join(self.getpath, prefix),
            join(path, 'traces', basename(self.getpath)))
def save_traces(self):
    """ Copies traces to the output directory """
    unix.cp(PATH.SCRATCH + '/' + 'traces', PATH.OUTPUT)
def setup(self):
    """ Performs any required initialization or setup tasks """
    # seed the estimated model with the initial model binaries
    model_files = glob(join(PATH.MODEL_INIT, '*.bin'))
    unix.cp(model_files, join(PATH.MODELS, 'model_est'))
def update(self, chi, kk, tflag):
    """ Updates SRVM algorithm history.

        Applies kk stored rank-one corrections to chi, traversing the
        history forward (tflag == 1) or backward (tflag == 0); any other
        tflag returns chi unchanged, matching the original control flow.
    """
    unix.cd(self.path)

    # the original duplicated the whole loop body in both branches (and
    # redundantly re-assigned Shat_chi = chi in the tflag == 0 branch);
    # only the traversal order differed, so compute it once up front
    if tflag == 1:
        indices = range(1, kk + 1)
    elif tflag == 0:
        indices = range(kk, 0, -1)
    else:
        indices = []

    Shat_chi = chi
    for jj in indices:
        # load scalar A, scalar nu, and vector w for step jj
        unix.cp('a_%04d' % jj, 'A')
        A = self.loadtxt('A')
        unix.cp('nu_%04d' % jj, 'Nu')
        nu = self.loadtxt('Nu')
        unix.cp('w_%04d' % jj, 'W')
        wtemp = self.load('W')
        # apply rank-one correction
        xtemp = dot(wtemp, Shat_chi)
        Shat_chi = Shat_chi - xtemp * nu / A * wtemp
    return Shat_chi
def export_model(self, path, parameters=('rho', 'vp', 'vs', 'Qkappa')):
    """ Copies model files for the given parameters to path; first
        task only.

        parameters : parameter names whose *.bin files are copied
            (tuple default replaces the original mutable list default —
            backward compatible, avoids the mutable-default pitfall)
    """
    if self.taskid == 0:
        unix.mkdir(path)
        for key in parameters:
            files = glob(join(self.model_databases, '*'+key+'.bin'))
            unix.cp(files, path)