def check_solver_parameter_files(self):
    """ Checks solver parameters

    Reads nt, deltat and f0 from the solver input files and compares
    them against the corresponding SeisFlows parameters (PAR.NT,
    PAR.DT, PAR.F0).  On any mismatch, a warning is printed from the
    head node only, and the solver file is overwritten with the
    SeisFlows value on every node.
    """
    # values currently in the solver input files
    nt = getpar('nt', cast=int)
    dt = getpar('deltat', cast=float)
    # source dominant frequency lives in DATA/SOURCE, not the Par_file
    f0 = getpar('f0', file='DATA/SOURCE', cast=float)
    if nt != PAR.NT:
        if system.getnode() == 0:
            print "WARNING: nt != PAR.NT"
        # setpar runs on all nodes; only the warning is rank-gated
        setpar('nt', PAR.NT)
    if dt != PAR.DT:
        if system.getnode() == 0:
            print "WARNING: dt != PAR.DT"
        setpar('deltat', PAR.DT)
    if f0 != PAR.F0:
        if system.getnode() == 0:
            print "WARNING: f0 != PAR.F0"
        setpar('f0', PAR.F0, file='DATA/SOURCE')
    if self.mesh.nproc != PAR.NPROC:
        # mismatch is reported but not corrected here
        if system.getnode() == 0:
            print 'WARNING: mesh.nproc != PAR.NPROC'
    if 'MULTIPLES' in PAR:
        # modeling multiples requires a reflective (non-absorbing) top
        # boundary; otherwise absorb at the top
        if PAR.MULTIPLES:
            setpar('absorbtop', '.false.')
        else:
            setpar('absorbtop', '.true.')
def hello(self): """ Sends hello message from compute node """ import time time.sleep(1) print 'Hello from', system.getnode() print ''
def export_model(self, path):
    """ Copies model binary files from the solver databases to *path*.

    Only the head node (task 0) performs the copy; all other tasks
    do nothing.
    """
    if system.getnode() != 0:
        return
    for parameter in self.model_parameters:
        pattern = join(self.databases, '*_' + parameter + '.bin')
        unix.mkdir(path)
        unix.cp(glob(pattern), path)
def process_traces(self, s, h):
    """ Performs data processing operations on traces

    :param s: seismic trace data
    :param h: trace headers
    :return: processed copy of ``s``

    Applies the filters selected by the PAR flags, then mutes the
    direct arrival according to PAR.MUTE.
    """
    # filter data
    if PAR.BANDPASS:
        s = sbandpass(s, h, PAR.FREQLO, PAR.FREQHI)
    if PAR.HIGHPASS:
        s = shighpass(s, h, PAR.FREQLO)
    # BUG FIX: this branch was guarded by PAR.HIGHPASS (copy-paste
    # error), so the lowpass was applied whenever highpass was
    # requested and could never be requested on its own.
    if PAR.LOWPASS:
        s = slowpass(s, h, PAR.FREQHI)

    # mute direct arrival
    if PAR.MUTE == 1:
        # constant-offset mute
        vel = PAR.MUTESLOPE
        off = PAR.MUTECONST
        s = smute(s, h, vel, off, constant_spacing=False)
    elif PAR.MUTE == 2:
        # source-dependent mute: slope rescaled to receiver spacing,
        # keyed on this task's source index
        import system
        vel = PAR.MUTESLOPE * (PAR.NREC + 1) / (PAR.XMAX - PAR.XMIN)
        off = PAR.MUTECONST
        src = system.getnode()
        s = smute(s, h, vel, off, src, constant_spacing=True)

    return s
def initialize_io_machinery(self):
    """ Writes mesh files expected by input/output methods

    Head-node-only setup: dumps the non-inverted model parameters
    (plus coordinates 'x', 'z') under PATH.GLOBAL/mesh, one file per
    processor, and seeds the optimization vector 'm_new' under
    PATH.OPTIMIZE if it does not already exist.
    """
    if system.getnode() == 0:
        model_set = set(self.model_parameters)
        inversion_set = set(self.inversion_parameters)
        # parts: per-parameter, per-processor model arrays
        parts = self.load(PATH.MODEL_INIT)
        try:
            path = PATH.GLOBAL + '/' + 'mesh'
        except:
            # NOTE(review): bare except re-raised as a bare Exception
            # discards the original error; presumably guards against
            # PATH.GLOBAL being unset — confirm and narrow.
            raise Exception
        if not exists(path):
            # write only parameters that are NOT being inverted for,
            # plus the mesh coordinates
            for key in list(setdiff(model_set, inversion_set)) + ['x', 'z']:
                unix.mkdir(path + '/' + key)
                for proc in range(PAR.NPROC):
                    # one npy file per processor, named 000000, 000001, ...
                    with open(path + '/' + key + '/' + '%06d' % proc, 'w') as file:
                        np.save(file, parts[key][proc])
        try:
            path = PATH.OPTIMIZE + '/' + 'm_new'
        except:
            # no optimization directory configured: nothing more to do
            # (NOTE(review): bare except — presumably PATH.OPTIMIZE may
            # be unset; confirm and narrow.)
            return
        if not exists(path):
            # initial optimization vector: all inversion parameters merged
            savenpy(path, self.merge(parts))
def import_model(self, path):
    """ Reads model files from *path*/model and writes them into the
        solver databases.

    The head node (task 0) loads with verbose output; all other
    nodes load quietly.
    """
    files = glob(join(path, 'model', '*'))
    if system.getnode() == 0:
        model = self.load(files, verbose=True)
    else:
        model = self.load(files)
    self.save(self.databases, model)
def evaluate_gradient(self, model_dir=''):
    """ Compute event gradient by running adjoint simulation

    :param model_dir: optional external model directory passed through
        to the solver configuration
    """
    source_id = system.getnode()

    # directories holding this event's synthetic and adjoint traces
    syn_traces = join(self.getpath, 'traces', 'syn')
    adj_traces = join(self.getpath, 'traces', 'adj')

    # write solver configuration for an adjoint run
    self.set_par_cfg(external_model_dir=model_dir,
                     output_dir=syn_traces,
                     save_forward_wavefield=False,
                     adjoint_sim=True,
                     adjoint_dir=adj_traces)

    # write source configuration for this event
    coords = self.sources[source_id]
    self.set_src_cfg(xs=float(coords[0]), zs=float(coords[1]))

    # stage configuration files alongside the adjoint traces
    for cfg in ('par.cfg', 'src.cfg'):
        unix.cp(join(self.getpath, 'INPUT', cfg), adj_traces)

    # run adjoint simulation, then remove saved boundary wavefields
    self.adjoint()
    unix.rm(glob(join(syn_traces, 'proc*')))
def getname(self):
    """ Name of the source assigned to the current task.

    On first access, builds a sorted list of source names from the
    SOURCE_* files and caches it on the instance.
    """
    if not hasattr(self, 'sources'):
        found = glob(PATH.SOLVER_FILES + '/' + 'SOURCE_*')
        self.sources = sorted(
            unix.basename(p).split('_')[-1] for p in found)
    return self.sources[system.getnode()]
def evaluate_function(self, path=''):
    """ Evaluate test function

    Generates synthetics for this task's event using the trial model
    under *path*/model, then evaluates the trial-step misfit.
    """
    source_id = system.getnode()

    # per-event output directory for the trial synthetics
    run_dir = join(path, event_dirname(source_id + 1))
    unix.mkdir(run_dir)

    self.generate_data(model_dir=join(path, 'model'),
                       output_dir=run_dir)
    preprocess.evaluate_trial_step(self.getpath, run_dir)
def generate_data(self, model_dir=PATH.MODEL_TRUE, output_dir='',
                  save_wavefield=False):
    """ Generate dataset. Defaults to generating synthetic data for
        true model.

    :param model_dir: directory of the model to run the forward
        simulation with (default: true model)
    :param output_dir: directory receiving the generated traces
    :param save_wavefield: whether the solver saves the forward
        wavefield for later adjoint use
    """
    source_id = system.getnode()

    # write solver configuration for a forward run
    self.set_par_cfg(external_model_dir=model_dir,
                     output_dir=output_dir,
                     save_forward_wavefield=save_wavefield)

    # write source configuration for this event
    coords = self.sources[source_id]
    self.set_src_cfg(xs=float(coords[0]), zs=float(coords[1]))

    # stage configuration files alongside the output
    for cfg in ('par.cfg', 'src.cfg'):
        unix.cp(join(self.getpath, 'INPUT', cfg), output_dir)

    # run forward simulation
    self.forward()
def check_mesh_properties(self, path=None, parameters=None): if not hasattr(self, '_mesh_properties'): if not path: path = PATH.MODEL_INIT if not parameters: parameters = self.parameters M = np.loadtxt(path) nrow = M.shape[0] ncol = M.shape[1] if PAR.NPROC != 1: if system.getnode() == 0: print 'Warning: mesh.nproc != PAR.NPROC' self.mesh_properties = Struct([ ['nproc', 1], ['ngll', [nrow]]]) return self.mesh_properties
def getnode(self):
    """ Returns the id of the task this call is running on """
    task_id = system.getnode()
    return task_id
def export_kernels(self, path):
    """ Copies this event's kernel file into *path*/kernels, named by
        zero-padded task id.
    """
    kernels_dir = join(path, 'kernels')
    unix.mkdir_gpfs(kernels_dir)
    kernel_file = join(
        self.getpath,
        'OUTPUT_FILES/proc000000_rhop_alpha_beta_kernel.dat')
    unix.cp(kernel_file, join(kernels_dir, '%06d' % system.getnode()))
def export_model(self, path):
    """ Copies the current model file to *path*.

    Head node (task 0) only; other tasks return immediately.
    """
    if system.getnode() != 0:
        return
    unix.cp(join(self.getpath, 'DATA/model_velocity.dat_input'), path)
def export_model(self, path):
    """ Copies the current model file to *path*.

    Head node (task 0) only; other tasks return immediately.
    """
    if system.getnode() != 0:
        return
    unix.cp(join(self.getpath, 'DATA/proc000000_rho_vp_vs.dat'), path)
def getpath(self):
    """ Path of this task's event directory under PATH.SOLVER.

    Event directories are numbered starting from 1.
    """
    return join(PATH.SOLVER, event_dirname(system.getnode() + 1))
def getnode(self):
    """ Returns the current task id.

    Kept as a thin instance method because it is sometimes useful to
    overload system.getnode.
    """
    return system.getnode()