def setup(self):
    # link adios binaries
    self.ln(getpath('adios', 'bin'), 'adios')

    for i in range(self.niters):
        # workspace for the i-th iteration
        self.add(ws := Workspace(f'iter_{i:02d}', {'iteration': i}))

        # link initial model
        path_model = getpath('model_init') if i == 0 else self.abs(
            f'iter_{i-1:02d}/model_new.bp')
        ws.add(partial(ws.ln, path_model, 'model_init.bp'), 'link_models')

        # compute kernels
        ws.add(create_kernel('kernel', {'path_model': ws.abs('model_init.bp')}))
        ws.add(partial(ws.ln, ws.abs('kernel/kernels.bp'), 'kernels.bp'), 'link_kernels')

        # compute direction
        ws.add(partial(self.compute_direction, ws, i))

        # line search
        ws.add(create_search('search'))
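
# A minimal standalone sketch (not part of the original module) of how the
# iteration chain above resolves model paths: iteration 0 links the configured
# starting model, while each later iteration links the line-search output
# ('model_new.bp') of the previous iteration as its 'model_init.bp'.
for i in range(3):
    src = "getpath('model_init')" if i == 0 else f'iter_{i-1:02d}/model_new.bp'
    print(f'iter_{i:02d}/model_init.bp -> {src}')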
def pick_catalog():
    """Process observed traces in catalog."""
    ws = Workspace()
    ws.rm('picked')

    obs = getpath('obs')
    syn = getpath('syn')

    if obs and syn:
        obsdir = ws.subdir(obs)
        syndir = ws.subdir(syn)

        for trace in obsdir.ls():
            event = trace.split('.')[0]
            ws.mkdir(f'picked/{event}')
            ws.add(asdf_task(
                (obsdir.abs(trace), syndir.abs(syndir.ls(grep=f'{event}.*')[0])),
                None, partial(pick, event),
                input_type='stream', accessor=True, name=event))

    return ws
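
# Illustration of the filename convention pick_catalog() assumes: the event
# name is the part of the trace filename before the first '.', and the
# matching synthetic trace is found by grepping for that prefix. The
# filename below is hypothetical.
trace = 'C201107191935A.obs.h5'
event = trace.split('.')[0]  # -> 'C201107191935A'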
async def _apply_preconditioner(self):
    kl = 'kernels_smooth.bp' if self.smooth_kernels else 'kernels_raw.bp'
    hess = 'hess_vel_smooth.bp' if self.smooth_hessian else 'kernels_raw.bp'
    cmd = f'xprecond_kernels {kl} {hess} kernels_precond.bp {self.precondition}'
    await self.mpiexec(getpath('adios', 'bin', cmd), getsize(self), 1, 0, 'adios')
def getpars(d: Optional[Directory] = None) -> Par_file:
    """Get entries in Par_file."""
    if d is None:
        d = Directory(getpath('specfem'))

    pars: dict = {}

    for line in d.readlines('DATA/Par_file'):
        if '=' in line:
            keysec, valsec = line.split('=')[:2]
            key = keysec.split()[0]
            val = valsec.split('#')[0].split()[0]

            if val == '.true.':
                pars[key] = True
            elif val == '.false.':
                pars[key] = False
            elif val.isnumeric():
                pars[key] = int(val)
            else:
                try:
                    # Fortran double-precision literals use D instead of E
                    pars[key] = float(val.replace('D', 'E').replace('d', 'e'))
                except ValueError:
                    pars[key] = val

    return cast(Par_file, pars)
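
# Hedged usage sketch for getpars(). Assuming DATA/Par_file contains the
# (hypothetical) lines
#   SIMULATION_TYPE          = 1
#   USE_LDDRK                = .false.
#   RECORD_LENGTH_IN_MINUTES = 2.5d0
# the parser yields typed values:
pars = getpars()
# pars['SIMULATION_TYPE']          == 1     ('1'.isnumeric() -> int)
# pars['USE_LDDRK']                is False ('.false.' branch)
# pars['RECORD_LENGTH_IN_MINUTES'] == 2.5   ('2.5d0' -> '2.5e0' -> float)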
def _setup_forward(self):
    # specfem directory wrapper
    d = Directory(getpath('specfem'))

    # setup specfem workspace
    self.mkdir('DATA')
    self.mkdir('OUTPUT_FILES')
    self.mkdir('DATABASES_MPI')

    self.ln(d.abs('bin'))
    self.cp(d.abs('DATA/Par_file'), 'DATA')
    self.cp(self.path_event if self.path_event else d.abs('DATA/CMTSOLUTION'),
            'DATA/CMTSOLUTION')
    self.cp(self.path_stations if self.path_stations else d.abs('DATA/STATIONS'),
            'DATA/STATIONS')

    # link model
    for subdir in d.ls('DATA', isdir=True):
        if subdir != 'GLL' or not self.path_model:
            self.ln(d.abs('DATA', subdir), 'DATA')

    if self.path_model:
        self.mkdir('DATA/GLL')
        self.ln(self.path_model, 'DATA/GLL/model_gll.bp')

    # update Par_file
    pars: Par_file = {'SIMULATION_TYPE': 1}

    if self.save_forward is not None:
        pars['SAVE_FORWARD'] = self.save_forward

    if self.monochromatic_source is not None:
        pars['USE_MONOCHROMATIC_CMT_SOURCE'] = self.monochromatic_source

    if self.duration is not None:
        pars['RECORD_LENGTH_IN_MINUTES'] = self.duration

    if self.path_model is not None:
        pars['MODEL'] = 'GLL'

    if self.lddrk is not None:
        pars['USE_LDDRK'] = self.lddrk

    if self.transient_duration is not None:
        if self.duration is None:
            raise ValueError('solver duration must be set if transient_duration exists')

        pars['STEADY_STATE_KERNEL'] = True
        pars['STEADY_STATE_LENGTH_IN_MINUTES'] = self.duration - self.transient_duration
    else:
        pars['STEADY_STATE_KERNEL'] = False

    setpars(self, pars)
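
# Worked example (values hypothetical) of the steady-state bookkeeping above:
# with duration=160 (minutes) and transient_duration=40, the solver records
# the full 160 minutes but accumulates the steady-state kernel only over the
# final duration - transient_duration minutes.
duration, transient_duration = 160.0, 40.0
steady_state_length = duration - transient_duration  # -> 120.0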
async def _apply_smoother(self, hess: bool):
    import adios2

    names = []

    if hess and self.precondition:
        src = 'hess_vel_raw.bp'
        dst = 'hess_vel_smooth.bp'
        cmd = f'xconvert_hessian kernels_raw.bp DATABASES_MPI/solver_data.bp hess_vel_raw.bp {self.precondition}'
        await self.mpiexec(getpath('adios', 'bin', cmd), getsize(self), 1, 0, 'adios')
    else:
        src = 'kernels_raw.bp'
        dst = 'hessian_smooth.bp' if hess else 'kernels_smooth.bp'

    # get the names of the kernels to be smoothed
    with adios2.open(self.abs(src), 'r') as fh:  # type: ignore
        pf = '_crust_mantle/array'

        for fstep in fh:
            step_vars = fstep.available_variables()

            for name in step_vars:
                if name.endswith(pf):
                    name = name.split(pf)[0]

                    # keep Hessian entries when smoothing the Hessian,
                    # kernel entries otherwise
                    if name.startswith('hess_') == hess:
                        names.append(name)

    # save the number of kernels being smoothed for probe_smoother
    kind = 'smooth_' + ('hess' if hess else 'kl')
    cache[kind] = len(names)

    # get the command to call smoother
    cmd = 'bin/xsmooth_laplacian_sem_adios'
    radius = self.smooth_hessian if hess else self.smooth_kernels

    if isinstance(radius, list):
        radius = max(radius[1], radius[0] * radius[2] ** self.iteration)

    kl = ','.join(names)
    await self.mpiexec(
        f'{cmd} {radius} {radius} {kl} {src} DATABASES_MPI/ {dst} > OUTPUT_FILES/{kind}.txt',
        getsize(self), 1, 0, 'smooth_kernels')

    # reset status
    del cache[kind]
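
# Sketch of the smoothing-radius schedule used above when the radius is given
# as a list [initial, minimum, decay]: the radius shrinks geometrically with
# the iteration number but never drops below the configured minimum. Values
# here are hypothetical.
initial, minimum, decay = 10.0, 2.0, 0.8
for iteration in range(5):
    radius = max(minimum, initial * decay ** iteration)
    # iteration 0 -> 10.0, 1 -> 8.0, 2 -> 6.4, 3 -> 5.12, 4 -> 4.096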
from typing import Optional, Dict, Tuple

from pypers import Directory, getpath, cache


# directory containing catalog files
catalogdir = Directory(getpath('catalog'))


def get_catalog() -> dict:
    """Load catalog based on config.toml."""
    if 'catalog' not in cache:
        cache['catalog'] = catalogdir.load('catalog.toml')

    return cache['catalog']


def get_stations(event: str, group: Optional[int] = None):
    """Get available stations in current frequency group."""
    catalog = get_catalog()

    if group is None:
        return list(catalog[event].keys())

    stations = []

    for station, traces in catalog[event].items():
        if len(traces[group]) > 0:
            stations.append(station)

    return stations
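
# Hypothetical usage of the helpers above (the event ID is illustrative):
stations_all = get_stations('C201107191935A')           # every station for the event
stations_g0 = get_stations('C201107191935A', group=0)   # stations with traces in group 0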
class Ortho(Kernel):
    # taper traces
    taper: Optional[float] = field()

    # time step for frequency selection
    dt: float = field(required=True)

    # simulation duration
    duration: float = field(required=True)

    # transient state duration for source encoding
    transient_duration: float = field(required=True)

    # normalize source to the magnitude
    normalize_source: bool = field(True)

    # remove response for observed traces
    remove_response: bool = field(False)

    # test orthogonality (1: synthetic encoded traces, 2: synthetic observed traces)
    test_encoding: int = field(0)

    # path to encoded observed traces
    path_encoded: Optional[str] = field()

    # event condition number geographical weighting
    event_weighting: Optional[float] = field()

    # station condition number geographical weighting
    station_weighting: Optional[float] = field()

    # amplify high frequency kernels to compensate for attenuation
    compensate_attenuation: Optional[Union[int, float]] = field()

    # randomize frequency every x iterations, 0 for not randomizing frequency
    randomize_frequency: int = field(1)

    # frequency interval
    df: float = field()

    # frequency step for observed traces
    kf: int = field(1)

    # frequency indices used for encoding
    fidx: List[int] = field()

    # frequency slots assigned to events
    fslots: Dict[str, List[int]] = field()

    # number of time steps in transient state
    nt_ts: int = field()

    # number of time steps in stationary state
    nt_se: int = field()

    # frequency weights from source normalization
    famp: Optional[np.ndarray] = field()

    # frequency weights from geographical weighting
    gamp: Optional[np.ndarray] = field()

    # station geographical weighting
    samp: Optional[dict] = field()

    @property
    def freq(self):
        """Frequencies used for encoding."""
        return fftfreq(self.nt_se, self.dt)[self.fidx[0]: self.fidx[-1]]

    @property
    def freqstr(self):
        """String representation of frequency band."""
        period = f'p{int(self.period_range[0])}-{int(self.period_range[-1])}'
        duration = f'd{int(self.transient_duration)}-{int(self.duration)}'
        dt = f't{self.dt}'
        seed = f's{self.rng}'

        return f'{period}_{duration}_{dt}_{seed}'

    @property
    def rng(self):
        """Seed of the current iteration."""
        return int(self.iteration / self.randomize_frequency) + self.seed

    @property
    def ampstr(self):
        """String representation of geographical weightings."""
        return f'weightings_{self.event_weighting}_{self.station_weighting}'

    def setup(self):
        self.clear()

        # add steps to compute and process adjoint sources
        self.add(self._prepare_frequencies)

        # create super source
        self.add(self._encode_events)

        # compute weighting
        if self.event_weighting or self.station_weighting:
            if not catalogdir.has(self.ampstr):
                self.add(compute_weightings(
                    self.event_weighting, self.station_weighting,
                    catalogdir.abs(self.ampstr)))

            self.add(self._load_weightings)

        if self.path_encoded:
            self.add(partial(self.ln, self.path_encoded, 'observed.ft.h5'), 'link_observed')

        elif self.test_encoding == 1:
            # generate observed traces
            self.add(solver := create_solver('solver_observed', {
                'path_event': self.abs('SUPERSOURCE'),
                'path_stations': self.abs('SUPERSTATION'),
                'path_model': getpath('model_true'),
                'monochromatic_source': True,
                'save_forward': False,
                'process_traces': {
                    'dst': self.abs('observed.ft.h5'),
                    'func': partial(self._ft, None),
                    'input_type': 'stream',
                    'output_tag': 'FT',
                    'accessor': True
                }
            }))

        else:
            # prepare observed data and save to catalog directory
            ws = Workspace('prepare_observed', concurrent=True)

            for event in get_events():
                # location of processed traces
                if catalogdir.has(fname := f'{self.freqstr}/{event}.ft.h5'):
                    continue

                dst = catalogdir.abs(fname)

                # task name
                name = f'process_{event}'
                if not catalogdir.has(f'traces/{event}.h5'):
                    # generate and process observed data
                    ws.add(solver := create_solver(f'solver_{event}', {
                        'path_event': catalogdir.abs(f'events/{event}'),
                        'path_stations': catalogdir.abs(f'stations/STATIONS.{event}'),
                        'path_model': getpath('model_true'),
                        'monochromatic_source': False,
                        'save_forward': False,
                        'process_traces': {
                            'dst': dst,
                            'func': partial(self._ft, event),
                            'input_type': 'stream',
                            'output_tag': 'FT',
                            'accessor': True
                        }
                    }))

                else:
                    # process observed data
                    ws.add(asdf_task(
                        catalogdir.abs(f'traces/{event}.h5'), dst,
                        partial(self._ft, event), input_type='stream',
                        output_tag='FT', accessor=True, name=name))
            if len(ws):
                self.add(ws)

            # get Fourier coefficients from observed traces
            self.add(partial(self.mpiexec, self._encode_observed, walltime='encode_observed'))

        # generate synthetic traces
        self.add(solver := create_solver('solver_synthetic', {
            'path_event': self.abs('SUPERSOURCE'),
            'path_stations': self.abs('SUPERSTATION'),
            'path_model': self.path_model or getpath('model_init'),
            'monochromatic_source': True,
            'save_forward': True,
            'process_traces': {
                'dst': self.abs('synthetic.ft.h5'),
                'func': partial(self._ft, None),
                'input_type': 'stream',
                'accessor': True,
                'output_tag': 'FT'
            }
        }))

        # compare traces only if test_encoding == 2
        if self.test_encoding == 2:
            return
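
# Worked standalone example (values hypothetical) of Ortho.freqstr, which
# names the catalog subdirectory that caches processed observed traces:
period_range, transient_duration, duration, dt = (30.0, 100.0), 40.0, 160.0, 0.2
iteration, randomize_frequency, seed = 2, 1, 1
rng = int(iteration / randomize_frequency) + seed  # -> 3
freqstr = (f'p{int(period_range[0])}-{int(period_range[-1])}'
           f'_d{int(transient_duration)}-{int(duration)}'
           f'_t{dt}_s{rng}')
# -> 'p30-100_d40-160_t0.2_s3'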