def mgmt(config, event, st_obs, st_syn, inv):
    """
    A Manager filled with data that has progressed through the workflow
    """
    mgmt = Manager(config=config, event=event, st_obs=st_obs, st_syn=st_syn,
                   inv=inv)
    mgmt.standardize()
    mgmt.preprocess()
    mgmt.window()
    mgmt.measure()
    return mgmt
def test_pyaflowa_process_station(tmpdir, seisflows_workdir, seed_data,
                                  source_name, station_name, PAR, PATH):
    """
    Test the single station processing function
    """
    # Turn off client to avoid searching FDSN, force local data search
    PAR.CLIENT = None
    PATH.DATA = tmpdir.strpath

    pyaflowa = Pyaflowa(structure="seisflows", sfpaths=PATH, sfpar=PAR,
                        iteration=1, step_count=0)

    # Copy working directory to tmpdir to avoid creating unnecessary files
    shutil.copytree(src=seisflows_workdir, dst=os.path.join(tmpdir, "scratch"))
    shutil.copytree(src=seed_data, dst=os.path.join(tmpdir, "seed"))

    # Set up the same machinery as process_event()
    io = pyaflowa.setup(source_name)
    with ASDFDataSet(io.paths.ds_file) as ds:
        mgmt = Manager(ds=ds, config=io.config)
        mgmt, io = pyaflowa.process_station(mgmt=mgmt, code="NZ.BFZ.??.???",
                                            io=io)

    assert io.nwin == mgmt.stats.nwin == 3
    assert io.misfit == pytest.approx(65.39037, 0.001)
def test_read_write_from_asdfdataset(tmpdir, mgmt_pre, config):
    """
    Write a Manager into an ASDFDataSet and then read it back
    """
    with ASDFDataSet(os.path.join(tmpdir, "test_dataset.h5")) as ds:
        mgmt_pre.ds = ds
        mgmt_pre.write(write_to="ds")

        # Load data back from the dataset
        mgmt_loaded = Manager(ds=ds, config=config)
        mgmt_loaded.load("NZ.BFZ", path="default")

        # Manager has no equivalence representation so just check some ids
        assert mgmt_pre.stats.event_id == mgmt_loaded.stats.event_id
        assert mgmt_pre.stats.len_obs == mgmt_loaded.stats.len_obs
        assert mgmt_pre.stats.len_syn == mgmt_loaded.stats.len_syn
        assert mgmt_pre.stats.inv_name == mgmt_loaded.stats.inv_name
def mgmt_pre(config, event, st_obs, st_syn, inv):
    """
    A Manager filled with data but pre-workflow
    """
    return Manager(config=config, event=event, st_obs=st_obs, st_syn=st_syn,
                   inv=inv)
def gather_simple(self, event, sta, min_period, max_period, path_dict=None,
                  component=None):
    """
    Manually set the model values based on inspection of the Inspector.
    Don't return windows or anything, keep it simple
    """
    # Hard-coded models mapped to (iteration/step, dataset tag)
    models = {
        "m00": ("i01/s00", "a"),
        "m03": ("i03/s03", "a"),
        "m09": ("i09/s02", "a"),
        "m12": ("i12/s04", "a"),
        "m17": ("i17/s01", "a"),
        "m24": ("i07/s01", "b"),
        "m28": ("i11/s03", "c"),
    }
    st_obs, synthetics = None, {}
    for model, (path, tag) in models.items():
        # `event` is expected to be the event id string used in the file names
        if path_dict:
            ds_fid = os.path.join(path_dict[tag], f"{event}.h5")
        else:
            ds_fid = f"{event}{tag}.h5"

        with asdf(ds_fid, mode="r") as ds:
            mgmt = Manager(ds=ds)
            mgmt.load(sta, path)
            mgmt.config.save_to_ds = False
            mgmt.config.min_period = min_period
            mgmt.config.max_period = max_period
            mgmt.standardize().preprocess()

            if component:
                synthetics[model] = mgmt.st_syn.select(
                    component=component).copy()
            else:
                synthetics[model] = mgmt.st_syn.copy()

            # Observed waveforms are the same for every model, only grab once
            if st_obs is None:
                if component:
                    st_obs = mgmt.st_obs.select(component=component).copy()
                else:
                    st_obs = mgmt.st_obs.copy()

    self.st_obs = st_obs
    self.synthetics = synthetics
def _gather_model_from_dataset(self, dsfid, model=None, init_or_final=None,
                               save_windows=False):
    """
    Gather data from an ASDFDataSet based on the given model (iter/step)

    :type dsfid: str
    :param dsfid: file identifier for the dataset
    :type model: str
    :param model: iteration/step, e.g. 'i01/s00'
    :type init_or_final: str
    :param init_or_final: for choosing default values if model is None
        * 'init': choose the first iteration/step for the initial model
        * 'final': choose the final iteration/step for the final model
    :type save_windows: bool or str
    :param save_windows: if truthy, pick windows; 'init' or 'final' stores
        them on the matching attribute, otherwise on self.windows
    """
    assert init_or_final in ["init", "final"]

    with ASDFDataSet(dsfid, mode="r") as ds:
        if model is None:
            configs = ds.auxiliary_data.Configs
            if init_or_final == "init":
                idx = 0
            elif init_or_final == "final":
                idx = -1
            iter_ = configs.list()[idx]
            step_ = configs[iter_].list()[idx]
            model = f"{iter_}/{step_}"

        # Use the Manager class to load in waveform data
        mgmt = Manager(ds=ds)
        mgmt.load(code=self.station, path=model)
        mgmt.config.save_to_ds = False

        # !!! NZ temporary networks: skip response removal
        net, sta = self.station.split(".")
        remove_response = net not in ["ZX", "Z8"]

        # Overwrite the filter corners stored in the dataset
        mgmt.config.min_period = self.min_period
        mgmt.config.max_period = self.max_period
        mgmt.standardize().preprocess(remove_response=remove_response)

        # See if windows can be picked for the given load setup
        if save_windows:
            pf_cfg = "nznorth_6-30s"
            mgmt.config.pyflex_preset = pf_cfg
            mgmt.config.pyflex_config, _ = set_pyflex_config(
                mgmt.config.min_period, mgmt.config.max_period, pf_cfg)
            mgmt.window()
            print(f"{mgmt.stats.nwin} windows for {init_or_final}")

            if save_windows == "init":
                self.init_windows = mgmt.windows
            elif save_windows == "final":
                self.final_windows = mgmt.windows
            else:
                # Any check that returns no windows will set this False
                self.windows = mgmt.windows

        # Store data in class attributes, obs waveforms will be the same
        if self._st_obs is None:
            self._st_obs = mgmt.st_obs
        if self._inv is None:
            self._inv = mgmt.inv
        if self._event is None:
            self._event = mgmt.event

        setattr(self, f"_st_{init_or_final}", mgmt.st_syn)
        setattr(self, f"_m_{init_or_final}", model)
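# A hypothetical call pattern for the method above, sketching how the
# init/final and save_windows arguments interact; the dataset file names are
# assumptions for illustration only.
#
#   self._gather_model_from_dataset("i01_model.h5", init_or_final="init",
#                                   save_windows="init")
#   self._gather_model_from_dataset("i17_model.h5", init_or_final="final",
#                                   save_windows="final")
#
# After both calls, `self._st_init` / `self._st_final` hold the synthetics and
# `self.init_windows` / `self.final_windows` hold the picked windows.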
def gather(self, sta, min_period, max_period, rotate_to_rtz=False,
           fix_windows=False, pyflex_preset=False):
    """
    Parse dataset for a given station, gather observed and synthetic data,
    preprocess data and return as stream objects.

    :type sta: str
    :param sta: station to gather data for
    :type min_period: float
    :param min_period: minimum filter period in seconds
    :type max_period: float
    :param max_period: maximum filter period in seconds
    :type rotate_to_rtz: bool
    :param rotate_to_rtz: rotate components from NEZ to RTZ
    :type fix_windows: bool
    :param fix_windows: don't recalculate windows when gathering
    :type pyflex_preset: str
    :param pyflex_preset: overwrite the pyflex preset provided in the Config
        object
    """
    if min_period is None or max_period is None:
        raise TypeError("must specify 'min_period' and 'max_period'")
    assert sta in self.ds.waveforms.list(), f"{sta} not in ASDFDataSet"

    models = self.get_models()

    # Preprocess all traces using Pyatoa and store in dicts keyed by model
    st_obs, synthetics, windows = None, {}, {}
    for model, path in models.items():
        # Gather synthetic data
        mgmt = Manager(ds=self.ds)
        print(path)
        mgmt.load(sta, path)

        # Overwrite some config parameters
        mgmt.config.save_to_ds = False
        mgmt.config.min_period = min_period
        mgmt.config.max_period = max_period
        if rotate_to_rtz:
            mgmt.config.rotate_to_rtz = rotate_to_rtz
            mgmt.config.component_list = ["Z", "R", "T"]
        if pyflex_preset:
            mgmt.config.pyflex_preset = pyflex_preset
            mgmt.config._check()

        mgmt.standardize()
        mgmt.preprocess()

        iter_, step_ = path.split("/")
        mgmt.window(fix_windows=fix_windows, iteration=iter_,
                    step_count=step_)

        windows[model] = mgmt.windows
        synthetics[model] = mgmt.st_syn.copy()

        # Observed waveform will be the same for all models
        if st_obs is None:
            st_obs = mgmt.st_obs.copy()

    # Internally used by plotting function
    self.st_obs = st_obs
    self.synthetics = synthetics
    self.windows = windows
    self.time_axis = self.st_obs[0].times(
        reftime=st_obs[0].stats.starttime - mgmt.stats.time_offset_sec)
class WindowWasher:
    """
    Test time windowing using Pyflex presets
    """
    def __init__(self, ds_fid):
        """
        The class contains a PyASDF Dataset that will be used to load observed
        and synthetic data, based on user input
        """
        self.ds = ASDFDataSet(ds_fid)
        # Initiate an empty Manager to get access to its config
        self.mgmt = Manager()
        self.info = Info()

    def setup(self, code):
        """
        Load waveforms from the dataset and preprocess for windowing
        """
        # If configure() was run first, this will pass its Config object over
        try:
            self.mgmt = Manager(ds=self.ds, config=self.mgmt.config)
            self.mgmt.load(code, config=False,
                           synthetic_tag="synthetic_m00s00")
            self.mgmt.config.save_to_ds = False
            self.mgmt.standardize().preprocess()
        except Exception as e:
            raise WasherError(e)

    def configure(self, min_period, max_period, preset="default", **kwargs):
        """
        Configure the Pyflex parameters, either by passing kwargs or choosing
        a preset
        """
        self.mgmt.config.min_period = min_period
        self.mgmt.config.max_period = max_period
        self.mgmt.config.pyflex_preset = preset
        self.mgmt.config.pyflex_config, unused = set_pyflex_config(
            choice=preset, min_period=min_period, max_period=max_period,
            **kwargs)
        if unused:
            for uu in unused:
                print(f"{uu} is not a valid Pyflex parameter")

    def set(self, **kwargs):
        """
        After a Pyflex Config has been set by 'configure', individual
        parameters can be 'set' for fine-tuning
        """
        assert self.mgmt is not None
        for key in kwargs.keys():
            assert hasattr(self.mgmt.config.pyflex_config, key), \
                f"{key} is not a valid Pyflex parameter"
        vars(self.mgmt.config.pyflex_config).update(**kwargs)

    def check(self, key):
        """
        Check the current value of a Pyflex config parameter
        """
        assert self.mgmt is not None
        if hasattr(self.mgmt.config.pyflex_config, key):
            print(f"{key}: {getattr(self.mgmt.config.pyflex_config, key)}")
        else:
            print(f"{key}: NOT FOUND")

    def wash(self):
        """
        Run the Manager window() function and plot the waveforms (no map) to
        look at window quality.
        """
        assert self.mgmt is not None
        self.mgmt.window()
        self.mgmt.plot(choice="wav", show=True, save=None)
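# A minimal usage sketch for the WindowWasher class above; the dataset file
# name, station code, and the Pyflex parameter used here are assumptions for
# illustration only.
washer = WindowWasher("2018p130600.h5")
washer.configure(min_period=10, max_period=30, preset="default")
washer.setup("NZ.BFZ")                    # load and preprocess waveforms
washer.check("tshift_acceptance_level")   # inspect a Pyflex parameter
washer.set(tshift_acceptance_level=8.0)   # fine-tune it before windowing
washer.wash()                             # pick windows and plot waveforms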
""" It's useful to generate a fully loaded Manager object for testing purposes. Load data from the test directory and run the Manager workflow to achieve this. """ import ipdb from IPython import embed from pyatoa import Manager, Config from obspy import read, read_events, read_inventory # Generate ObsPy classes of data and dataless st_obs = read("../tests/test_data/test_obs_data_NZ_BFZ_2018p130600.ascii") st_syn = read("../tests/test_data/test_syn_data_NZ_BFZ_2018p130600.ascii") event = read_events("../tests/test_data/test_catalog_2018p130600.xml")[0] inv = read_inventory("../tests/test_data/test_dataless_NZ_BFZ.xml") # Create the Manager using read in data cfg = Config(event_id="2018p130600") mgmt = Manager(config=cfg, event=event, st_obs=st_obs, st_syn=st_syn, inv=inv) mgmt.flow() ipdb.set_trace() embed(colors="neutral")
import os
from pyatoa import Manager
from pyasdf import ASDFDataSet as asdf
import matplotlib.pyplot as plt

# Compare observed waveforms processed with two different minimum filter periods
periods = [3, 4]

with asdf("2015p768477_birch.h5") as ds:
    for station in ds.waveforms.list():
        f, axes = plt.subplots(3)
        mgmt = Manager(ds=ds)
        try:
            for i, min_period in enumerate(periods):
                mgmt.load(station, path="i11/s05")
                mgmt.config.min_period = min_period
                mgmt.standardize().preprocess()
                # Plot each component: black for the first period, red for the second
                for j, tr in enumerate(mgmt.st_obs):
                    axes[j].plot(tr.times(), tr.data, c={0: "k", 1: "r"}[i])
                mgmt.reset()

            axes[0].set_title(station)
            axes[1].set_ylabel("Displacement [m]")
            axes[2].set_xlabel("Time [s]")

            fid = f"{station}_2015p768477_{periods[0]}vs{periods[1]}s.png"
            plt.savefig(os.path.join("figures", fid), dpi=100)
            plt.close()
        except Exception as e:
            print(f"{station}: {e}")
            plt.close()