Example #1
    def __init__(self, sc, events, project_dir, hypers=False):

        super(SeismicComposite, self).__init__(events)

        logger.debug('Setting up seismic structure ...\n')
        self.name = 'seismic'
        self._like_name = 'seis_like'
        self.correction_name = 'time_shift'

        self.engine = LocalEngine(
            store_superdirs=[sc.gf_config.store_superdir])

        if sc.responses_path is not None:
            responses_path = os.path.join(sc.responses_path,
                                          bconfig.response_file_name)
        else:
            responses_path = sc.responses_path

        # load data
        self.datahandlers = []
        for i in range(self.nevents):
            seismic_data_path = os.path.join(
                project_dir, bconfig.multi_event_seismic_data_name(i))

            logger.info('Loading seismic data for event %i'
                        ' from: %s ' % (i, seismic_data_path))
            self.datahandlers.append(
                heart.init_datahandler(seismic_config=sc,
                                       seismic_data_path=seismic_data_path,
                                       responses_path=responses_path))

        self.noise_analyser = cov.SeismicNoiseAnalyser(
            structure=sc.noise_estimator.structure,
            pre_arrival_time=sc.noise_estimator.pre_arrival_time,
            engine=self.engine,
            events=self.events,
            chop_bounds=['b', 'c'])

        self.wavemaps = []
        for i, wc in enumerate(sc.waveforms):
            if wc.include:
                wmap = heart.init_wavemap(
                    waveformfit_config=wc,
                    datahandler=self.datahandlers[wc.event_idx],
                    event=self.events[wc.event_idx],
                    mapnumber=i)

                self.wavemaps.append(wmap)
            else:
                logger.info('The waveform defined in "%s %i" config is not '
                            'included in the optimization!' % (wc.name, i))

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]),
                           name='seis_llk_%i' % t,
                           borrow=True))
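The `_llks` containers built above are plain theano shared variables, one single-element array per trace. A minimal standalone sketch of that pattern (toy value; theano must be installed, as these snippets assume):

import numpy as num
from theano import shared

# One single-element likelihood container, updated in place later via
# set_value() without reallocating the buffer (borrow=True).
llk = shared(num.array([1.]), name='seis_llk_0', borrow=True)
llk.set_value(num.array([42.]))
print(llk.get_value())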
Example #2
    def __init__(self, gc, project_dir, sources, event, hypers=False):

        super(GeodeticSourceComposite, self).__init__(gc,
                                                      project_dir,
                                                      event,
                                                      hypers=hypers)

        self.engine = LocalEngine(
            store_superdirs=[gc.gf_config.store_superdir])

        self.sources = sources
Example #3
def main(n, depths, durations, noisy, noise_factor, velocity_model, long, tmin,
         tmax, save_file, save_dir, _run):

    engine = LocalEngine(store_dirs=[velocity_model])

    target = gf.Target(quantity='displacement',
                       lat=0,
                       lon=long,
                       store_id=velocity_model,
                       codes=('NET', 'STA', 'LOC', 'E'),
                       tmin=tmin,
                       tmax=tmax)

    df = createFocalMechanisms(n,
                               depths,
                               durations,
                               target,
                               engine,
                               noisy=noisy,
                               noise_factor=noise_factor)

    if not os.path.exists(save_dir):
        os.mkdir(save_dir)

    path = os.path.join(save_dir, save_file)

    df.to_pickle(path)

    ex.add_artifact(path)
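The `_run` argument and `ex.add_artifact(path)` mark `main` as a sacred experiment command; the experiment object itself is not shown. A minimal sketch of the missing wiring (the experiment name and the placeholder artifact are assumptions):

from sacred import Experiment

ex = Experiment('synthetic_waveforms')  # name is an assumption

@ex.automain
def main(_run):
    # sacred injects _run; add_artifact attaches a file to the run record
    path = 'df.pkl'
    open(path, 'wb').close()  # stand-in for df.to_pickle(path)
    ex.add_artifact(path)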
Example #4
def prep_data_batch(data_dir, store_id, stations=None, pre=0.5,
                    post=3, reference_event=None, min_len=420,
                    pick_sigma=0.02):
    engine = LocalEngine(store_superdirs=['/home/asteinbe/gf_stores'])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    gf_freq = store.config.sample_rate
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    waveforms = []
    waveforms_shifted = []
    # NOTE: 'file' is not defined in this snippet; it must point to the
    # SCEDC focal-mechanism catalogue that is converted here.
    events = scedc_util.scedc_fm_to_pyrocko(file)
    labels = labels_from_events(events)
    nsamples = None  # set in the loop below; avoids NameError if no event loads
    pathlist = Path(data_dir).glob('ev_0/')
    for path in sorted(pathlist):
        try:
            targets = []
            path = str(path)+"/"
            event = model.load_events(path+"event.txt")[0]
            traces_loaded = io.load(path+"traces.mseed")
            stations_unsorted = model.load_stations(data_dir+"stations.pf")
            for st in stations_unsorted:
                st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                          event.lat,
                                                          event.lon)
                st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat,
                                            event.lon)
            stations = sorted(stations_unsorted, key=lambda x: x.dist,
                              reverse=True)

            traces_processed = []
            traces = check_traces(traces_loaded, stations, min_len=min_len)
            traces_processed, nsamples = wp.process_loaded_waveforms(
                traces, stations, event, gf_freq, mod, pre, post)
            events.append(event)
            waveforms.append(traces_processed)
        except Exception:
            # skip events with incomplete or unreadable data
            pass
    return waveforms, nsamples, events, waveforms_shifted
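A hypothetical invocation of the batch loader above (directory layout is made up; the function expects per-event folders with event.txt and traces.mseed next to a stations.pf file):

waveforms, nsamples, events, waveforms_shifted = prep_data_batch(
    'data/', 'crust2_m5_hardtop_16Hz', pre=0.5, post=3)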
Example #5
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=pjoin(settings_fn))
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)
            traces = io.load(settings.trace_filename)
            event = model.load_events(settings.event_filename)
            assert len(event)==1
            event = event[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = [s for s in stations
                       if match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id)]
            if len(station) != 1:
                logger.error('no matching stations found.')
            else:
                station = station[0]
            targets = [station_to_target(station, quantity=settings.quantity, store_id=settings.store_id)]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' % error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                            t.lat, t.lon = newlat, newlon
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                            t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets, sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces)==1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            mesh_fwidth = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            misfits_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc-fc*2./5.
                fr_max = fc+fc*2./5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc+width*0.5), order=4, type='low'),
                            ButterworthResponse(corner=float(fc-width*0.5), order=4, type='high')]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(candidate=candidate, setup=settings.misfit_setup, debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(len(test_depths)+1, 1, i+1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m/n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits:\n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits*100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
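For reference, the frequency grid scanned above can be reproduced standalone: each center frequency fc gets band widths spanning two fifths of fc on either side, and each (fc, width) pair defines a band-pass with corners fc - width/2 and fc + width/2. This sketch only mirrors the loop bookkeeping, not the misfit computation:

import numpy as num

center_freqs = num.arange(1., 9., 4.)  # [1., 5.]
num_f_widths = len(center_freqs)
for fc in center_freqs:
    widths = num.linspace(fc - fc * 2. / 5., fc + fc * 2. / 5., num_f_widths)
    for width in widths:
        low, high = fc - 0.5 * width, fc + 0.5 * width
        print('fc=%g width=%g band=[%g, %g]' % (fc, width, low, high))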
Example #6
from pyrocko.gf import LocalEngine, Target, DCSource
from pyrocko import trace
from pyrocko.gui_util import PhaseMarker

# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['/media/usb/gf_stores'])

# The store we are going extract data from:
store_id = 'crust2_dd'

# Define a list of pyrocko.gf.Target objects, representing the recording
# devices. In this case one station with a three component sensor will
# serve fine for demonstration.
channel_codes = 'ENZ'
targets = [
    Target(lat=10.,
           lon=10.,
           store_id=store_id,
           codes=('', 'STA', '', channel_code))
    for channel_code in channel_codes
]

# Let's use a double couple source representation.
source_dc = DCSource(lat=11.,
                     lon=11.,
                     depth=10000.,
                     strike=20.,
                     dip=40.,
                     rake=60.,
                     magnitude=4.)  # closing argument assumed; snippet was cut off
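The snippet is cut off mid-call; the closing `magnitude` argument above is an assumption. The standard pyrocko forward-modelling example continues by processing the request through the engine, roughly:

# Process the double-couple source for all targets and extract the
# synthetic traces from the response (pyrocko.gf API).
response = engine.process(source_dc, targets)
synthetic_traces = response.pyrocko_traces()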
Example #7
class GeodeticSourceComposite(GeodeticComposite):
    """
    Comprises how to solve the non-linear geodetic forward model.

    Parameters
    ----------
    gc : :class:`config.GeodeticConfig`
        configuration object containing geodetic setup parameters
    project_dir : str
        directory of the model project, where to find the data
    sources : list
        of :class:`pyrocko.gf.seismosizer.Source`
    event : :class:`pyrocko.model.Event`
        contains information of reference event, coordinates of reference
        point and source time
    hypers : boolean
        if true initialise object for hyper parameter optimization
    """
    def __init__(self, gc, project_dir, sources, event, hypers=False):

        super(GeodeticSourceComposite, self).__init__(gc,
                                                      project_dir,
                                                      event,
                                                      hypers=hypers)

        self.engine = LocalEngine(
            store_superdirs=[gc.gf_config.store_superdir])

        self.sources = sources

    def __getstate__(self):
        self.engine.close_cashed_stores()
        return self.__dict__.copy()

    def point2sources(self, point):
        """
        Updates the composite source(s) (in place) with the point values.
        """
        tpoint = copy.deepcopy(point)
        tpoint = utility.adjust_point_units(tpoint)

        # remove hyperparameters from point
        hps = self.config.get_hypernames()

        for hyper in hps:
            if hyper in tpoint:
                tpoint.pop(hyper)

        source_params = list(self.sources[0].keys())
        for param in list(tpoint.keys()):
            if param not in source_params:
                tpoint.pop(param)

        source_points = utility.split_point(tpoint)
        for i, source in enumerate(self.sources):
            utility.update_source(source, **source_points[i])
            # reset source time may result in store error otherwise
            source.time = 0.

    def get_formula(self, input_rvs, fixed_rvs, hyperparams, problem_config):
        """
        Get geodetic likelihood formula for the model built. Has to be called
        within a with model context.
        Part of the pymc3 model.

        Parameters
        ----------
        input_rvs : dict
            of :class:`pymc3.distribution.Distribution`
        fixed_rvs : dict
            of :class:`numpy.array`
        hyperparams : dict
            of :class:`pymc3.distribution.Distribution`
        problem_config : :class:`config.ProblemConfig`

        Returns
        -------
        posterior_llk : :class:`theano.tensor.Tensor`
        """
        hp_specific = self.config.dataset_specific_residual_noise_estimation

        self.input_rvs = input_rvs
        self.fixed_rvs = fixed_rvs

        logger.info('Geodetic optimization on: \n '
                    '%s' % ', '.join(self.input_rvs.keys()))

        self.input_rvs.update(fixed_rvs)

        t0 = time()
        disp = self.get_synths(self.input_rvs)
        t1 = time()
        logger.debug('Geodetic forward model on test model takes: %f' %
                     (t1 - t0))

        los_disp = (disp * self.slos_vectors).sum(axis=1)

        residuals = self.Bij.srmap(
            tt.cast((self.sdata - los_disp) * self.sodws, tconfig.floatX))

        self.init_hierarchicals(problem_config)
        if len(self.hierarchicals) > 0:
            residuals = self.remove_ramps(residuals)

        logpts = multivariate_normal_chol(self.datasets,
                                          self.weights,
                                          hyperparams,
                                          residuals,
                                          hp_specific=hp_specific)

        llk = Deterministic(self._like_name, logpts)
        return llk.sum()
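`multivariate_normal_chol` evaluates the dataset likelihoods through Cholesky factors of the covariances. A standalone numpy sketch of the underlying identity (the hyperparameter weighting used in the call above is omitted): with C = L L^T, the quadratic form r^T C^-1 r equals |L^-1 r|^2, so a triangular solve replaces a full inversion:

import numpy as num
from scipy.linalg import solve_triangular

rng = num.random.default_rng(0)
A = rng.normal(size=(4, 4))
C = A @ A.T + 4. * num.eye(4)  # toy SPD covariance
r = rng.normal(size=4)         # toy residual vector

L = num.linalg.cholesky(C)
z = solve_triangular(L, r, lower=True)
quad = z @ z  # equals r @ num.linalg.inv(C) @ r
logdet = 2. * num.sum(num.log(num.diag(L)))
logp = -0.5 * (r.size * num.log(2. * num.pi) + logdet + quad)
print(logp)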
Example #8
                self.amplitudes = self._amplitudes
        else:
            self.amplitudes = np.ones(1)

        return times, self.amplitudes

    def base_key(self):
        # method returns STF name and the values
        return (self.__class__.__name__, self.duration, self.anchor)


if __name__ == '__main__':

    model = 'crust2_m5_hardtop_16Hz'

    engine = LocalEngine(store_dirs=[model])

    target = gf.Target(quantity='displacement',
                       lat=0,
                       lon=1,
                       store_id=model,
                       codes=('NET', 'STA', 'LOC', 'E'),
                       tmin=10,
                       tmax=75)

    DC_tensors = [createMT_DC(mag) for mag in 4 * np.random.rand(1000)]
    CLVD_tensors = [createMT_CLVD(mag) for mag in 4 * np.random.rand(1000)]
    Iso_tensors = [createMT_Isotropic(mag) for mag in 4 * np.random.rand(1000)]

    moment_tensors = DC_tensors + CLVD_tensors + Iso_tensors
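`createMT_DC`, `createMT_CLVD` and `createMT_Isotropic` are not shown in this example. For the double-couple case, pyrocko can draw random tensors directly, which is presumably what the helper wraps; a hedged sketch:

import numpy as np
from pyrocko import moment_tensor as pmt

# Random double-couple moment tensors with magnitudes drawn in [0, 4);
# CLVD and isotropic tensors would need custom construction.
DC_tensors = [pmt.MomentTensor.random_dc(magnitude=mag)
              for mag in 4 * np.random.rand(10)]
print(DC_tensors[0])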
Example #9
class SeismicComposite(Composite):
    """
    Comprises how to solve the non-linear seismic forward model.

    Parameters
    ----------
    sc : :class:`config.SeismicConfig`
        configuration object containing seismic setup parameters
    events: list
        of :class:`pyrocko.model.Event`
    project_dir : str
        directory of the model project, where to find the data
    hypers : boolean
        if true initialise object for hyper parameter optimization
    """
    _datasets = None
    _weights = None
    _targets = None
    _hierarchicalnames = None

    def __init__(self, sc, events, project_dir, hypers=False):

        super(SeismicComposite, self).__init__(events)

        logger.debug('Setting up seismic structure ...\n')
        self.name = 'seismic'
        self._like_name = 'seis_like'
        self.correction_name = 'time_shift'

        self.engine = LocalEngine(
            store_superdirs=[sc.gf_config.store_superdir])

        if sc.responses_path is not None:
            responses_path = os.path.join(sc.responses_path,
                                          bconfig.response_file_name)
        else:
            responses_path = sc.responses_path

        # load data
        self.datahandlers = []
        for i in range(self.nevents):
            seismic_data_path = os.path.join(
                project_dir, bconfig.multi_event_seismic_data_name(i))

            logger.info('Loading seismic data for event %i'
                        ' from: %s ' % (i, seismic_data_path))
            self.datahandlers.append(
                heart.init_datahandler(seismic_config=sc,
                                       seismic_data_path=seismic_data_path,
                                       responses_path=responses_path))

        self.noise_analyser = cov.SeismicNoiseAnalyser(
            structure=sc.noise_estimator.structure,
            pre_arrival_time=sc.noise_estimator.pre_arrival_time,
            engine=self.engine,
            events=self.events,
            chop_bounds=['b', 'c'])

        self.wavemaps = []
        for i, wc in enumerate(sc.waveforms):
            if wc.include:
                wmap = heart.init_wavemap(
                    waveformfit_config=wc,
                    datahandler=self.datahandlers[wc.event_idx],
                    event=self.events[wc.event_idx],
                    mapnumber=i)

                self.wavemaps.append(wmap)
            else:
                logger.info('The waveform defined in "%s %i" config is not '
                            'included in the optimization!' % (wc.name, i))

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]),
                           name='seis_llk_%i' % t,
                           borrow=True))

    def _hyper2wavemap(self, hypername):

        dummy = '_'.join(hypername.split('_')[1:-1])
        for wmap in self.wavemaps:
            if wmap._mapid == dummy:
                return wmap

        raise ValueError('No waveform mapping found for hyperparameter! %s' %
                         hypername)

    def get_hypersize(self, hp_name):
        """
        Return size of the hyperparameter

        Parameters
        ----------
        hp_name: str
            of hyperparameter name

        Returns
        -------
        int
        """
        if self.config.dataset_specific_residual_noise_estimation:
            wmap = self._hyper2wavemap(hp_name)
            return wmap.hypersize
        else:
            return 1

    def __getstate__(self):
        self.engine.close_cashed_stores()
        return self.__dict__.copy()

    def analyse_noise(self, tpoint=None, chop_bounds=['b', 'c']):
        """
        Analyse seismic noise in datatraces and set
        data-covariance matrixes accordingly.
        """
        if self.config.noise_estimator.structure == 'non-toeplitz':
            results = self.assemble_results(tpoint,
                                            order='wmap',
                                            chop_bounds=chop_bounds)
        else:
            results = [None] * len(self.wavemaps)

        for wmap, wmap_results in zip(self.wavemaps, results):
            logger.info(
                'Retrieving seismic data-covariances with structure "%s" '
                'for %s ...' %
                (self.config.noise_estimator.structure, wmap._mapid))

            cov_ds_seismic = self.noise_analyser.get_data_covariances(
                wmap=wmap,
                results=wmap_results,
                sample_rate=self.config.gf_config.sample_rate,
                chop_bounds=chop_bounds)

            for j, trc in enumerate(wmap.datasets):
                if trc.covariance is None:
                    trc.covariance = heart.Covariance(data=cov_ds_seismic[j])
                else:
                    trc.covariance.data = cov_ds_seismic[j]

                if int(trc.covariance.data.sum()) == trc.data_len():
                    logger.warning('Data covariance is identity matrix!'
                                   ' Please double check!!!')

    def init_hierarchicals(self, problem_config):
        """
        Initialise random variables for temporal station corrections.
        """
        hierarchicals = problem_config.hierarchicals
        self._hierarchicalnames = []
        if not self.config.station_corrections and \
                self.correction_name in hierarchicals:
            raise ConfigInconsistentError(
                'Station corrections disabled, but they are defined'
                ' in the problem configuration!')

        if self.config.station_corrections and \
                self.correction_name not in hierarchicals:
            raise ConfigInconsistentError(
                'Station corrections enabled, but they are not defined'
                ' in the problem configuration!')

        if self.correction_name in hierarchicals:
            logger.info(
                'Estimating time shift for each station and waveform map...')
            for wmap in self.wavemaps:
                hierarchical_name = wmap.time_shifts_id
                nhierarchs = len(wmap.get_station_names())

                logger.info('For %s with %i shifts' %
                            (hierarchical_name, nhierarchs))

                if hierarchical_name in hierarchicals:
                    logger.info('Using wavemap specific imported:'
                                ' %s ' % hierarchical_name)
                    param = hierarchicals[hierarchical_name]
                else:
                    logger.info('Using global %s' % self.correction_name)
                    param = copy.deepcopy(
                        problem_config.hierarchicals[self.correction_name])
                    param.lower = num.repeat(param.lower, nhierarchs)
                    param.upper = num.repeat(param.upper, nhierarchs)
                    param.testvalue = num.repeat(param.testvalue, nhierarchs)

                if hierarchical_name not in self.hierarchicals:
                    if not num.array_equal(param.lower, param.upper):
                        kwargs = dict(name=hierarchical_name,
                                      shape=param.dimension,
                                      lower=param.lower,
                                      upper=param.upper,
                                      testval=param.testvalue,
                                      transform=None,
                                      dtype=tconfig.floatX)

                        try:
                            self.hierarchicals[hierarchical_name] = Uniform(
                                **kwargs)
                        except TypeError:
                            kwargs.pop('name')
                            self.hierarchicals[hierarchical_name] = \
                                Uniform.dist(**kwargs)

                        self._hierarchicalnames.append(hierarchical_name)
                    else:
                        logger.info(
                            'not solving for %s, got fixed at %s' %
                            (param.name,
                             utility.list2string(param.lower.flatten())))
                        self.hierarchicals[hierarchical_name] = param.lower

    def export(self,
               point,
               results_path,
               stage_number,
               fix_output=False,
               force=False,
               update=False,
               chop_bounds=['b', 'c']):
        """
        Save results for given point to result path.
        """
        def save_covs(wmap, cov_mat='pred_v'):
            """
            Save covariance matrixes of given attribute
            """
            covs = {
                utility.list2string(dataset.nslc_id):
                getattr(dataset.covariance, cov_mat)
                for dataset in wmap.datasets
            }

            outname = os.path.join(
                results_path, '%s_C_%s_%s' % ('seismic', cov_mat, wmap._mapid))
            logger.info('"%s" to: %s' % (wmap._mapid, outname))
            num.savez(outname, **covs)

        from pyrocko import io

        # synthetics and data
        results = self.assemble_results(point, chop_bounds=chop_bounds)
        for traces, attribute in heart.results_for_export(results=results,
                                                          datatype='seismic'):

            filename = '%s_%i.mseed' % (attribute, stage_number)
            outpath = os.path.join(results_path, filename)
            try:
                io.save(traces, outpath, overwrite=force)
            except io.mseed.CodeTooLong:
                if fix_output:
                    for tr in traces:
                        tr.set_station(tr.station[-5::])
                        tr.set_location(
                            str(self.config.gf_config.reference_model_idx))

                    io.save(traces, outpath, overwrite=force)
                else:
                    raise ValueError(
                        'Some station codes are too long! '
                        '(the --fix_output option will truncate to '
                        'last 5 characters!)')

        # export stdz residuals
        self.analyse_noise(point, chop_bounds=chop_bounds)
        if update:
            logger.info('Saving velocity model covariance matrixes...')
            self.update_weights(point, chop_bounds=chop_bounds)
            for wmap in self.wavemaps:
                save_covs(wmap, 'pred_v')

        logger.info('Saving data covariance matrixes...')
        for wmap in self.wavemaps:
            save_covs(wmap, 'data')

    def init_weights(self):
        """
        Initialise shared weights in wavemaps.
        """
        logger.info('Initialising weights ...')
        for wmap in self.wavemaps:
            weights = []
            for j, trc in enumerate(wmap.datasets):
                icov = trc.covariance.chol_inverse
                weights.append(
                    shared(icov,
                           name='seis_%s_weight_%i' % (wmap._mapid, j),
                           borrow=True))

            wmap.add_weights(weights)

    def get_all_station_names(self):
        """
        Returns list of station names in the order of wavemaps.
        """
        us = []
        for wmap in self.wavemaps:
            us.extend(wmap.get_station_names())

        return us

    def get_unique_time_shifts_ids(self):
        """
        Return unique time_shifts ids from wavemaps, which are keys to
        hierarchical RVs of station corrections
        """
        ts = []
        for wmap in self.wavemaps:
            ts.append(wmap.time_shifts_id)

        return utility.unique_list(ts)

    def get_unique_station_names(self):
        """
        Return unique station names from all wavemaps
        """
        return utility.unique_list(self.get_all_station_names())

    @property
    def n_t(self):
        return sum(wmap.n_t for wmap in self.wavemaps)

    @property
    def datasets(self):
        if self._datasets is None:
            ds = []
            for wmap in self.wavemaps:
                ds.extend(wmap.datasets)

            self._datasets = ds
        return self._datasets

    @property
    def weights(self):
        if self._weights is None or len(self._weights) == 0:
            ws = []
            for wmap in self.wavemaps:
                if wmap.weights:
                    ws.extend(wmap.weights)

            self._weights = ws
        return self._weights

    @property
    def targets(self):
        if self._targets is None:
            ts = []
            for wmap in self.wavemaps:
                ts.extend(wmap.targets)

            self._targets = ts
        return self._targets

    def assemble_results(self,
                         point,
                         chop_bounds=['a', 'd'],
                         order='list',
                         outmode='stacked_traces'):
        """
        Assemble seismic traces for given point in solution space.

        Parameters
        ----------
        point : :func:`pymc3.Point`
            Dictionary with model parameters

        Returns
        -------
        List with :class:`heart.SeismicResult`
        """
        if point is None:
            raise ValueError('A point has to be provided!')

        logger.debug('Assembling seismic waveforms ...')

        syn_proc_traces, obs_proc_traces = self.get_synthetics(
            point, outmode=outmode, chop_bounds=chop_bounds, order='wmap')

        # would yield exactly the same as the previous call; needs
        # wmap.prepare_data to be aware of the taper_tolerance_factor
        # DEPRECATED but kept for now
        # syn_filt_traces, obs_filt_traces = self.get_synthetics(
        #    point, outmode=outmode, taper_tolerance_factor=0.,
        #    chop_bounds=chop_bounds, order='wmap')
        # syn_filt_traces, obs_filt_traces = syn_proc_traces, obs_proc_traces
        #from pyrocko import trace
        #trace.snuffle(syn_proc_traces + obs_proc_traces)

        results = []
        for i, wmap in enumerate(self.wavemaps):
            wc = wmap.config
            at = wc.arrival_taper

            wmap_results = []
            for j, obs_tr in enumerate(obs_proc_traces[i]):

                taper = at.get_pyrocko_taper(float(obs_tr.tmin - at.a))

                if outmode != 'tapered_data':
                    source_contributions = [syn_proc_traces[i][j]]
                else:
                    source_contributions = syn_proc_traces[i][j]

                wmap_results.append(
                    heart.SeismicResult(
                        point=point,
                        processed_obs=obs_tr,
                        source_contributions=source_contributions,
                        taper=taper))

            if order == 'list':
                results.extend(wmap_results)

            elif order == 'wmap':
                results.append(wmap_results)

            else:
                raise ValueError('Order "%s" is not supported' % order)

        return results

    def update_llks(self, point):
        """
        Update posterior likelihoods of the composite with respect to one point
        in the solution space.

        Parameters
        ----------
        point : dict
            with numpy array-like items and variable name keys
        """
        results = self.assemble_results(point, chop_bounds=['b', 'c'])
        for k, result in enumerate(results):
            choli = self.datasets[k].covariance.chol_inverse
            tmp = choli.dot(result.processed_res.ydata)
            _llk = num.asarray([num.dot(tmp, tmp)])
            self._llks[k].set_value(_llk)

    def get_standardized_residuals(self, point, chop_bounds=['b', 'c']):
        """
        Parameters
        ----------
        point : dict
            with parameters to point in solution space to calculate
            standardized residuals

        Returns
        -------
        dict of arrays of standardized residuals,
            keys are nslc_ids
        """
        results = self.assemble_results(point,
                                        order='list',
                                        chop_bounds=chop_bounds)
        self.update_weights(point, chop_bounds=chop_bounds)

        counter = utility.Counter()
        hp_specific = self.config.dataset_specific_residual_noise_estimation
        stdz_res = OrderedDict()
        for data_trc, result in zip(self.datasets, results):
            hp_name = get_hyper_name(data_trc)
            if hp_specific:
                hp = point[hp_name][counter(hp_name)]
            else:
                hp = point[hp_name]

            choli = num.linalg.inv(data_trc.covariance.chol * num.exp(hp) / 2.)
            stdz_res[data_trc.nslc_id] = choli.dot(
                result.processed_res.get_ydata())

        return stdz_res

    def get_variance_reductions(self,
                                point,
                                results=None,
                                weights=None,
                                chop_bounds=['a', 'd']):
        """
        Parameters
        ----------
        point : dict
            with parameters to point in solution space to calculate
            variance reductions

        Returns
        -------
        dict of floats,
            keys are nslc_ids
        """
        if results is None:
            results = self.assemble_results(point,
                                            order='list',
                                            chop_bounds=chop_bounds)

        ndatasets = len(self.datasets)

        assert len(results) == ndatasets

        if weights is None:
            self.analyse_noise(point, chop_bounds=chop_bounds)
            self.update_weights(point, chop_bounds=chop_bounds)
            weights = self.weights

        nweights = len(weights)
        assert nweights == ndatasets

        logger.debug('n weights %i , n datasets %i' % (nweights, ndatasets))

        logger.debug('Calculating variance reduction for solution ...')

        var_reds = OrderedDict()
        for data_trc, weight, result in zip(self.datasets, weights, results):

            icov = data_trc.covariance.inverse

            data = result.processed_obs.get_ydata()
            residual = result.processed_res.get_ydata()

            nom = residual.T.dot(icov).dot(residual)
            denom = data.T.dot(icov).dot(data)

            logger.debug('nom %f, denom %f' % (float(nom), float(denom)))
            var_red = 1 - (nom / denom)

            nslc_id = utility.list2string(data_trc.nslc_id)
            logger.debug('Variance reduction for %s is %f' %
                         (nslc_id, var_red))

            if 0:
                from matplotlib import pyplot as plt
                fig, ax = plt.subplots(1, 1)
                im = ax.imshow(data_trc.covariance.data)
                plt.colorbar(im)
                plt.show()

            var_reds[nslc_id] = var_red

        return var_reds
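The variance reduction computed above is 1 - (r^T C^-1 r) / (d^T C^-1 d), with r the residual and d the observed data. A toy check with an identity covariance (values made up):

import numpy as num

data = num.array([1.0, 2.0, 3.0])
synthetic = num.array([0.9, 2.1, 2.8])
residual = data - synthetic
icov = num.eye(3)  # stand-in for covariance.inverse

nom = residual.T.dot(icov).dot(residual)
denom = data.T.dot(icov).dot(data)
print(1 - nom / denom)  # close to 1 -> good fit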
Example #10
class SeismicComposite(Composite):
    """
    Comprises how to solve the non-linear seismic forward model.

    Parameters
    ----------
    sc : :class:`config.SeismicConfig`
        configuration object containing seismic setup parameters
    event: :class:`pyrocko.model.Event`
    project_dir : str
        directory of the model project, where to find the data
    hypers : boolean
        if true initialise object for hyper parameter optimization
    """
    _datasets = None
    _weights = None
    _targets = None

    def __init__(self, sc, event, project_dir, hypers=False):

        super(SeismicComposite, self).__init__()

        logger.debug('Setting up seismic structure ...\n')
        self.name = 'seismic'
        self._like_name = 'seis_like'
        self.correction_name = 'time_shift'

        self.event = event
        self.engine = LocalEngine(
            store_superdirs=[sc.gf_config.store_superdir])

        seismic_data_path = os.path.join(
            project_dir, bconfig.seismic_data_name)

        self.datahandler = heart.init_datahandler(
            seismic_config=sc, seismic_data_path=seismic_data_path)

        self.noise_analyser = cov.SeismicNoiseAnalyser(
            structure=sc.noise_estimator.structure,
            pre_arrival_time=sc.noise_estimator.pre_arrival_time,
            engine=self.engine,
            event=self.event,
            chop_bounds=['b', 'c'])

        self.wavemaps = []
        for i, wc in enumerate(sc.waveforms):
            if wc.include:
                wmap = heart.init_wavemap(
                    waveformfit_config=wc,
                    datahandler=self.datahandler,
                    event=event,
                    mapnumber=i)

                self.wavemaps.append(wmap)
            else:
                logger.info(
                    'The waveform defined in "%s %i" config is not '
                    'included in the optimization!' % (wc.name, i))

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(
                        num.array([1.]), name='seis_llk_%i' % t, borrow=True))

    def __getstate__(self):
        self.engine.close_cashed_stores()
        return self.__dict__.copy()

    def analyse_noise(self, tpoint=None):
        """
        Analyse seismic noise in datatraces and set
        data-covariance matrixes accordingly.
        """
        if self.config.noise_estimator.structure == 'non-toeplitz':
            results = self.assemble_results(
                tpoint, order='wmap', chop_bounds=['b', 'c'])
        else:
            results = [None] * len(self.wavemaps)

        for wmap, wmap_results in zip(self.wavemaps, results):
            logger.info(
                'Retrieving seismic data-covariances with structure "%s" '
                'for %s ...' % (
                    self.config.noise_estimator.structure, wmap._mapid))

            cov_ds_seismic = self.noise_analyser.get_data_covariances(
                wmap=wmap, results=wmap_results,
                sample_rate=self.config.gf_config.sample_rate)

            for j, trc in enumerate(wmap.datasets):
                if trc.covariance is None:
                    trc.covariance = heart.Covariance(data=cov_ds_seismic[j])
                else:
                    trc.covariance.data = cov_ds_seismic[j]

                if int(trc.covariance.data.sum()) == trc.data_len():
                    logger.warning('Data covariance is identity matrix!'
                                   ' Please double check!!!')

    def init_hierarchicals(self, problem_config):
        """
        Initialise random variables for temporal station corrections.
        """
        if not self.config.station_corrections and \
                self.correction_name in problem_config.hierarchicals:
            raise ConfigInconsistentError(
                'Station corrections disabled, but they are defined'
                ' in the problem configuration!')

        if self.config.station_corrections and \
                self.correction_name not in problem_config.hierarchicals:
            raise ConfigInconsistentError(
                'Station corrections enabled, but they are not defined'
                ' in the problem configuration!')

        if self.correction_name in problem_config.hierarchicals:
            nhierarchs = len(self.get_unique_stations())
            param = problem_config.hierarchicals[self.correction_name]
            logger.info(
                'Estimating time shift for each station...')
            kwargs = dict(
                name=self.correction_name,
                shape=nhierarchs,
                lower=num.repeat(param.lower, nhierarchs),
                upper=num.repeat(param.upper, nhierarchs),
                testval=num.repeat(param.testvalue, nhierarchs),
                transform=None,
                dtype=tconfig.floatX)

            try:
                station_corrs_rv = Uniform(**kwargs)

            except TypeError:
                kwargs.pop('name')
                station_corrs_rv = Uniform.dist(**kwargs)

            self.hierarchicals[self.correction_name] = station_corrs_rv
        else:
            nhierarchs = 0

    def init_weights(self):
        """
        Initialise shared weights in wavemaps.
        """
        for wmap in self.wavemaps:
            weights = []
            for j, trc in enumerate(wmap.datasets):
                icov = trc.covariance.chol_inverse
                weights.append(
                    shared(
                        icov,
                        name='seis_%s_weight_%i' % (wmap._mapid, j),
                        borrow=True))

            wmap.add_weights(weights)

    def get_unique_stations(self):
        us = []
        for wmap in self.wavemaps:
            us.extend(wmap.get_station_names())
        return utility.unique_list(us)

    @property
    def n_t(self):
        return sum(wmap.n_t for wmap in self.wavemaps)

    @property
    def datasets(self):
        if self._datasets is None:
            ds = []
            for wmap in self.wavemaps:
                ds.extend(wmap.datasets)

            self._datasets = ds
        return self._datasets

    @property
    def weights(self):
        if self._weights is None:
            ws = []
            for wmap in self.wavemaps:
                ws.extend(wmap.weights)

            self._weights = ws
        return self._weights

    @property
    def targets(self):
        if self._targets is None:
            ts = []
            for wmap in self.wavemaps:
                ts.extend(wmap.targets)

            self._targets = ts
        return self._targets

    def assemble_results(
            self, point, chop_bounds=['a', 'd'], order='list',
            outmode='stacked_traces'):
        """
        Assemble seismic traces for given point in solution space.

        Parameters
        ----------
        point : :func:`pymc3.Point`
            Dictionary with model parameters

        Returns
        -------
        List with :class:`heart.SeismicResult`
        """
        if point is None:
            raise ValueError('A point has to be provided!')

        logger.debug('Assembling seismic waveforms ...')

        syn_proc_traces, obs_proc_traces = self.get_synthetics(
            point, outmode=outmode,
            chop_bounds=chop_bounds, order='wmap')

        # would yield exactly the same as the previous call; needs
        # wmap.prepare_data to be aware of the taper_tolerance_factor
        syn_filt_traces, obs_filt_traces = self.get_synthetics(
            point, outmode=outmode, taper_tolerance_factor=0.,
            chop_bounds=chop_bounds, order='wmap')

        results = []
        for i, wmap in enumerate(self.wavemaps):
            wc = wmap.config
            at = wc.arrival_taper

            wmap_results = []
            for j, obs_tr in enumerate(obs_proc_traces[i]):

                dtrace_proc = obs_tr.copy()
                dtrace_proc.set_ydata(
                    (obs_tr.get_ydata() - syn_proc_traces[i][j].get_ydata()))

                dtrace_filt = obs_filt_traces[i][j].copy()
                dtrace_filt.set_ydata(
                    (obs_filt_traces[i][j].get_ydata() -
                        syn_filt_traces[i][j].get_ydata()))

                taper = at.get_pyrocko_taper(
                    float(obs_tr.tmin - at.a))

                wmap_results.append(heart.SeismicResult(
                    processed_obs=obs_tr,
                    processed_syn=syn_proc_traces[i][j],
                    processed_res=dtrace_proc,
                    filtered_obs=obs_filt_traces[i][j],
                    filtered_syn=syn_filt_traces[i][j],
                    filtered_res=dtrace_filt,
                    taper=taper))

            if order == 'list':
                results.extend(wmap_results)

            elif order == 'wmap':
                results.append(wmap_results)

            else:
                raise ValueError('Order "%s" is not supported' % order)

        return results

    def update_llks(self, point):
        """
        Update posterior likelihoods of the composite with respect to one point
        in the solution space.

        Parameters
        ----------
        point : dict
            with numpy array-like items and variable name keys
        """
        results = self.assemble_results(point, chop_bounds=['b', 'c'])
        for k, result in enumerate(results):
            choli = self.datasets[k].covariance.chol_inverse
            tmp = choli.dot(result.processed_res.ydata)
            _llk = num.asarray([num.dot(tmp, tmp)])
            self._llks[k].set_value(_llk)
Example #11
    def __init__(self, sc, event, project_dir, hypers=False):

        super(SeismicComposite, self).__init__()

        logger.debug('Setting up seismic structure ...\n')
        self.name = 'seismic'
        self._like_name = 'seis_like'
        self.correction_name = 'time_shift'

        self.event = event
        self.engine = LocalEngine(
            store_superdirs=[sc.gf_config.store_superdir])

        seismic_data_path = os.path.join(project_dir,
                                         bconfig.seismic_data_name)

        self.datahandler = heart.init_datahandler(
            seismic_config=sc, seismic_data_path=seismic_data_path)

        self.wavemaps = []
        for wc in sc.waveforms:
            if wc.include:
                wmap = heart.init_wavemap(waveformfit_config=wc,
                                          datahandler=self.datahandler,
                                          event=event)

                if sc.calc_data_cov:
                    logger.info('Estimating seismic data-covariances '
                                'for %s ...\n' % wmap.name)

                    cov_ds_seismic = cov.seismic_data_covariance(
                        data_traces=wmap.datasets,
                        filterer=wc.filterer,
                        sample_rate=sc.gf_config.sample_rate,
                        arrival_taper=wc.arrival_taper,
                        engine=self.engine,
                        event=self.event,
                        targets=wmap.targets)
                else:
                    logger.info('No data-covariance estimation, using imported'
                                ' covariances...\n')

                    cov_ds_seismic = []
                    at = wc.arrival_taper
                    n_samples = int(
                        num.ceil(at.duration * sc.gf_config.sample_rate))

                    for trc in wmap.datasets:
                        if trc.covariance is None:
                            logger.warning(
                                'No data covariance given/estimated! '
                                'Setting default: eye')
                            cov_ds_seismic.append(num.eye(n_samples))
                        else:
                            data_cov = trc.covariance.data
                            if data_cov.shape[0] != n_samples:
                                raise ValueError(
                                    'Imported covariance %i does not agree '
                                    ' with taper duration %i!' %
                                    (data_cov.shape[0], n_samples))
                            cov_ds_seismic.append(data_cov)

                weights = []
                for t, trc in enumerate(wmap.datasets):
                    trc.covariance = heart.Covariance(data=cov_ds_seismic[t])
                    if int(trc.covariance.data.sum()) == trc.data_len():
                        logger.warning('Data covariance is identity matrix!'
                                       ' Please double check!!!')
                    icov = trc.covariance.chol_inverse
                    weights.append(
                        shared(icov,
                               name='seis_%s_weight_%i' % (wc.name, t),
                               borrow=True))

                wmap.add_weights(weights)

                self.wavemaps.append(wmap)
            else:
                logger.info('The waveform defined in "%s" config is not '
                            'included in the optimization!' % wc.name)

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]),
                           name='seis_llk_%i' % t,
                           borrow=True))
Example #12
class SeismicComposite(Composite):
    """
    Comprises how to solve the non-linear seismic forward model.

    Parameters
    ----------
    sc : :class:`config.SeismicConfig`
        configuration object containing seismic setup parameters
    event: :class:`pyrocko.model.Event`
    project_dir : str
        directory of the model project, where to find the data
    hypers : boolean
        if true initialise object for hyper parameter optimization
    """
    _datasets = None
    _weights = None
    _targets = None

    def __init__(self, sc, event, project_dir, hypers=False):

        super(SeismicComposite, self).__init__()

        logger.debug('Setting up seismic structure ...\n')
        self.name = 'seismic'
        self._like_name = 'seis_like'
        self.correction_name = 'time_shift'

        self.event = event
        self.engine = LocalEngine(
            store_superdirs=[sc.gf_config.store_superdir])

        seismic_data_path = os.path.join(project_dir,
                                         bconfig.seismic_data_name)

        self.datahandler = heart.init_datahandler(
            seismic_config=sc, seismic_data_path=seismic_data_path)

        self.wavemaps = []
        for wc in sc.waveforms:
            if wc.include:
                wmap = heart.init_wavemap(waveformfit_config=wc,
                                          datahandler=self.datahandler,
                                          event=event)

                if sc.calc_data_cov:
                    logger.info('Estimating seismic data-covariances '
                                'for %s ...\n' % wmap.name)

                    cov_ds_seismic = cov.seismic_data_covariance(
                        data_traces=wmap.datasets,
                        filterer=wc.filterer,
                        sample_rate=sc.gf_config.sample_rate,
                        arrival_taper=wc.arrival_taper,
                        engine=self.engine,
                        event=self.event,
                        targets=wmap.targets)
                else:
                    logger.info('No data-covariance estimation, using imported'
                                ' covariances...\n')

                    cov_ds_seismic = []
                    at = wc.arrival_taper
                    n_samples = int(
                        num.ceil(at.duration * sc.gf_config.sample_rate))

                    for trc in wmap.datasets:
                        if trc.covariance is None:
                            logger.warning(
                                'No data covariance given/estimated! '
                                'Setting default: eye')
                            cov_ds_seismic.append(num.eye(n_samples))
                        else:
                            data_cov = trc.covariance.data
                            if data_cov.shape[0] != n_samples:
                                raise ValueError(
                                    'Imported covariance %i does not agree '
                                    ' with taper duration %i!' %
                                    (data_cov.shape[0], n_samples))
                            cov_ds_seismic.append(data_cov)

                weights = []
                for t, trc in enumerate(wmap.datasets):
                    trc.covariance = heart.Covariance(data=cov_ds_seismic[t])
                    if int(trc.covariance.data.sum()) == trc.data_len():
                        logger.warning('Data covariance is identity matrix!'
                                       ' Please double check!!!')
                    icov = trc.covariance.chol_inverse
                    weights.append(
                        shared(icov,
                               name='seis_%s_weight_%i' % (wc.name, t),
                               borrow=True))

                wmap.add_weights(weights)

                self.wavemaps.append(wmap)
            else:
                logger.info('The waveform defined in "%s" config is not '
                            'included in the optimization!' % wc.name)

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]),
                           name='seis_llk_%i' % t,
                           borrow=True))

    def __getstate__(self):
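        # cached GF stores hold open file handles, which cannot be pickled;
        # close them before the instance state is serialised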
        self.engine.close_cashed_stores()
        return self.__dict__.copy()

    def init_hierarchicals(self, problem_config):
        """
        Initialise random variables for temporal station corrections.
        """
        if not self.config.station_corrections and \
                self.correction_name in problem_config.hierarchicals:
            raise ConfigInconsistentError(
                'Station corrections disabled, but they are defined'
                ' in the problem configuration!')

        if self.config.station_corrections and \
                self.correction_name not in problem_config.hierarchicals:
            raise ConfigInconsistentError(
                'Station corrections enabled, but they are not defined'
                ' in the problem configuration!')

        if self.correction_name in problem_config.hierarchicals:
            nhierarchs = len(self.get_unique_stations())
            param = problem_config.hierarchicals[self.correction_name]
            logger.info('Estimating time shift for each station...')
            kwargs = dict(name=self.correction_name,
                          shape=nhierarchs,
                          lower=num.repeat(param.lower, nhierarchs),
                          upper=num.repeat(param.upper, nhierarchs),
                          testval=num.repeat(param.testvalue, nhierarchs),
                          transform=None,
                          dtype=tconfig.floatX)

            try:
                station_corrs_rv = Uniform(**kwargs)

            except TypeError:
                kwargs.pop('name')
                station_corrs_rv = Uniform.dist(**kwargs)

            self.hierarchicals[self.correction_name] = station_corrs_rv
        else:
            nhierarchs = 0

    def get_unique_stations(self):
        us = []
        for wmap in self.wavemaps:
            us.extend(wmap.stations)
        return list(set(us))

    @property
    def n_t(self):
        return sum(wmap.n_t for wmap in self.wavemaps)

    @property
    def datasets(self):
        if self._datasets is None:
            ds = []
            for wmap in self.wavemaps:
                ds.extend(wmap.datasets)

            self._datasets = ds
        return self._datasets

    @property
    def weights(self):
        if self._weights is None:
            ws = []
            for wmap in self.wavemaps:
                ws.extend(wmap.weights)

            self._weights = ws
        return self._weights

    @property
    def targets(self):
        if self._targets is None:
            ts = []
            for wmap in self.wavemaps:
                ts.extend(wmap.targets)

            self._targets = ts
        return self._targets

    def assemble_results(self, point):
        """
        Assemble seismic traces for given point in solution space.

        Parameters
        ----------
        point : :func:`pymc3.Point`
            Dictionary with model parameters

        Returns
        -------
        List with :class:`heart.SeismicResult`
        """
        logger.debug('Assembling seismic waveforms ...')

        syn_proc_traces, obs_proc_traces = self.get_synthetics(
            point, outmode='stacked_traces')

        syn_filt_traces, obs_filt_traces = self.get_synthetics(
            point, outmode='stacked_traces', taper_tolerance_factor=2.)

        ats = []
        for wmap in self.wavemaps:
            wc = wmap.config
            ats.extend(wmap.n_t * [wc.arrival_taper])

        results = []
        for i, (obs_tr, at) in enumerate(zip(obs_proc_traces, ats)):

            dtrace_proc = obs_tr.copy()
            dtrace_proc.set_ydata(
                (obs_tr.get_ydata() - syn_proc_traces[i].get_ydata()))

            dtrace_filt = obs_filt_traces[i].copy()
            dtrace_filt.set_ydata((obs_filt_traces[i].get_ydata() -
                                   syn_filt_traces[i].get_ydata()))

            taper = at.get_pyrocko_taper(float(obs_tr.tmin + num.abs(at.a)))

            results.append(
                heart.SeismicResult(processed_obs=obs_tr,
                                    processed_syn=syn_proc_traces[i],
                                    processed_res=dtrace_proc,
                                    filtered_obs=obs_filt_traces[i],
                                    filtered_syn=syn_filt_traces[i],
                                    filtered_res=dtrace_filt,
                                    taper=taper))

        return results
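
    # Usage sketch (illustrative only; `composite` and `point` stand for a
    # configured SeismicComposite instance and a pymc3 point dictionary):
    #
    #     results = composite.assemble_results(point)
    #     for res in results:
    #         # each SeismicResult bundles observed, synthetic and residual
    #         # traces for one target
    #         print(res.processed_obs.nslc_id)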

    def update_llks(self, point):
        """
        Update posterior likelihoods of the composite with respect to one point
        in the solution space.

        Parameters
        ----------
        point : dict
            with numpy array-like items and variable name keys
        """
        results = self.assemble_results(point)
        for k, result in enumerate(results):
            choli = self.datasets[k].covariance.chol_inverse
            tmp = choli.dot(result.processed_res.ydata)
            _llk = num.asarray([num.dot(tmp, tmp)])
            self._llks[k].set_value(_llk)
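
The weighting used in update_llks above is the standard Cholesky trick: with
data covariance C = L L^T and choli = L^-1, the product tmp.dot(tmp) equals
the squared Mahalanobis norm r^T C^-1 r of the residual r. A self-contained
numpy check of that identity (illustrative only, not part of the class):

import numpy as num

C = num.array([[2.0, 0.5], [0.5, 1.0]])  # toy data covariance
L = num.linalg.cholesky(C)               # C = L L^T
choli = num.linalg.inv(L)                # stands in for covariance.chol_inverse
r = num.array([0.3, -0.1])               # toy residual vector
tmp = choli.dot(r)
assert num.allclose(tmp.dot(tmp), r.dot(num.linalg.solve(C, r)))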
Example #13
    def loadEngine(self, store, store_path):
        # load engine
        self.eng = LocalEngine(store_superdirs=store_path,
                               default_store_id=store)
Example #14
class patch:
    def __init__(self,
                 name,
                 ss,
                 ds,
                 east,
                 north,
                 down,
                 length,
                 width,
                 strike,
                 dip,
                 sig_ss,
                 sig_ds,
                 sig_east,
                 sig_north,
                 sig_down,
                 sig_length,
                 sig_width,
                 sig_strike,
                 sig_dip,
                 dist='Unif',
                 connectivity=False,
                 conservation=False):

        self.name = name
        self.ss, self.sss = ss, sig_ss
        self.ds, self.sds = ds, sig_ds
        self.x1, self.sx1 = north, sig_north  # north top-left corner
        self.x2, self.sx2 = east, sig_east  # east top-left corner
        self.x3, self.sx3 = down, sig_down
        self.l, self.sl = length, sig_length
        self.w, self.sw = width, sig_width
        self.strike, self.sstrike = strike, sig_strike
        self.dip, self.sdip = dip, sig_dip
        self.dist = dist
        self.connectivity = connectivity
        self.conservation = conservation
        # initiate variable
        self.connectindex = 0
        # set uncertainties to 0 for connected patches
        if self.connectivity is not False:
            self.sstrike, self.sx3, self.sx2, self.sx1 = 0, 0, 0, 0

        # create model vector
        self.param = [
            '{} strike slip'.format(self.name),
            '{} dip slip'.format(self.name),
            '{} north'.format(self.name),
            '{} east'.format(self.name),
            '{} down'.format(self.name),
            '{} length'.format(self.name),
            '{} width'.format(self.name),
            '{} strike'.format(self.name),
            '{} dip'.format(self.name)]

        self.m = self.tolist()
        self.sigmam = self.sigtolist()
        # self.mmin = list(map(operator.sub, self.m, self.sigmam))
        # self.mmax = list(map(operator.add, self.m, self.sigmam))

        # number of parameters per patch
        self.Mpatch = len(self.m)

    def connect(self, seg):
        # set strike
        self.strike = seg.strike

        # compute vertical distance and depth
        self.x3 = seg.x3 - self.w * math.sin(np.deg2rad(self.dip))

        # compute horizontal distance
        yp = math.cos(np.deg2rad(self.dip)) * self.w
        east_shift = -math.cos(np.deg2rad(seg.strike)) * yp
        north_shift = math.sin(np.deg2rad(seg.strike)) * yp
        self.x2, self.x1 = seg.x2 + east_shift, seg.x1 + north_shift

        # set uncertainties to 0
        # self.sstrike, self.sx3, self.sx2, self.sx1 = 0, 0, 0, 0

        # update m vector !!!! dangerous !!!!
        self.m = self.tolist()
        # self.sigmam = self.sigtolist()

    def build_prior(self):
        self.sampled = []
        self.fixed = []
        self.priors = []
        self.mmin, self.mmax = [], []

        for name, m, sig in zip(self.param, self.m, self.sigmam):
            if sig > 0.:
                # print name, m-sig, m+sig
                self.mmin.append(m - sig), self.mmax.append(m + sig)
                if self.dist == 'Normal':
                    p = pymc.Normal(name, mu=m, sd=sig)
                elif self.dist == 'Unif':
                    p = pymc.Uniform(name,
                                     lower=m - sig,
                                     upper=m + sig,
                                     value=m)
                else:
                    print(
                        'Problem with prior distribution definition '
                        'of parameter {}'.format(name))
                    sys.exit(1)
                self.sampled.append(name)
                self.priors.append(p)
            elif sig == 0:
                self.fixed.append(name)
            else:
                print('Problem with prior definition of parameter {}'.format(
                    name))
                sys.exit(1)
        # number of free parameters per patch
        self.Mfree = len(self.sampled)

    def info(self):
        print("name segment:", self.name)
        print("# ss     ds     x1(km)     x2(km)     x3(km)    length(km)     width(km)   strike   dip  ")
        print(' {:.2f}   {:.2f}   {:.1f}   {:.1f}   {:.2f}   {:.2f}   {:.2f}    {:d}     {:d}'
              .format(*self.tolist()))
        print("#sigma_ss   sigma_ds   sigma_x1  sigma_x2  sigma_x3  sigma_length  sigma_width   sigma_strike  sigma_dip  ")
        print('  {:.2f}   {:.2f}   {:.1f}   {:.1f}   {:.2f}   {:.2f}   {:.2f}    {:d}     {:d}'
              .format(*self.sigtolist()))
        print()

    def tolist(self):
        return [
            self.ss, self.ds, self.x1, self.x2, self.x3, self.l, self.w,
            int(self.strike),
            int(self.dip)
        ]

    def sigtolist(self):
        return [
            self.sss, self.sds, self.sx1, self.sx2, self.sx3, self.sl, self.sw,
            int(self.sstrike),
            int(self.sdip)
        ]

    def loadEngine(self, store, store_path):
        # load engine
        self.eng = LocalEngine(store_superdirs=store_path,
                               default_store_id=store)

    def engine(self, target, ref):

        # print store_path, store
        # print ref[0], ref[1]
        # print self.x1*1000., self.x2*1000., self.x3*1000., self.w*1000., self.l*1000., self.dip,
        # print np.rad2deg(math.atan2(self.ds,self.ss)), self.strike, self.time, (self.ss**2+self.ds**2)**0.5
        # print

        # print self.time
        self.source = RectangularSource(
            lon=ref[0],
            lat=ref[1],
            # distances in meters
            north_shift=float(self.x1 * 1000.),
            east_shift=float(self.x2 * 1000.),
            depth=float(self.x3 * 1000.),
            width=float(self.w * 1000.),
            length=float(self.l * 1000.),
            # angles in degrees
            dip=float(self.dip),
            rake=float(np.rad2deg(math.atan2(self.ds, self.ss))),
            strike=float(self.strike),
            slip=float((self.ss**2 + self.ds**2)**0.5),
            time=self.time,
            anchor='top')
        # print self.source

        return self.eng.process(self.source, target)
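
A hedged construction sketch for the class above; the values are illustrative
and the unit conventions (km for lengths, degrees for angles) follow the
constructor comments:

# illustrative only: a top segment and a second patch connected down-dip
top = patch('seg1', ss=2., ds=0.5, east=10., north=5., down=2.,
            length=20., width=10., strike=45, dip=60,
            sig_ss=1., sig_ds=1., sig_east=2., sig_north=2., sig_down=1.,
            sig_length=5., sig_width=5., sig_strike=10, sig_dip=10)
deep = patch('seg2', ss=1., ds=0.2, east=0., north=0., down=0.,
             length=20., width=10., strike=45, dip=60,
             sig_ss=1., sig_ds=1., sig_east=0., sig_north=0., sig_down=0.,
             sig_length=5., sig_width=5., sig_strike=0, sig_dip=10,
             connectivity='seg1')
deep.connect(top)   # inherit strike and shift position down-dip of seg1
deep.build_prior()  # pymc priors for every parameter with sigma > 0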
Example #15
from pyrocko.gf import LocalEngine, Target, DCSource, ws
from pyrocko import trace
from pyrocko.marker import PhaseMarker

# The store we are going extract data from:
store_id = 'landau_100hz'


# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['/home/asteinbe/gf_stores'])



from silvertine import scenario
from pyrocko import model, cake, orthodrome
from silvertine.util.ref_mods import landau_layered_model
from silvertine.locate.locate1D import get_phases_list
mod = landau_layered_model()
scale = 2e-14
cake_phase = cake.PhaseDef("P")
phase_list = [cake_phase]
waveforms_events = []
waveforms_noise = []
stations = model.load_stations("stations.raw.txt")
nstations = len(stations)*3

# The store we are going extract data from:
store_id = 'iceland_reg_v2'

# First, download a Greens Functions store. If you already have one that you
# would like to use, you can skip this step and point the *store_superdirs* in
# the next step to that directory.

if not os.path.exists(store_id):
    ws.download_gf_store(site='kinherd', store_id=store_id)

# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['.'])

# Define a list of pyrocko.gf.Target objects, representing the recording
# devices. In this case one station with a three component sensor will
# serve fine for demonstation.
channel_codes = 'ENZ'
targets = [
    Target(
        lat=10.,
        lon=10.,
        store_id=store_id,
        codes=('', 'STA', '', channel_code))
    for channel_code in channel_codes]

# Let's use a double couple source representation.
source_dc = DCSource(
    # the original snippet is truncated here; the parameter values below are
    # assumed, following the standard pyrocko forward-modelling example
    lat=11.,
    lon=11.,
    depth=10000.,
    strike=20.,
    dip=40.,
    rake=60.,
    magnitude=4.)
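
The snippet above breaks off at the source definition; in the usual pyrocko
forward-modelling flow the double couple would next be processed against the
targets, roughly as follows (assumed continuation, not part of the original):

response = engine.process(source_dc, targets)
synthetic_traces = response.pyrocko_traces()
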
def plot(settings, show=False):

    # align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = "P"
    zoom_window = list(settings.zoom)
    ampl_scaler = "4*standard deviation"

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(":")
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.0
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(
        filter(lambda s: match_nslc("%s.%s.%s.*" % s.nsl(), traces[0].nslc_id),
               stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning("%s  Using nearest neighbor instead." % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(
                    t)
                max_dist_delta = (store.config.distance_max -
                                  farthest_source.distance_to(t))
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == "velocity":
            tr = integrate_differentiate(tr, "differentiate")

        onset = engine.get_store(t.store_id).t("begin",
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=event.time + onset + zoom_window[0],
            tmax=event.time + onset + zoom_window[1],
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c="black", linewidth=1.0, alpha=1.0)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color="black",
                            alpha=0.5)
        ax.text(
            zoom_window[0] * 1.09,
            y_pos,
            "%1.1f" % (s.depth / 1000.0),
            horizontalalignment="right",
        )  # , fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = "pP"
            arrivals = mod.arrivals(
                phases=[cake.PhaseDef(label)],
                distances=[s.distance_to(t) * cake.m2d],
                zstart=s.depth,
            )

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c="blue")

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    # fontsize=12,
                    color="black",
                    verticalalignment="top",
                    horizontalalignment="right",
                )

            except IndexError:
                logger.warning(
                    "no pP phase at d=%s z=%s stat=%s" %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception("No Trace found!")
    if len(traces) > 1:
        raise Exception("More then one trace provided!")
    else:
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == "displacement":
            tr = integrate_differentiate(tr, "integrate")
        tr = settings.do_filter(tr)
        onset = (engine.get_store(targets[0].store_id).t(
            "begin", (event.depth, s.distance_to(targets[0]))) + event.time)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=onset + zoom_window[0] + correction,
            tmax=onset + zoom_window[1] + correction,
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.0)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel("Time [s]")
        ax.text(
            0.0,
            0.6,
            "Source depth [km]",
            rotation=90,
            horizontalalignment="left",
            transform=fig.transFigure,
        )  # , fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0.0, c="black")
        vline.set_linestyle("--")
    if settings.title:
        params = {
            "array-id": "".join(station.nsl()),
            "event_name": event.name,
            "event_time": time_to_str(event.time),
        }
        ax.text(
            0.5,
            1.05,
            settings.title % params,
            horizontalalignment="center",
            transform=ax.transAxes,
        )
    if settings.auto_caption:
        cax = fig.add_axes([0.0, 0.0, 1, 0.05], label="caption")
        cax.axis("off")
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == "displacement":
            quantity_info = "integrated velocity trace. "
        if settings.quantity == "velocity":
            quantity_info = "differentiated synthetic traces. "
        if settings.quantity == "restituted":
            quantity_info = "restituted traces. "

        captions = {"filters": ""}
        for f in settings.filters:
            captions["filters"] += "%s-pass, order %s, f$_c$=%s Hz. " % (
                f.type,
                f.order,
                f.corner,
            )
        captions["quantity_info"] = quantity_info
        captions["store_sampling"] = 1.0 / store.config.deltat
        cax.text(
            0,
            0,
            "Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s"
            % captions,
            fontsize=12,
            transform=cax.transAxes,
        )
        plt.subplots_adjust(hspace=0.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info("save as: %s " % settings.save_as)
        options = settings.__dict__
        options.update({"array-id": "".join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches="tight")
    if show:
        plt.show()
Example #18
stftypes = n_eachstf * ['lognorm'] + n_eachstf * ['lognorm']
noise_types = n_eachstf * ['GR'] + n_eachstf * ['Gaussian']

# uniform depth of 20 km / 20,000m
depths = n_seismograms * [20000]

# seismometer target and the actual computation engine
target = gf.Target(quantity='displacement',
                   lat=0,
                   lon=long,
                   store_id=velocity_model,
                   codes=('NET', 'STA', 'LOC', 'E'),
                   tmin=tmin,
                   tmax=tmax)
engine = LocalEngine(store_dirs=[velocity_model])

# all of that for this!
print('Creating synthetic seismograms...')
seismograms, spectrograms = createSynthetics(moment_tensors, stfs, depths,
                                             target, engine)

fin = {
    'seismograms': seismograms,
    'spectrograms': spectrograms,
    'moment_tensors':
    [mt.m() for mt in moment_tensors],  # dump the actual tensors
    'strike_dip_rakes': [mt.both_strike_dip_rake() for mt in moment_tensors],
    'source_mechanisms': source_mechanisms,
    'depths': depths,
    'noise_types': noise_types,
Example #19
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
           syn_in):
    '''
    method for calculating semblance of one station array
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT  : %f  MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()

    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    if cfg.UInt('forerun') > 0:
        ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))
                     / cfg.UInt('step'))
    else:
        ntimes = int(cfg.UInt('duration') / cfg.UInt('step'))
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)  # right number of stations?


#==================================synthetic BeamForming=======================================

    if cfg.Bool('shift_by_phase_pws') == True:
        calcStreamMapshifted= calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                    calcStreamMapshifted[trace]=tr
        calcStreamMap = calcStreamMapshifted


    if cfg.Bool('shift_by_phase_onset') == True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs= []
        calcStreamMapshifted= calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                  timing=timing,
                  fn_dump_center=pjoin(directory, 'array_center.pf'),
                  fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for trace in calcStreamMapshifted.keys():
            recordstarttime = calcStreamMapshifted[trace].stats.starttime.timestamp
            recordendtime = calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime, inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace]=shifted_obs_tr
            i = i+1

        calcStreamMap = calcStreamMapshifted


    weight = 0.
    if cfg.Bool('weight_by_noise') == True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs= []
        calcStreamMapshifted= calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon), depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                  timing=timing,
                  fn_dump_center=pjoin(directory, 'array_center.pf'),
                  fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
         100., store_id, nwindows=1,
         check_events=True, phase_def='P')

    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ############################################################################


    c=0
    streamCounter = 0

    for key in calcStreamMap.keys():
        streamID = key
        c2   = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o

                c2 += 1


        for key in TTTGridMap.keys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            # streams without a matching travel-time grid are skipped below


        if streamCounter not in traveltimes:
            continue  # hs: thread crashed before

        g = traveltimes[streamCounter]
        dimZ  = g.dimZ
        mint  = g.mint
        maxt  = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor

        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime [c][x * dimY + y] = elem.tt
                latv [x * dimY + y] = elem.lat
                lonv [x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1

    #endfor


# ==================================semblance calculation=======

    t1 = time.time()
    traces = traces.reshape(1, nostat*minSampleCount)

    traveltimes = traveltime.reshape(1, nostat*dimX*dimY)
    TTTGrid = True
    manual_shift = False

    if manual_shift:

        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapshifted[trace])
                trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs
        backSemb = num.ndarray(shape=(ntimes, dimX*dimY), dtype=float)
        bf = BeamForming(stations, traces, normalize=True)

        for i in range(ntimes):
            sembmax = 0
            sembmaxX = 0
            sembmaxY = 0
            for j in range(dimX * dimY):
                event = model.Event(lat=float(latv[j]), lon=float(lonv[j]),
                                    depth=ev.depth*1000., time=timeev)
                directory = arrayfolder
                shifted_traces, stack = bf.process(event=event,
                                                   timing=timing,
                                                   fn_dump_center=pjoin(
                                                                directory,
                                                         'array_center.pf'),
                                                   fn_beam=pjoin(directory,
                                                                 'beam.mseed'))
                tmin = stack.tmin+(i*nstep)+20
                tmax = stack.tmin+(i*nstep)+60
                stack.chop(tmin, tmax)
                backSemb[i][j] = abs(sum(stack.ydata))

        k = backSemb
        TTTGrid = False

    if TTTGrid:
        start_time = time.time()
        if cfg.UInt('forerun') > 0:
            ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))/step)
        else:
            ntimes = int((cfg.UInt('duration')) / step)
        nsamp = int(winlen)
        nstep = int(step)
        Gmint = cfg.Int('forerun')

        k = semblance(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                      new_frequence, minSampleCount, latv, lonv, traveltimes,
                      traces, calcStreamMap, timeev, Config, Origin)
        print("--- %s seconds ---" % (time.time() - start_time))

    t2 = time.time()

    Logfile.add('%s took %0.3f s' % ('CALC:',(t2-t1)))

    partSemb = k
    partSemb = partSemb.reshape(ntimes, migpoints)

    return partSemb
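
For orientation: the semblance referred to in the docstring is the classic
delay-and-sum coherence measure, the energy of the stack divided by the summed
energy of the individual, travel-time-aligned traces. A minimal numpy sketch
for a single time window (illustrative only; the optimized semblance kernel
called above is not reproduced here):

import numpy as num

def semblance_window(aligned):
    # aligned: (nstat, nsamp) array of travel-time shifted traces
    nstat = aligned.shape[0]
    stack_energy = num.sum(num.sum(aligned, axis=0) ** 2)
    trace_energy = nstat * num.sum(aligned ** 2)
    return stack_energy / trace_energy  # ~1 if coherent, ~1/nstat for noise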
Example #20
    def __init__(self, serve_dir):
        threading.Thread.__init__(self)
        self.engine = LocalEngine(store_dirs=[serve_dir])
        self.s = server.Server('localhost', 32483,
                               server.SeismosizerHandler, self.engine)
Example #21
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(filter(
        lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id),
        stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(
                    t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(
                    t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(
        zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == 'velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t('begin',
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left='off',
                           right='off',
                           labelleft='off')

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time + onset + zoom_window[0],
                          tmax=event.time + onset + zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color='black',
                            alpha=0.5)
        ax.text(zoom_window[0] * 1.09,
                y_pos,
                '%1.1f' % (s.depth / 1000.),
                horizontalalignment='right')  #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                    distances=[s.distance_to(t) * cake.m2d],
                                    zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    #fontsize=12,
                    color='black',
                    verticalalignment='top',
                    horizontalalignment='right')

            except IndexError:
                logger.warning(
                    'no pP phase at d=%s z=%s stat=%s' %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception('No Trace found!')
    if len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == 'displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left='off',
                           right='off',
                           labelleft='off')

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=onset + zoom_window[0] + correction,
                          tmax=onset + zoom_window[1] + correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0,
                0.6,
                'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure)  #, fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {
            'array-id': ''.join(station.nsl()),
            'event_name': event.name,
            'event_time': time_to_str(event.time)
        }
        ax.text(0.5,
                1.05,
                settings.title % params,
                horizontalalignment='center',
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        if settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        if settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '

        captions = {'filters': ''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % (
                f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1. / store.config.deltat
        cax.text(
            0,
            0,
            'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s'
            % captions,
            fontsize=12,
            transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
Example #22
    def load(self, inv):
        # load the data as a pyrocko pile and convert it into a list of traces
        data = pile.make_pile([self.wdir + self.reduction])
        self.traces = data.all()

        # load station file
        fname = self.wdir + self.network
        stations_list = model.load_stations(fname)

        for s in stations_list:
            s.set_channels_by_name(*self.component.split())

        self.targets = []
        self.tmin, self.tmax = [], []
        self.arrivals = []
        self.names = []

        for station, tr in zip(stations_list,
                               self.traces):  # iterate over all stations
            # print station.lat, station.lon
            target = Target(
                lat=float(station.lat),  # station lat.
                lon=float(station.lon),  # station lon.
                store_id=inv.store,  # The gf-store to be used for this target,
                # we can also employ different gf-stores for different targets.
                interpolation='multilinear',  # interp. method between gf cells
                quantity='displacement',  # wanted retrieved quantity
                codes=station.nsl() +
                ('BH' + self.component, ))  # Station and network code

            # Next we extract the expected arrival time for this station from
            # the store, so we can use it later to define a cut-out window for
            # the optimization:
            self.targets.append(target)
            self.names.append(station.nsl()[1])

        # print len(self.traces), len(self.targets)

        for station, tr, target in zip(stations_list, self.traces,
                                       self.targets):

            engine = LocalEngine(store_superdirs=inv.store_path)
            store = engine.get_store(inv.store)
            # trace.snuffle(tr, events=self.events)
            arrival = store.t(self.phase, self.base_source,
                              target)  # expected P-wave arrival
            # print arrival
            tmin = self.base_source.time + arrival - 15  # start 15s before theor. arrival
            tmax = self.base_source.time + arrival + 15  # end 15s after theor. arrival
            # # print self.tmin,self.tmax
            tr.chop(tmin=tmin, tmax=tmax)
            self.tmin.append(tmin)
            self.tmax.append(tmax)
            self.arrivals.append(self.base_source.time + arrival)

        self.Npoints = len(self.targets)
        # data vector
        self.d = []
        self.d.append([tr.ydata for tr in self.traces])
        self.d = flatten(self.d)
        # time vector
        t = []
        for i in range(self.Npoints):
            t.append(self.traces[i].get_xdata())
        # self.t.append(map((lambda x: getattr(x,'get_xdata()')),self.traces))
        # convert time
        self.t = time2dec(list(map(util.time_to_str, flatten(t))))
        # print self.t
        self.N = len(self.d)
Example #23
    def __init__(self):
        pass

    def check_snr(self):
        pass


if __name__ == '__main__':
    '''usage example'''
    km = 1000.
    # fbands = []
    # fbands.append([1.0, 2.0])
    # fbands.append([2.0, 6.0])
    # fbands.append([4.0, 10.])

    phases = LocalEngine(store_superdirs=['/data/stores'],
                         default_store_id='globalttt').get_store()

    #filenames = glob.glob('data/*.mseed')
    #filenames = glob.glob('/data/webnet/waveform_R/2008/*.mseed')
    #datapath = '/data/webnet/mseed/2008'
    #datapath = '/data/webnet/waveform_R/2008'
    #datapath = '/data/share/Res_all_NKC'
    datapath = '/media/usb0/Res_all_NKC_taper'
    #datapath = '/media/usb0/restituted_pyrocko'
    stations = model.load_stations('../data/stations.pf')
    reference_id = 'NKC'
    references = {}
    data_pile = pile.make_pile(datapath, selector='rest_*')

    fband = {'order': 4, 'corner_hp': 1.0, 'corner_lp': 4.}
    window = StaticLengthWindow(static_length=30., phase_position=0.5)
Example #24
def doCalc_syn(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
               TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
               syn_in, parameter):
    '''
    method for calculating semblance of one station array
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT  : %f  MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()

    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    if cfg.UInt('forerun') > 0:
        ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))
                     / cfg.UInt('step'))
    else:
        ntimes = int(cfg.UInt('duration') / cfg.UInt('step'))
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)  # right number of stations?

    store_id = syn_in.store()
    engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])

    targets = []
    for st in stations:
        target = Target(
                lat=st.lat,
                lon=st.lon,
                store_id=store_id,
                codes=(st.network, st.station, st.location, 'BHZ'),
                tmin=-1900,
                tmax=3900,
                interpolation='multilinear',
                quantity=cfg.quantity())
        targets.append(target)

    if syn_in.nsources() == 1:
        if syn_in.use_specific_stf() is True:
            stf = syn_in.stf()
            exec(stf)
        else:
            stf = STF()
        if syn_in.source() == 'RectangularSource':
                source = RectangularSource(
                    lat=float(syn_in.lat_0()),
                    lon=float(syn_in.lon_0()),
                    depth=syn_in.depth_syn_0()*1000.,
                    strike=syn_in.strike_0(),
                    dip=syn_in.dip_0(),
                    rake=syn_in.rake_0(),
                    width=syn_in.width_0()*1000.,
                    length=syn_in.length_0()*1000.,
                    nucleation_x=syn_in.nucleation_x_0(),
                    slip=syn_in.slip_0(),
                    nucleation_y=syn_in.nucleation_y_0(),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_0()))
        if syn_in.source() == 'DCSource':
                source = DCSource(
                    lat=float(syn_in.lat_0()),
                    lon=float(syn_in.lon_0()),
                    depth=syn_in.depth_syn_0()*1000.,
                    strike=syn_in.strike_0(),
                    dip=syn_in.dip_0(),
                    rake=syn_in.rake_0(),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_0()),
                    magnitude=syn_in.magnitude_0())

    else:
        sources = []
        for i in range(syn_in.nsources()):
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)

            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                    sources.append(RectangularSource(
                        lat=float(syn_in.lat_1(i)),
                        lon=float(syn_in.lon_1(i)),
                        depth=syn_in.depth_syn_1(i)*1000.,
                        strike=syn_in.strike_1(i),
                        dip=syn_in.dip_1(i),
                        rake=syn_in.rake_1(i),
                        width=syn_in.width_1(i)*1000.,
                        length=syn_in.length_1(i)*1000.,
                        nucleation_x=syn_in.nucleation_x_1(i),
                        slip=syn_in.slip_1(i),
                        nucleation_y=syn_in.nucleation_y_1(i),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_1(i))))

            if syn_in.source() == 'DCSource':
                    sources.append(DCSource(
                        lat=float(syn_in.lat_1(i)),
                        lon=float(syn_in.lon_1(i)),
                        depth=syn_in.depth_1(i)*1000.,
                        strike=syn_in.strike_1(i),
                        dip=syn_in.dip_1(i),
                        rake=syn_in.rake_1(i),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_1(i)),
                        magnitude=syn_in.magnitude_1(i)))
        source = CombiSource(subsources=sources)
    response = engine.process(source, targets)

    synthetic_traces = response.pyrocko_traces()
    if cfg.Bool('synthetic_test_add_noise') is True:
        from noise_addition import add_noise
        trs_orgs = []
        calcStreamMapsyn = calcStreamMap.copy()
        #from pyrocko import trace
        for tracex in calcStreamMapsyn.keys():
                for trl in synthetic_traces:
                    if str(trl.name()[4:12]) == str(tracex[4:]):
                        tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapsyn[tracex])
                        tr_org.downsample_to(2.0)
                        trs_orgs.append(tr_org)
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        synthetic_traces = add_noise(trs_orgs, engine, source.pyrocko_event(),
                                     stations,
                                     store_id, phase_def='P')
    trs_org = []
    trs_orgs = []
    fobj = os.path.join(arrayfolder, 'shift.dat')
    xy = num.loadtxt(fobj, usecols=1, delimiter=',')
    calcStreamMapsyn = calcStreamMap.copy()
    #from pyrocko import trace
    for tracex in calcStreamMapsyn.keys():
        for trl in synthetic_traces:
            if str(trl.name()[4:12]) == str(tracex[4:]):
                mod = trl

                recordstarttime = calcStreamMapsyn[tracex].stats.starttime.timestamp
                recordendtime = calcStreamMapsyn[tracex].stats.endtime.timestamp
                tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapsyn[tracex])
                trs_orgs.append(tr_org)

                tr_org_add = mod.chop(recordstarttime, recordendtime,
                                      inplace=False)
                synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                calcStreamMapsyn[tracex] = synthetic_obs_tr
                trs_org.append(tr_org_add)
    calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') is True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted


    if cfg.Bool('shift_by_phase_onset') is True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for trace in calcStreamMapshifted.keys():
            recordstarttime = calcStreamMapshifted[trace].stats.starttime.timestamp
            recordendtime = calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime, inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace]=shifted_obs_tr
            i = i+1

        calcStreamMap = calcStreamMapshifted


    weight = 0.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth*1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')

    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount), dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX*dimY), dtype=float)
    latv = num.ndarray(dimX*dimY, dtype=float)
    lonv = num.ndarray(dimX*dimY, dtype=float)
    ############################################################################


    c = 0
    streamCounter = 0

    for key in calcStreamMap.keys():
        streamID = key
        c2 = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1


        for gkey in TTTGridMap.keys():
            if streamID == gkey:
                traveltimes[streamCounter] = TTTGridMap[gkey]

        if streamCounter not in traveltimes:
            continue  # hs: thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        maxt = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor

        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1

    #endfor


    ############################## CALCULATE PARAMETERS FOR SEMBLANCE CALCULATION ##################
    new_frequence = cfg.newFrequency()  # ['new_frequence']
    nsamp = winlen * new_frequence
    nstep = int(step * new_frequence)
    migpoints = dimX * dimY

    dimZ = 0
    maxp = int(Config['ncore'])


    Logfile.add('PROCESS %d  NTIMES: %d' % (flag, ntimes))

    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(Gmint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))
        print('traveltime', traveltime, type(traveltime))


#==================================semblance calculation========================================

    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)
    traveltime = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = True
    try:
        if USE_C_CODE:
            import Cm
            import CTrig
            start_time = time.time()
            k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                         Gmint, new_frequence, minSampleCount, latv, lonv,
                         traveltime, traces)
            print("--- %s seconds ---" % (time.time() - start_time))
        else:
            start_time = time.time()
            k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                      Gmint, new_frequence, minSampleCount, latv, lonv,
                      traveltime, traces)  # hs
            print("--- %s seconds ---" % (time.time() - start_time))
    except Exception:
        print("loaded tttgrid has probably wrong dimensions or stations; "
              "delete the ttgrid or exchange it")

    t2 = time.time()


    partSemb = k
    partSemb_syn = partSemb.reshape(ntimes, migpoints)


    return partSemb_syn
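
A note on the pattern shared by these snippets: each one builds pyrocko source objects from configuration accessors, pairs them with Target receivers, and hands both to a LocalEngine backed by a pre-computed Green's function store. A minimal, self-contained sketch of that round trip (the store directory, store id and all numeric values are placeholders, not taken from the snippet above):

from pyrocko import util
from pyrocko.gf import LocalEngine, DCSource, Target

# Placeholder store location; any pre-computed GF store works here.
engine = LocalEngine(store_superdirs=['gf_stores'])

source = DCSource(
    lat=50.2, lon=12.4, depth=8e3,  # depth in m
    strike=170., dip=80., rake=-5.,
    magnitude=3.5,
    time=util.str_to_time('2020-01-01 10:00:00'))

targets = [Target(lat=50.8, lon=12.0,
                  store_id='placeholder_store_id',  # must name an existing store
                  codes=('', 'STA', '', 'Z'),
                  quantity='displacement')]

response = engine.process(source, targets)
synthetic_traces = response.pyrocko_traces()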
Example #25
0
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
           syn_in):
    '''
    Calculate the semblance for one station array.
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT  : %f  MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)
    cfg_f = FilterCfg(Config)

    timeev = util.str_to_time(ev.time)
    dimX = cfg.dimX()  #('dimx')
    dimY = cfg.dimY()  #('dimy')
    winlen = cfg.winlen()  #('winlen')
    step = cfg.step()  #('step')

    new_frequence = cfg.newFrequency()  #('new_frequence')
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import model
    obspy_compat.plant()
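    # obspy_compat.plant() enables obspy <-> pyrocko interoperability; the
    # explicit to_pyrocko_trace()/to_obspy_trace() helpers used below convert
    # individual traces between the two libraries.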

    ############################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    lats = []
    lons = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=float(il.lat),
                                    lon=float(il.lon),
                                    station=il.sta,
                                    network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele,
                                    location=il.loc)
                stations.append(szo)
                lats.append(float(il.lat))
                lons.append(float(il.lon))
    array_center = [num.mean(lats), num.mean(lons)]

    #==================================synthetic BeamForming======================

    if cfg.Bool('synthetic_test') is True:
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        recordstarttimes = []
        for tracex in calcStreamMap.keys():
            recordstarttimes.append(
                calcStreamMap[tracex].stats.starttime.timestamp)
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMap[tracex])
            tmin = tr_org.tmin

        #tmin= num.min(recordstarttimes)
        targets = []
        sources = []
        for st in stations:
            target = Target(lat=st.lat,
                            lon=st.lon,
                            store_id=store_id,
                            codes=(st.network, st.station, st.location, 'BHZ'),
                            tmin=-6900,
                            tmax=6900,
                            interpolation='multilinear',
                            quantity=cfg.quantity())
            targets.append(target)

        if syn_in.nsources() == 1:
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(
                    RectangularSource(
                        lat=float(syn_in.lat_0()),
                        lon=float(syn_in.lon_0()),
                        east_shift=float(syn_in.east_shift_0()) * 1000.,
                        north_shift=float(syn_in.north_shift_0()) * 1000.,
                        depth=syn_in.depth_syn_0() * 1000.,
                        strike=syn_in.strike_0(),
                        dip=syn_in.dip_0(),
                        rake=syn_in.rake_0(),
                        width=syn_in.width_0() * 1000.,
                        length=syn_in.length_0() * 1000.,
                        nucleation_x=syn_in.nucleation_x_0(),
                        slip=syn_in.slip_0(),
                        nucleation_y=syn_in.nucleation_y_0(),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_0())))
            if syn_in.source() == 'DCSource':
                sources.append(
                    DCSource(lat=float(syn_in.lat_0()),
                             lon=float(syn_in.lon_0()),
                             east_shift=float(syn_in.east_shift_0()) * 1000.,
                             north_shift=float(syn_in.north_shift_0()) * 1000.,
                             depth=syn_in.depth_syn_0() * 1000.,
                             strike=syn_in.strike_0(),
                             dip=syn_in.dip_0(),
                             rake=syn_in.rake_0(),
                             stf=stf,
                             time=util.str_to_time(syn_in.time_0()),
                             magnitude=syn_in.magnitude_0()))

        else:
            for i in range(syn_in.nsources()):
                if syn_in.use_specific_stf() is True:
                    stf = syn_in.stf()
                    exec(stf)

                else:
                    stf = STF()
                if syn_in.source() == 'RectangularSource':
                    sources.append(
                        RectangularSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            width=syn_in.width_1(i) * 1000.,
                            length=syn_in.length_1(i) * 1000.,
                            nucleation_x=syn_in.nucleation_x_1(i),
                            slip=syn_in.slip_1(i),
                            nucleation_y=syn_in.nucleation_y_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i))))

                if syn_in.source() == 'DCSource':
                    sources.append(
                        DCSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i)),
                            magnitude=syn_in.magnitude_1(i)))
            #source = CombiSource(subsources=sources)
        synthetic_traces = []
        for source in sources:
            response = engine.process(source, targets)
            synthetic_traces_source = response.pyrocko_traces()
            if not synthetic_traces:
                synthetic_traces = synthetic_traces_source
            else:
                for trsource, tr in zip(synthetic_traces_source,
                                        synthetic_traces):
                    tr.add(trsource)
            from pyrocko import trace as trld
            #trld.snuffle(synthetic_traces)
        timeev = util.str_to_time(syn_in.time_0())
        if cfg.Bool('synthetic_test_add_noise') is True:
            from noise_addition import add_noise
            trs_orgs = []
            calcStreamMapsyn = calcStreamMap.copy()
            #from pyrocko import trace
            for tracex in calcStreamMapsyn.keys():
                for trl in synthetic_traces:
                    if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                            trl.name()[3:13]) == str(tracex[3:]) or str(
                                trl.name()[3:11]) == str(tracex[3:]) or str(
                                    trl.name()[3:14]) == str(tracex[3:]):
                        tr_org = obspy_compat.to_pyrocko_trace(
                            calcStreamMapsyn[tracex])
                        tr_org.downsample_to(2.0)
                        trs_orgs.append(tr_org)
            store_id = syn_in.store()
            engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
            synthetic_traces = add_noise(trs_orgs,
                                         engine,
                                         source.pyrocko_event(),
                                         stations,
                                         store_id,
                                         phase_def='P')
        trs_org = []
        trs_orgs = []
        from pyrocko import trace
        fobj = os.path.join(arrayfolder, 'shift.dat')
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                        trl.name()[3:13]) == str(tracex[3:]) or str(
                            trl.name()[3:11]) == str(tracex[3:]) or str(
                                trl.name()[3:14]) == str(tracex[3:]):
                    mod = trl
                    recordstarttime = calcStreamMapsyn[
                        tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapsyn[
                        tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    if switch == 0:
                        tr_org.bandpass(4, cfg_f.flo(), cfg_f.fhi())
                    elif switch == 1:
                        tr_org.bandpass(4, cfg_f.flo2(), cfg_f.fhi2())
                    trs_orgs.append(tr_org)
                    tr_org_add = mod.chop(recordstarttime,
                                          recordendtime,
                                          inplace=False)
                    synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapsyn[tracex] = synthetic_obs_tr
                    trs_org.append(tr_org_add)
        calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') is True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_cc') is True:
        from stacking import align_traces
        calcStreamMapshifted = calcStreamMap.copy()
        list_tr = []
        for trace in calcStreamMapshifted.keys():
            tr_org = calcStreamMapshifted[trace]
            list_tr.append(tr_org)
        shifts, ccs = align_traces(list_tr, 10, master=False)
        for shift in shifts:
            for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapshifted[trace])
                tr_org.shift(shift)
                shifted = obspy_compat.to_obspy_trace(tr_org)
                calcStreamMapshifted[trace] = shifted
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') is True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat),
                            lon=float(ev.lon),
                            depth=ev.depth * 1000.,
                            time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for tracex in calcStreamMapshifted.keys():
            for trl in shifted_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                        trl.name()[3:13]) == str(tracex[3:]) or str(
                            trl.name()[3:11]) == str(tracex[3:]) or str(
                                trl.name()[3:14]) == str(tracex[3:]):
                    mod = trl
                    recordstarttime = calcStreamMapshifted[
                        tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapshifted[
                        tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapshifted[tracex])
                    tr_org_add = mod.chop(recordstarttime,
                                          recordendtime,
                                          inplace=False)
                    shifted_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapshifted[tracex] = shifted_obs_tr
        calcStreamMap = calcStreamMapshifted

    weight = 1.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat),
                            lon=float(ev.lon),
                            depth=ev.depth * 1000.,
                            time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces,
                         engine,
                         event,
                         stations,
                         100.,
                         store_id,
                         nwindows=1,
                         check_events=True,
                         phase_def='P')

    if cfg.Bool('array_response') is True:
        from obspy.signal import array_analysis
        from obspy.core import stream
        ntimesr = int((forerun + duration) / step)
        nsampr = int(winlen)
        nstepr = int(step)
        sll_x = -3.0
        slm_x = 3.0
        sll_y = -3.0
        slm_y = 3.0
        sl_s = 0.03
        # sliding window properties

        # frequency properties
        frqlow = 1.0
        frqhigh = 8.0
        prewhiten = 0
        # restrict output
        semb_thres = -1e9
        vel_thres = -1e9
        # stime and etime (the analysis window, as UTCDateTime) are assumed
        # to be defined in the enclosing scope
        stream_arr = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream_arr.append(calcStreamMapshifted[trace])
        results = array_analysis.array_processing(
            stream_arr, nsampr, nstepr, sll_x, slm_x, sll_y, slm_y,
            sl_s, semb_thres, vel_thres, frqlow, frqhigh, stime,
            etime, prewhiten)
        timestamps = results[0]
        relative_relpow = results[1]
        absolute_relpow = results[2]

    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ###########################################################################

    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)

    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ###########################################################################

    c = 0
    streamCounter = 0

    for key in calcStreamMap.keys():
        streamID = key
        c2 = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o

                c2 += 1

        for gkey in TTTGridMap.keys():
            if streamID == gkey:
                traveltimes[streamCounter] = TTTGridMap[gkey]

        if streamCounter not in traveltimes:
            continue  # hs: thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1

    #endfor

    ################ CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ########
    nsamp = winlen * new_frequence

    nstep = step * new_frequence
    migpoints = dimX * dimY

    dimZ = 0
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d  NTIMES: %d' % (flag, ntimes))

    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))

#===================compressed sensing=================================
    try:
        cs = cfg.cs()
    except Exception:
        cs = 0
    if cs == 1:
        csmaxvaluev = num.ndarray(ntimes, dtype=float)
        csmaxlatv = num.ndarray(ntimes, dtype=float)
        csmaxlonv = num.ndarray(ntimes, dtype=float)
        folder = Folder['semb']
        fobjcsmax = open(os.path.join(folder, 'csmax_%s.txt' % (switch)), 'w')
        traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
        traveltime2 = toMatrix(traveltimes, dimX * dimY)  # for relstart
        traveltime = traveltime.reshape(dimX * dimY, nostat)
        import matplotlib as mpl
        import scipy.optimize as spopt
        import scipy.fftpack as spfft
        import scipy.ndimage as spimg
        import cvxpy as cvx
        import matplotlib.pyplot as plt
        A = spfft.idct(traveltime, norm='ortho', axis=0)
        n = (nostat * dimX * dimY)
        vx = cvx.Variable(dimX * dimY)
        res = cvx.Variable(1)
        objective = cvx.Minimize(cvx.norm(res, 1))
        back2 = num.zeros([dimX, dimY])
        nsamp_i = int(nsamp)
        fobj = open(
            os.path.join(folder,
                         '%s-%s_%03d.cs' % (switch, Origin['depth'], nsamp_i)),
            'w')
        for i in range(ntimes):
            ydata = []
            try:
                for tr in traces:
                    relstart = int((dimX * dimY - mint) * new_frequence +
                                   0.5) + i * nstep
                    tr = spfft.idct(tr[relstart + i:relstart + i +
                                       dimX * dimY],
                                    norm='ortho',
                                    axis=0)

                    ydata.append(tr)
                    ydata = num.asarray(ydata)
                    ydata = ydata.reshape(dimX * dimY, nostat)

                    constraints = [
                        res == cvx.sum_entries(0 + num.sum([
                            ydata[:, x] - A[:, x] * vx for x in range(nostat)
                        ]))
                    ]

                    prob = cvx.Problem(objective, constraints)
                    result = prob.solve(verbose=False, max_iters=200)

                    x = num.array(vx.value)
                    x = num.squeeze(x)
                    back1 = x.reshape(dimX, dimY)
                    sig = spfft.idct(x, norm='ortho', axis=0)
                    back2 = back2 + back1
                    xs = num.array(res.value)
                    xs = num.squeeze(xs)
                    max_cs = num.max(back1)
                    idx = num.where(back1 == back1.max())
                    csmaxvaluev[i] = max_cs
                    csmaxlatv[i] = latv[idx[0]]
                    csmaxlonv[i] = lonv[idx[1]]
                    fobj.write('%.5f %.5f %.20f\n' %
                               (latv[idx[0]], lonv[idx[1]], max_cs))
                    fobjcsmax.write('%.5f %.5f %.20f\n' %
                                    (latv[idx[0]], lonv[idx[1]], max_cs))
            except Exception:
                pass

        fobj.close()
        fobjcsmax.close()

#==================================semblance calculation========================================

    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)

    traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = False
    #try:
    if USE_C_CODE:
        import Cm
        import CTrig
        start_time = time.time()
        k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                     new_frequence, minSampleCount, latv, lonv, traveltimes,
                     traces)
        print("--- %s seconds ---" % (time.time() - start_time))
    else:
        start_time = time.time()
        ntimes = int((forerun + duration) / step)
        nsamp = int(winlen)
        nstep = int(step)
        Gmint = cfg.Int('forerun')
        k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                  new_frequence, minSampleCount, latv, lonv, traveltimes,
                  traces, calcStreamMap, timeev)
        print("--- %s seconds ---" % (time.time() - start_time))
    #except ValueError:
    #        k  = Cm.otest(maxp,nostat,nsamp,ntimes,nstep,dimX,dimY,Gmint,new_frequence,
    #                      minSampleCount,latv,lonv,traveltimes,traces)
    #    print "loaded tttgrid has probably wrong dimensions or stations,\
    #                delete ttgrid or exchange is recommended"

    t2 = time.time()

    Logfile.add('%s took %0.3f s' % ('CALC:', (t2 - t1)))

    partSemb = k
    partSemb = partSemb.reshape(ntimes, migpoints)

    return partSemb, weight, array_center
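
Both versions of doCalc flatten the 2-D search grid with the index x * dimY + y before handing it to the semblance kernel and reshape the result back to (ntimes, migpoints) afterwards. A quick numpy illustration of that row-major layout (the grid dimensions here are arbitrary):

import numpy as num

dimX, dimY = 3, 4
flat = num.empty(dimX * dimY)
for x in range(dimX):
    for y in range(dimY):
        flat[x * dimY + y] = 10 * x + y  # y varies fastest

grid = flat.reshape(dimX, dimY)  # recovers the 2-D layout
assert grid[2, 3] == flat[2 * dimY + 3]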
Example #26
0
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=pjoin(settings_fn))
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(
                float(zstart) * km,
                float(zstop) * km,
                float(inkr) * km)
            traces = io.load(settings.trace_filename)
            event = model.load_events(settings.event_filename)
            assert len(event) == 1
            event = event[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = list(filter(
                lambda s: match_nslc('%s.%s.%s.*' % s.nsl(),
                                     traces[0].nslc_id), stations))
            if len(station) != 1:
                logger.error('no matching station found for %s' %
                             '.'.join(traces[0].nslc_id))
            else:
                station = station[0]
            targets = [
                station_to_target(station,
                                  quantity=settings.quantity,
                                  store_id=settings.store_id)
            ]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' %
                                   error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources,
                                             key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources,
                                              key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(
                            t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(
                            t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets,
                                             sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces) == 1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(
                mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(
                mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            mesh_fwidth = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            misfits_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc - fc * 2. / 5.
                fr_max = fc + fc * 2. / 5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(
                            mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(
                            mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc + width * 0.5),
                                                order=4,
                                                type='low'),
                            ButterworthResponse(corner=float(fc - width * 0.5),
                                                order=4,
                                                type='high')
                        ]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(
                            candidate=candidate,
                            setup=settings.misfit_setup,
                            debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(
                                len(test_depths) + 1, 1, i + 1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m / n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits: \n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits * 100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
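
The grid search in invert fills misfits_array with one value per (centre frequency, band width, depth candidate) and then reduces over the candidate axis. The reduction step in isolation, with random stand-in misfits:

import numpy as num

n_fc, n_width, n_candidates = 3, 3, 5
misfits_array = num.random.random((n_fc, n_width, n_candidates))

i_best_fits = num.argmin(misfits_array, axis=2)  # best depth candidate per cell
best_fits = num.min(misfits_array, axis=2)       # its misfit value
print(i_best_fits.shape, best_fits.shape)        # (3, 3) (3, 3)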
Example #27
0
def generate_test_data_grid(store_id,
                            store_dirs,
                            coordinates,
                            geometry_params,
                            pre=0.5,
                            post=3,
                            stations_input=None,
                            batch_loading=256,
                            paths_disks=None):

    engine = LocalEngine(store_superdirs=[store_dirs])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]

    waveforms_events = []
    waveforms_events_uncut = []
    waveforms_noise = []
    sources = []

    lats = coordinates[0]
    lons = coordinates[1]
    depths = coordinates[2]

    if stations_input is None:
        stations_unsorted = model.load_stations("data/stations.pf")
    else:
        stations_unsorted = model.load_stations(stations_input)
    for st in stations_unsorted:
        st.dist = orthodrome.distance_accurate50m(st.lat, st.lon, lats[0],
                                                  lons[0])
        st.azi = orthodrome.azimuth(st.lat, st.lon, lats[0], lons[0])
    stations = sorted(stations_unsorted, key=lambda x: x.dist, reverse=True)

    targets = []
    events = []
    mean_lat = []
    mean_lon = []
    max_rho = 0.
    for st in stations:
        mean_lat.append(st.lat)
        mean_lon.append(st.lon)
        for cha in st.channels:
            if cha.name is not "R" and cha.name is not "T" and cha.name is not "Z":
                target = Target(lat=st.lat,
                                lon=st.lon,
                                store_id=store_id,
                                interpolation='multilinear',
                                quantity='displacement',
                                codes=st.nsl() + (cha.name, ))
                targets.append(target)

    strikes = geometry_params[0]
    dips = geometry_params[1]
    rakes = geometry_params[2]
    vs = geometry_params[3]
    ws = geometry_params[4]

    grid_points = []
    for lat in lats:
        for lon in lons:
            for depth in depths:
                grid_points.append([lat, lon, depth])

    ray.init(num_cpus=num_cpus - 1)  # num_cpus is assumed to be defined at module level
    npm = len(lats) * len(lons) * len(depths)
    npm_geom = len(strikes) * len(dips) * len(rakes)

    results = ray.get([
        get_parallel_mtqt.remote(i,
                                 targets,
                                 store_id,
                                 post,
                                 pre,
                                 stations,
                                 mod,
                                 grid_points[i],
                                 strikes,
                                 dips,
                                 rakes,
                                 vs,
                                 ws,
                                 store_dirs,
                                 batch_loading=batch_loading,
                                 npm=npm_geom,
                                 paths_disks=paths_disks)
        for i in range(len(grid_points))
    ])
    ray.shutdown()
    return waveforms_events
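
The triple loop that assembles grid_points is a plain Cartesian product of latitudes, longitudes and depths; itertools expresses the same thing more compactly (the coordinate values below are invented):

import itertools

lats = [49.0, 49.1]
lons = [8.0, 8.1]
depths = [2e3, 5e3]

grid_points = [list(p) for p in itertools.product(lats, lons, depths)]
assert len(grid_points) == len(lats) * len(lons) * len(depths)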
Example #28
0
    if envelope:
        cbar_label = 'Envelope ' + cbar_label

    cb = fig.colorbar(cmw, ax=ax, orientation='vertical', shrink=.8, pad=0.11)

    cb.set_label(cbar_label)

    if axes is None:
        plt.show()
    return resp


__all__ = ['plot_directivity']

if __name__ == '__main__':
    engine = LocalEngine(store_superdirs=['.'], use_config=True)

    rect_source = RectangularSource(depth=2.6 * km,
                                    strike=240.,
                                    dip=76.6,
                                    rake=-.4,
                                    anchor='top',
                                    nucleation_x=-.57,
                                    nucleation_y=-.59,
                                    velocity=2070.,
                                    length=27 * km,
                                    width=9.4 * km,
                                    slip=1.4)

    resp = plot_directivity(engine,
                            rect_source,
Example #29
0
def get_parallel_mtqt(i,
                      targets,
                      store_id,
                      post,
                      pre,
                      stations,
                      mod,
                      params,
                      strikes,
                      dips,
                      rakes,
                      vs,
                      ws,
                      store_dirs,
                      batch_loading=192,
                      npm=0,
                      dump_full=False,
                      seiger1f=False,
                      path_count=0,
                      paths_disks=None,
                      con_line=True,
                      max_rho=0.,
                      mag=-6):

    engine = LocalEngine(store_superdirs=[store_dirs])
    store = engine.get_store(store_id)
    lat, lon, depth = params
    traces_uncuts = []
    tracess = []
    sources = []
    mtqt_ps = []
    count = 0
    nstations = len(stations)  # dumped alongside each batch below
    npm = len(strikes) * len(dips) * len(rakes) * len(vs) * len(ws)
    npm_rem = npm
    data_events = []
    labels_events = []
    events = []
    if seiger1f is True:
        current_path = paths_disks[path_count]
    k = 0
    for strike in strikes:
        for dip in dips:
            for rake in rakes:
                for v in vs:
                    for w in ws:
                        name = "scenario" + str(i)
                        event = model.Event(name=name,
                                            lat=lat,
                                            lon=lon,
                                            magnitude=mag,
                                            depth=depth)
                        kappa = strike
                        sigma = rake
                        h = dip

                        source_mtqt = MTQTSource(lon=lon,
                                                 lat=lat,
                                                 depth=depth,
                                                 w=w,
                                                 v=v,
                                                 kappa=kappa,
                                                 sigma=sigma,
                                                 h=h,
                                                 magnitude=mag)

                        response = engine.process(source_mtqt, targets)
                        traces_synthetic = response.pyrocko_traces()

                        event.moment_tensor = source_mtqt.pyrocko_moment_tensor(
                        )

                        if dump_full is True:
                            traces_uncut = copy.deepcopy(traces_synthetic)
                            traces_uncuts.append(traces_uncut)
                        traces = []
                        for tr in traces_synthetic:
                            for st in stations:
                                if st.station == tr.station:
                                    dist = (orthodrome.distance_accurate50m(
                                        source_mtqt.lat, source_mtqt.lon,
                                        st.lat, st.lon) + st.elevation)  # *cake.m2d
                                    arrival = store.t('P', (source_mtqt.depth, dist))
                                    tr.chop(arrival - pre, arrival + post)
                                    traces.append(tr)
                            nsamples = len(tr.ydata)

                        rho = 1
                        mtqt_ps = [[rho, v, w, kappa, sigma, h]]

                        data_event, nsamples = prepare_waveforms([traces])
                        label_event = prepare_labels([event], mtqt_ps)
                        data_events.append(data_event)
                        labels_events.append(label_event)
                        events.append(event)

                        if count == batch_loading or npm_rem < batch_loading:
                            npm_rem = npm_rem - batch_loading
                            k = k + 1
                            if seiger1f is True:
                                free = os.statvfs(current_path)[
                                    0] * os.statvfs(current_path)[4]
                                if free < 80000:
                                    current_path = paths_disks[path_count + 1]
                                    path_count = path_count + 1
                                f = open(
                                    current_path +
                                    "grid_%s_%s_%s_%s/batch_%s_grid_%s_SDR%s_%s_%s_%s_%s"
                                    % (store_id, lat, lon, int(depth), count,
                                       i, strike, dip, rake, v, w), 'ab')
                                pickle.dump([
                                    data_events, labels_events, nstations,
                                    nsamples, events
                                ], f)
                                f.close()
                            else:
                                util.ensuredir(
                                    "grids/grid_%s_%s_%s_%s/" %
                                    (store_id, lat, lon, int(depth)))
                                f = open(
                                    "grids/grid_%s_%s_%s_%s/batch_%s_grid_%s_SDR%s_%s_%s_%s_%s"
                                    % (store_id, lat, lon, int(depth), count,
                                       i, strike, dip, rake, v, w), 'ab')
                                pickle.dump([
                                    data_events, labels_events, nsamples,
                                    events
                                ], f)
                                f.close()

                            count = 0
                            data_events = []
                            labels_events = []
                            events = []
                            traces_uncuts = []
                            mtqt_ps = []

                        else:
                            count = count + 1
    return []
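
The bookkeeping at the end of get_parallel_mtqt accumulates training samples and pickles them to disk every batch_loading iterations, flushing whatever is left at the end. Stripped of the seismology, the pattern looks like this (file names and batch size are arbitrary):

import pickle

batch_loading = 4
buffer, batch_index = [], 0
for sample in range(10):  # stand-in for the nested source-parameter loops
    buffer.append(sample)
    if len(buffer) == batch_loading:
        with open('batch_%03d.pkl' % batch_index, 'wb') as f:
            pickle.dump(buffer, f)
        buffer, batch_index = [], batch_index + 1
if buffer:  # flush the remainder
    with open('batch_%03d.pkl' % batch_index, 'wb') as f:
        pickle.dump(buffer, f)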
Example #30
0
import os

from pyrocko.gf import LocalEngine, RectangularSource, ws
from pyrocko.plot.directivity import plot_directivity

km = 1e3
# The store we are going extract data from:
store_id = 'iceland_reg_v2'

# First, download a Greens Functions store. If you already have one that you
# would like to use, you can skip this step and point the *store_superdirs* in
# the next step to that directory.

if not os.path.exists(store_id):
    ws.download_gf_store(site='kinherd', store_id=store_id)

# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store.
engine = LocalEngine(store_superdirs=['.'])

# Create a RectangularSource with uniform slip.
rect_source = RectangularSource(depth=1.6 * km,
                                strike=240.,
                                dip=76.6,
                                rake=-.4,
                                anchor='top',
                                nucleation_x=-.57,
                                nucleation_y=-.59,
                                velocity=2070.,
                                length=27 * km,
                                width=9.4 * km,
                                slip=1.4)

resp = plot_directivity(engine,
Example #31
0
import os

import numpy as np
import tensorflow as tf
from tensorflow.keras.optimizers import Adam

import cnn_util  # project-local module providing the loss function used below

pi = np.pi

np.random.seed(1234)
tf.random.set_seed(1234)

# Use the following store directory
store_dirs = "gf_stores/"

# Use the following store id

store_id = "mojavelargemlhf"

engine = LocalEngine(store_superdirs=[store_dirs])

model = tf.keras.models.load_model(
    'models/model_mechanism_single_gp_bnn_MT_mojavelargemlhf_5000.tf',
    compile=False)
#model = tf.keras.models.load_model('/media/asteinbe/aki/models_halfwork/model_mojavelargemlhf_35.919999999999995_-117.68999999999997_4000', compile=False)
# model compilation in a separate step because we use TensorFlow Probability layers and loss
model.compile(optimizer=Adam(),
              loss=cnn_util.loss_function_negative_log_likelihood())

# To be able to evaluate the negative log-likelihood for a given input we also need to load the weights again
#checkpoint_status = model.load_weights('models/model_weights_mechanism_single_gp_bnn_MT_mojavelargemlhf_5000')

data_dir = "data_syn/events/"

#data_dir="data/events/"
                    mod.profile('z') / 1000.,
                    label=label)
            ax.set_ylim([0, max_depth / 1000.])
            if xlim:
                ax.set_xlim(xlim)
            ax.set_ylabel('Depth [km]')
            ax.set_xlabel('$v_p$ [km/s]')
            if combine and len(store_ids) > 1:
                ax.legend(fontsize=9)
            else:
                ax.set_title(label)
            ax.invert_yaxis()
    fig.savefig(out_filename, pad_inches=0.04, bbox_inches='tight')


engine = LocalEngine(use_config=True)
parameters = ['vp']
store_ids = ['castor_geres_4']
max_depth = 15000.
xlim = (2, 9)

# Can be receiver or source:
store_id_mapping = ['Source Site']
out_filename = 'velocity_model_source.pdf'
which = 'source'

make_plot(which, store_ids, engine, parameters, out_filename, max_depth, xlim,
          store_id_mapping)
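
make_plot reads each store's 1-D earth model and plots its vp profile against depth; the core matplotlib calls reduce to something like the following sketch (the model arrays are invented, not read from a store):

import numpy as num
import matplotlib.pyplot as plt

z = num.array([0., 1e3, 5e3, 15e3])   # depth in m
vp = num.array([3.2, 5.5, 6.1, 6.8])  # vp in km/s

fig, ax = plt.subplots()
ax.plot(vp, z / 1000., label='Source Site')
ax.set_ylim([0., 15.])
ax.set_xlabel('$v_p$ [km/s]')
ax.set_ylabel('Depth [km]')
ax.invert_yaxis()  # depth increases downwards
fig.savefig('velocity_model_sketch.pdf', bbox_inches='tight')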

##############################################
out_filename = 'velocity_models.pdf'
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    # Use a list comprehension: under Python 3, filter() returns an iterator,
    # so len() on its result would fail.
    station = [s for s in stations
               if match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id)]
    assert len(station) == 1
    station = station[0]
    targets = [station_to_target(station, quantity=quantity, store_id=settings.store_id)]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                # Shift the target onto the nearest edge of the store's distance
                # range; leave it untouched if it is already within bounds.
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                    t.lat, t.lon = newlat, newlon
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                    t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz-minz)*0.02
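    # relative_scale: one unit of scaled amplitude spans 2% of the tested depth
    # range, keeping the wiggles of neighbouring test depths from overlapping.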
    for s, t, tr in request.iter_results():
        if quantity=='velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        # Travel time of the earliest stored phase ('begin') at this source
        # depth and distance, used to align the trace on the theoretical onset.
        onset = engine.get_store(t.store_id).t(
            'begin', (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left=False, right=False,
                           labelleft=False)

        y_pos = s.depth
        xdata = tr.get_xdata()-onset-s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time+onset+zoom_window[0],
                          tmax=event.time+onset+zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata/ampl_scale)*relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:  # optionally shade the synthetic wiggles (disabled)
            ax.fill_between(xdata, y_pos, ydata, where=ydata < y_pos,
                            color='black', alpha=0.5)
        ax.text(zoom_window[0]*1.09, y_pos, '%1.1f' % (s.depth/1000.),
                horizontalalignment='right')
        if False:  # optionally mark the theoretical pP arrival (disabled)
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                      distances=[s.distance_to(t)*cake.m2d],
                                      zstart=s.depth)

            try:
                # t_pp, not t: avoid shadowing the loop's target variable.
                t_pp = arrivals[0].t
                x_marker = [t_pp - onset] * 2
                y = [y_pos-(maxz-minz)*0.025, y_pos+(maxz-minz)*0.025]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(x_marker[1]-x_marker[1]*0.005, y[1], label,
                        #fontsize=12,
                        color='black',
                        verticalalignment='top',
                        horizontalalignment='right')

            except IndexError:
                logger.warning(
                    'no pP phase at d=%s z=%s stat=%s' % (
                        s.distance_to(t)*cake.m2d, s.depth, station.station))

    if len(traces) == 0:
        raise Exception('No trace found!')
    elif len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity=='displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        # All test sources share the epicentre, so reusing s (the last source
        # from the loop above) yields the correct source-receiver distance.
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left=False, right=False,
                           labelleft=False)

        y_pos = event.depth
        xdata = tr.get_xdata()-onset+correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=onset+zoom_window[0]+correction,
                          tmax=onset+zoom_window[1]+correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata/ampl_scale * settings.gain*settings.gain_record)*relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin-zrange*0.2, zmax+zrange*0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0, 0.6, 'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure) #, fontsize=12.)

    if fill:
        ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color=settings.color, alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax+zrange*0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {'array-id': ''.join(station.nsl()),
                  'event_name': event.name,
                  'event_time': time_to_str(event.time)}
        ax.text(0.5, 1.05, settings.title % params,
                horizontalalignment='center', 
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        elif settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        elif settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '
        else:
            quantity_info = ''

        captions = {'filters':''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. '%(f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1./store.config.deltat
        cax.text(0, 0, 'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s' % captions,
                 fontsize=12, transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
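
# A hedged sketch of how plot() might be invoked. The Settings object is not
# defined in this snippet, so every attribute below is inferred from its use
# in the function body; names and values are illustrative assumptions only.
from argparse import Namespace

example_settings = Namespace(
    trace_filename='traces.mseed',   # hypothetical input files
    event_filename='event.pf',
    station_filename='stations.pf',
    store_id='my_store',             # hypothetical GF store
    store_superdirs=['gf_stores'],
    depth=10., depths='5:15:1',      # test depths in km: start:stop:increment
    quantity='velocity',
    zoom=(-5., 15.),                 # time window around the onset [s]
    correction=0., gain=1., gain_record=1.,
    color='red', normalize=True,
    title=None, auto_caption=False, filters=[],
    save_as=None, force_nearest_neighbor=False,
    do_filter=lambda tr: tr)         # no-op stand-in for the filter hook

plot(example_settings, show=True)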