Example #1
    def test_explosion_source(self):
        target = gf.Target(interpolation='nearest_neighbor')

        ex = gf.ExplosionSource(magnitude=5., volume_change=4., depth=5 * km)

        with self.assertRaises(gf.DerivedMagnitudeError):
            ex.validate()

        ex = gf.ExplosionSource(depth=5 * km)

        ex.validate()

        self.assertEqual(ex.get_moment(), 1.0)

        ex = gf.ExplosionSource(magnitude=3.0, depth=5 * km)

        store = self.dummy_store()

        with self.assertRaises(gf.DerivedMagnitudeError):
            ex.get_volume_change()

        volume_change = ex.get_volume_change(store, target)

        with self.assertRaises(TypeError):
            ex.get_volume_change(store,
                                 gf.Target(interpolation='nearest_neighbour'))

        ex = gf.ExplosionSource(volume_change=volume_change, depth=5 * km)

        self.assertAlmostEqual(ex.get_magnitude(store, target), 3.0)

        ex = gf.ExplosionSource(magnitude=3.0, depth=-5.)

        with self.assertRaises(gf.DerivedMagnitudeError):
            ex.get_volume_change(store, target)
Example #2
    def test_explosion_source(self):

        target = gf.Target(interpolation='nearest_neighbor')

        ex = gf.ExplosionSource(magnitude=5., volume_change=4., depth=5 * km)

        with self.assertRaises(gf.DerivedMagnitudeError):
            ex.validate()

        ex = gf.ExplosionSource(depth=5 * km)

        ex.validate()

        self.assertEqual(ex.get_moment(), 1.0)

        # magnitude input
        magnitude = 3.
        ex = gf.ExplosionSource(magnitude=magnitude, depth=5 * km)

        store = self.dummy_store()

        with self.assertRaises(gf.DerivedMagnitudeError):
            ex.get_volume_change()

        volume_change = ex.get_volume_change(store, target)

        self.assertAlmostEqual(ex.get_magnitude(store, target), magnitude)

        # validate with MT source
        moment = ex.get_moment(store, target) * float(num.sqrt(2. / 3))

        mt = gf.MTSource(mnn=moment, mee=moment, mdd=moment)

        self.assertAlmostEqual(ex.get_magnitude(store, target),
                               mt.get_magnitude(store=store, target=target))

        # discretized sources
        d_ex = ex.discretize_basesource(store=store, target=target)
        d_mt = mt.discretize_basesource(store=store, target=target)

        d_ex_m6s = d_ex.get_source_terms('elastic10')
        d_mt_m6s = d_mt.get_source_terms('elastic10')

        numeq(d_ex_m6s, d_mt_m6s, 1e-20)

        # interpolation method
        with self.assertRaises(TypeError):
            ex.get_volume_change(store,
                                 gf.Target(interpolation='nearest_neighbour'))

        # volume change input
        ex = gf.ExplosionSource(volume_change=volume_change, depth=5 * km)

        self.assertAlmostEqual(ex.get_magnitude(store, target), 3.0)

        ex = gf.ExplosionSource(magnitude=3.0, depth=-5.)

        with self.assertRaises(gf.DerivedMagnitudeError):
            ex.get_volume_change(store, target)
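The two examples above boil down to a round trip between magnitude and volume change on an ExplosionSource. A minimal sketch of that pattern, assuming a Green's function store is installed and reachable through a LocalEngine under the hypothetical id 'my_store':

from pyrocko import gf

km = 1000.

# Assumption: a store with id 'my_store' exists in the engine configuration;
# any store covering a 5 km source depth would do.
engine = gf.LocalEngine(use_config=True)
store = engine.get_store('my_store')
target = gf.Target(store_id='my_store', interpolation='nearest_neighbor')

# Derive the volume change implied by a magnitude 3.0 explosion at 5 km depth ...
ex = gf.ExplosionSource(magnitude=3.0, depth=5 * km)
volume_change = ex.get_volume_change(store, target)

# ... and recover the same magnitude from that volume change.
ex2 = gf.ExplosionSource(volume_change=volume_change, depth=5 * km)
assert abs(ex2.get_magnitude(store, target) - 3.0) < 1e-4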
Example #3
    def test_discretize_rect_source_stf(self):

        store = self.dummy_homogeneous_store()
        target = gf.Target(interpolation='nearest_neighbor')
        stf = gf.HalfSinusoidSTF(duration=3.)

        for source in [
                gf.RectangularSource(depth=10 * km,
                                     slip=0.5,
                                     width=5 * km,
                                     length=5 * km,
                                     stf=stf,
                                     stf_mode='pre'),
                gf.RectangularSource(depth=10 * km,
                                     magnitude=5.0,
                                     width=5 * km,
                                     length=5 * km,
                                     decimation_factor=2,
                                     stf=stf,
                                     stf_mode='pre')
        ]:

            dsource = source.discretize_basesource(store, target)
            amplitudes = source._discretize(store, target)[2]
            assert amplitudes[0] != amplitudes[1]

            m1 = source.get_moment(store, target)
            m2 = dsource.centroid().pyrocko_moment_tensor().scalar_moment()
            assert abs(m1 - m2) < abs(m1 + m2) * 1e-6
Example #4
    def get_targets(self):
        targets = []
        for station in self.get_stations():
            channel_data = []
            channels = station.get_channels()
            if channels:
                for channel in channels:
                    channel_data.append(
                        [channel.name, channel.azimuth, channel.dip])

            else:
                for c_name in ['BHZ', 'BHE', 'BHN']:
                    channel_data.append([
                        c_name,
                        model.guess_azimuth_from_name(c_name),
                        model.guess_dip_from_name(c_name)
                    ])

            for c_name, c_azi, c_dip in channel_data:

                target = gf.Target(codes=(station.network, station.station,
                                          station.location, c_name),
                                   quantity='displacement',
                                   lat=station.lat,
                                   lon=station.lon,
                                   depth=station.depth,
                                   store_id=self.store_id,
                                   optimization='enable',
                                   interpolation='nearest_neighbor',
                                   azimuth=c_azi,
                                   dip=c_dip)

                targets.append(target)

        return targets
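A target list built this way is normally handed straight to a LocalEngine together with a source; the engine then returns one synthetic trace per source/target pair. A minimal sketch, where `source` and the availability of the configured store are assumptions rather than part of the example above:

# Sketch only: 'source' is any gf source object, 'targets' is the list
# returned by get_targets() above, and the referenced store is installed.
engine = gf.LocalEngine(use_config=True)
response = engine.process(source, targets)
for source, target, tr in response.iter_results():
    print(target.codes, tr.tmin, tr.tmax)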
Example #5
    def test_stf_pre_post(self):
        store_dir = self.get_pulse_store_dir()
        engine = gf.LocalEngine(store_dirs=[store_dir])
        store = engine.get_store('pulse')

        for duration in [0., 0.05, 0.1]:
            trs = []
            for mode in ['pre', 'post']:
                source = gf.ExplosionSource(
                    time=store.config.deltat * 0.5,
                    depth=200.,
                    moment=1.0,
                    stf=gf.BoxcarSTF(duration=duration),
                    stf_mode=mode)

                target = gf.Target(codes=('', 'STA', '', 'Z'),
                                   north_shift=500.,
                                   east_shift=0.,
                                   store_id='pulse')

                xtrs = engine.process(source, target).pyrocko_traces()
                for tr in xtrs:
                    tr.set_codes(location='%3.1f_%s' % (duration, mode))
                    trs.append(tr)

            tmin = max(tr.tmin for tr in trs)
            tmax = min(tr.tmax for tr in trs)
            for tr in trs:
                tr.chop(tmin, tmax)

            amax = max(num.max(num.abs(tr.ydata)) for tr in trs)
            perc = num.max(num.abs(trs[0].ydata - trs[1].ydata) / amax) * 100.
            if perc > 0.1:
                logger.warn('test_stf_pre_post: max difference of %.1f %%' %
                            perc)
Example #6
    def get_modelling_targets(self, codes, lat, lon, depth, store_id,
                              backazimuth):

        mtargets = []
        for channel in self.channels:
            target = gf.Target(quantity='displacement',
                               codes=codes + (channel, ),
                               lat=lat,
                               lon=lon,
                               depth=depth,
                               store_id=store_id)

            if channel == 'R':
                target.azimuth = backazimuth - 180.
                target.dip = 0.
            elif channel == 'T':
                target.azimuth = backazimuth - 90.
                target.dip = 0.
            elif channel == 'Z':
                target.azimuth = 0.
                target.dip = -90.

            mtargets.append(target)

        return mtargets
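The channel-to-orientation mapping used above can be written out explicitly; a short sketch (the backazimuth of 135 degrees is an arbitrary example value) showing the azimuth/dip pairs that get assigned:

# Orientation assigned to each channel for an arbitrary backazimuth,
# following the same convention as get_modelling_targets() above.
backazimuth = 135.
orientations = {
    'R': (backazimuth - 180., 0.),  # radial component, horizontal
    'T': (backazimuth - 90., 0.),   # transverse component, horizontal
    'Z': (0., -90.),                # vertical component
}
for channel, (azimuth, dip) in orientations.items():
    print(channel, azimuth, dip)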
Example #7
    def test_target_source_timing(self):
        store_dir = self.get_pulse_store_dir()
        engine = gf.LocalEngine(store_dirs=[store_dir])

        for stime in [0., -160000., time.time()]:
            source = gf.ExplosionSource(
                        depth=200.,
                        magnitude=4.,
                        time=stime)

            targets = [
                gf.Target(
                    codes=('', 'STA', '', component),
                    north_shift=500.,
                    tmin=source.time-300.,
                    tmax=source.time+300.,
                    east_shift=500.)

                for component in 'ZNE'
            ]

            response = engine.process(source, targets)
            synthetic_traces = response.pyrocko_traces()
            data = num.zeros(num.shape(synthetic_traces[0].ydata))
            for tr in synthetic_traces:
                data += tr.ydata

            sum_data = num.sum(num.abs(data))
            assert sum_data > 1.0
Example #8
    def test_optimization(self):
        store_dir = self.get_pulse_store_dir()
        engine = gf.LocalEngine(store_dirs=[store_dir])

        sources = [
            gf.RectangularExplosionSource(time=0.0025,
                                          depth=depth,
                                          moment=1.0,
                                          length=100.,
                                          width=0.,
                                          nucleation_x=-1)
            for depth in [100., 200., 300.]
        ]

        targetss = [[
            gf.Target(codes=('', 'STA', opt, component),
                      north_shift=500.,
                      east_shift=125.,
                      depth=depth,
                      interpolation='multilinear',
                      optimization=opt) for component in 'ZNE'
            for depth in [0., 5., 10]
        ] for opt in ('disable', 'enable')]

        resps = [engine.process(sources, targets) for targets in targetss]

        iters = [resp.iter_results() for resp in resps]
        for i in range(len(sources) * len(targetss[0])):
            s1, t1, tr1 = next(iters[0])
            s2, t2, tr2 = next(iters[1])
            self.assertEqual(tr1.data_len(), tr2.data_len())
            self.assertEqual(tr1.tmin, tr2.tmin)
            self.assertTrue(numeq(tr1.ydata, tr2.ydata, 0.0001))
Example #9
    def test_combine_dsources(self):
        store = self.dummy_store()
        dummy_target = gf.Target()
        for S in gf.source_classes:
            if not hasattr(S, 'discretize_basesource'):
                continue

            for lats in [[10., 10., 10.], [10., 11., 12.]]:
                sources = [
                    default_source(S,
                                   lat=lat,
                                   lon=20.,
                                   depth=1000.,
                                   north_shift=500.,
                                   east_shift=500.) for lat in lats
                ]

                dsources = [
                    s.discretize_basesource(store, target=dummy_target)
                    for s in sources
                ]

                DS = dsources[0].__class__

                dsource = DS.combine(dsources)
                assert dsource.nelements == sum(s.nelements for s in dsources)
Example #10
    def test_qssp_build_2020_rotational(self):
        qssp.init(self.tmpdir,
                  '2020',
                  config_params=dict(stored_quantity='rotation',
                                     source_depth_max=10e3,
                                     distance_min=500e3,
                                     distance_max=600e3))

        store = gf.store.Store(self.tmpdir, 'r')
        store.make_ttt()
        qssp.build(self.tmpdir)

        del store

        engine = gf.LocalEngine(store_dirs=[self.tmpdir])

        source = gf.DCSource(lat=0., lon=0., depth=10e3, magnitude=6.0)

        targets = [
            gf.Target(quantity='rotation',
                      codes=('', 'ROT', '', comp),
                      lat=0.,
                      lon=0.,
                      north_shift=500e3,
                      east_shift=100e3) for comp in 'NEZ'
        ]

        engine.process(source, targets)
Example #11
def main(n, depths, durations, noisy, noise_factor, velocity_model, long, tmin,
         tmax, save_file, save_dir, _run):

    engine = LocalEngine(store_dirs=[velocity_model])

    target = gf.Target(quantity='displacement',
                       lat=0,
                       lon=long,
                       store_id=velocity_model,
                       codes=('NET', 'STA', 'LOC', 'E'),
                       tmin=tmin,
                       tmax=tmax)

    df = createFocalMechanisms(n,
                               depths,
                               durations,
                               target,
                               engine,
                               noisy=noisy,
                               noise_factor=noise_factor)

    if not os.path.exists(save_dir):
        os.mkdir(save_dir)

    path = save_dir + save_file

    df.to_pickle(path)

    ex.add_artifact(path)
Example #12
    def test_pulse(self):
        store_dir = self.get_pulse_store_dir()

        engine = gf.LocalEngine(store_dirs=[store_dir])

        sources = [
            gf.ExplosionSource(
                time=0.0,
                depth=depth,
                moment=moment)

            for moment in (1.0, 2.0, 3.0) for depth in [100., 200., 300.]
        ]

        targets = [
            gf.Target(
                codes=('', 'STA', '', component),
                north_shift=500.,
                east_shift=0.)

            for component in 'ZNE'
        ]

        pulse = engine.get_store_extra(None, 'pulse')
        store = engine.get_store('pulse')

        response = engine.process(sources=sources, targets=targets)
        for source, target, tr in response.iter_results():
            t = tr.get_xdata()

            dist = math.sqrt((source.depth - target.depth)**2 +
                             source.distance_to(target)**2)

            data = pulse.evaluate(dist, t-source.time)

            phi = math.atan2((source.depth - target.depth),
                             source.distance_to(target)) * r2d

            azi, bazi = source.azibazi_to(target)

            data *= source.get_moment(store) * math.sqrt(2./3.)

            if tr.channel.endswith('N'):
                data *= math.cos(phi*d2r) * math.cos(azi*d2r)
            elif tr.channel.endswith('E'):
                data *= math.cos(phi*d2r) * math.sin(azi*d2r)
            elif tr.channel.endswith('Z'):
                data *= math.sin(phi*d2r)

            tr2 = tr.copy(data=False)
            tr2.set_ydata(data)
            tr2.set_codes(location='X')

            num.testing.assert_almost_equal(data, tr.ydata, 2)
Example #13
    def test_source_times(self):
        store = self.dummy_store()
        dummy_target = gf.Target()
        for S in gf.source_classes:
            if not hasattr(S, 'discretize_basesource'):
                continue

            for t in [0.0, util.str_to_time('2014-01-01 10:00:00')]:
                source = default_source(S, time=t)
                dsource = source.discretize_basesource(store,
                                                       target=dummy_target)
                cent = dsource.centroid()
                assert numeq(cent.time, t, 0.0001)
Example #14
    def test_process_timeseries(self):
        engine = gf.LocalEngine(use_config=True)

        sources = [
            gf.ExplosionSource(
                time=0.0,
                depth=depth,
                moment=moment)

            for moment in [2., 4., 8.] for depth in [3000., 6000., 12000.]
        ]

        targets = [
            gf.Target(
                codes=('', 'ST%d' % i, '', component),
                north_shift=shift*km,
                east_shift=0.,
                tmin=tmin,
                store_id='global_2s',
                tmax=None if tmin is None else tmin+40.)

            for component in 'ZNE' for i, shift in enumerate([100])
            for tmin in [None, 5., 20.]
        ]

        response_sum = engine.process(sources=sources, targets=targets,
                                      calc_timeseries=False, nthreads=1)

        response_calc = engine.process(sources=sources, targets=targets,
                                       calc_timeseries=True, nthreads=1)

        for (source, target, tr), (source_n, target_n, tr_n) in zip(
                response_sum.iter_results(), response_calc.iter_results()):
            assert source is source_n
            assert target is target_n

            t1 = tr.get_xdata()
            t2 = tr_n.get_xdata()
            num.testing.assert_equal(t1, t2)

            disp1 = tr.get_ydata()
            disp2 = tr_n.get_ydata()

            num.testing.assert_equal(disp1, disp2)
Example #15
    def test_pulse_decimate(self):
        store_dir = self.get_pulse_store_dir()

        store = gf.Store(store_dir)
        store.make_decimated(2)

        engine = gf.LocalEngine(store_dirs=[store_dir])
        # pulse = engine.get_store_extra(None, 'pulse')

        source = gf.ExplosionSource(
            time=0.0,
            depth=100.,
            moment=1.0)

        targets = [
            gf.Target(
                codes=('', 'STA', '%s' % sample_rate, component),
                sample_rate=sample_rate,
                north_shift=500.,
                east_shift=0.)

            for component in 'N'
            for sample_rate in [None, store.config.sample_rate / 2.0]
        ]

        response = engine.process(source, targets)

        trs = []
        for source, target, tr in response.iter_results():
            tr.extend(0., 1.)
            if target.sample_rate is None:
                tr.downsample_to(2./store.config.sample_rate, snap=True)

            trs.append(tr)

        tmin = max(tr.tmin for tr in trs)
        tmax = min(tr.tmax for tr in trs)

        for tr in trs:
            tr.chop(tmin, tmax)

        num.testing.assert_almost_equal(
            trs[0].ydata, trs[1].ydata, 2)
Example #16
    def test_discretize_rect_source(self):

        store = self.dummy_homogeneous_store()
        target = gf.Target(interpolation='nearest_neighbor')

        for source in [
                gf.RectangularSource(depth=10 * km,
                                     slip=0.5,
                                     width=5 * km,
                                     length=5 * km),
                gf.RectangularSource(depth=10 * km,
                                     magnitude=5.0,
                                     width=5 * km,
                                     length=5 * km)
        ]:

            dsource = source.discretize_basesource(store, target)
            m1 = source.get_moment(store, target)
            m2 = dsource.centroid().pyrocko_moment_tensor().scalar_moment()
            assert abs(m1 - m2) < abs(m1 + m2) * 1e-6
Example #17
    def test_rect_source(self):

        store = self.dummy_homogeneous_store()

        rect1 = gf.RectangularSource(depth=10 * km,
                                     magnitude=5.0,
                                     width=5 * km,
                                     length=5 * km)

        rect2 = gf.RectangularSource(
            depth=10 * km,
            slip=pmt.magnitude_to_moment(5.0) /
            (5 * km * 5 * km *
             store.config.earthmodel_1d.material(10 * km).shear_modulus()),
            width=5 * km,
            length=5 * km)

        self.assertAlmostEqual(
            rect1.get_magnitude(),
            rect2.get_magnitude(store,
                                gf.Target(interpolation='nearest_neighbor')))
Example #18
    def base_key(self):
        # return the STF class name and its defining parameter values
        return (self.__class__.__name__, self.duration, self.anchor)


if __name__ == '__main__':

    model = 'crust2_m5_hardtop_16Hz'

    engine = LocalEngine(store_dirs=[model])

    target = gf.Target(quantity='displacement',
                       lat=0,
                       lon=1,
                       store_id=model,
                       codes=('NET', 'STA', 'LOC', 'E'),
                       tmin=10,
                       tmax=75)

    DC_tensors = [createMT_DC(mag) for mag in 4 * np.random.rand(1000)]
    CLVD_tensors = [createMT_CLVD(mag) for mag in 4 * np.random.rand(1000)]
    Iso_tensors = [createMT_Isotropic(mag) for mag in 4 * np.random.rand(1000)]

    moment_tensors = DC_tensors + CLVD_tensors + Iso_tensors

    def process(source, target):
        trace = engine.process(source, [target]).pyrocko_traces()[0]
        trace.ydata /= np.max(trace.ydata)
        return trace
Example #19
        def test_make_params_bench(store, dim, ntargets, interpolation,
                                   nthreads):
            source = gf.RectangularSource(lat=0.,
                                          lon=0.,
                                          depth=10 * km,
                                          north_shift=0.1,
                                          east_shift=0.1,
                                          width=dim,
                                          length=dim)

            targets = [
                gf.Target(lat=random.random() * 10.,
                          lon=random.random() * 10.,
                          north_shift=0.1,
                          east_shift=0.1) for x in range(ntargets)
            ]

            dsource = source.discretize_basesource(store, targets[0])
            source_coords_arr = dsource.coords5()
            mts_arr = dsource.m6s

            receiver_coords_arr = num.empty((len(targets), 5))
            for itarget, target in enumerate(targets):
                receiver = target.receiver(store)
                receiver_coords_arr[itarget, :] = \
                    [receiver.lat, receiver.lon, receiver.north_shift,
                     receiver.east_shift, receiver.depth]
            ns = mts_arr.shape[0]
            label = '_ns%04d_nt%04d_%s_np%02d' % (ns, len(targets),
                                                  interpolation, nthreads)

            @benchmark.labeled('c%s' % label)
            def make_param_c():
                return store_ext.make_sum_params(store.cstore,
                                                 source_coords_arr, mts_arr,
                                                 receiver_coords_arr,
                                                 'elastic10', interpolation,
                                                 nthreads)

            @benchmark.labeled('p%s' % label)
            def make_param_python():
                weights_c = []
                irecords_c = []
                for itar, target in enumerate(targets):
                    receiver = target.receiver(store)
                    dsource = source.discretize_basesource(store, target)

                    for i, (component, args, delays, weights) in \
                            enumerate(store.config.make_sum_params(
                                dsource, receiver)):
                        if len(weights_c) <= i:
                            weights_c.append([])
                            irecords_c.append([])

                        if interpolation == 'nearest_neighbor':
                            irecords = num.array(store.config.irecords(*args))
                            weights = num.array(weights)
                        else:
                            assert interpolation == 'multilinear'
                            irecords, ip_weights =\
                                store.config.vicinities(*args)
                            neach = irecords.size // args[0].size
                            weights = num.repeat(weights, neach) * ip_weights
                            delays = num.repeat(delays, neach)

                        weights_c[i].append(weights)
                        irecords_c[i].append(irecords)
                for c in range(len(weights_c)):
                    weights_c[c] = num.concatenate([w for w in weights_c[c]])
                    irecords_c[c] = num.concatenate(
                        [ir for ir in irecords_c[c]])

                return list(zip(weights_c, irecords_c))

            rc = make_param_c()
            rp = make_param_python()

            logger.info(benchmark.__str__(header=False))
            benchmark.clear()

            # Comparing the results
            if isinstance(store.config, gf.meta.ConfigTypeA):
                idim = 4
            elif isinstance(store.config, gf.meta.ConfigTypeB):
                idim = 8
            if interpolation == 'nearest_neighbor':
                idim = 1

            # nsummands_scheme = [5, 5, 3]  # elastic8
            nsummands_scheme = [6, 6, 4]  # elastic10
            for i, nsummands in enumerate(nsummands_scheme):
                for r in [0, 1]:
                    r_c = rc[i][r]
                    r_p = rp[i][r].reshape(ntargets, nsummands, ns * idim)
                    r_p = num.transpose(r_p, axes=[0, 2, 1])

                    num.testing.assert_almost_equal(r_c, r_p.flatten())
Example #20
    def _off_test_synthetic(self):

        from pyrocko import gf

        km = 1000.
        nstations = 10
        edepth = 5 * km
        store_id = 'crust2_d0'

        swin = 2.
        lwin = 9. * swin
        ks = 1.0
        kl = 1.0
        kd = 3.0

        engine = gf.get_engine()
        snorths = (num.random.random(nstations) - 1.0) * 50 * km
        seasts = (num.random.random(nstations) - 1.0) * 50 * km
        targets = []
        for istation, (snorth, seast) in enumerate(zip(snorths, seasts)):
            targets.append(
                gf.Target(quantity='displacement',
                          codes=('', 's%03i' % istation, '', 'Z'),
                          north_shift=float(snorth),
                          east_shift=float(seast),
                          store_id=store_id,
                          interpolation='multilinear'))

        source = gf.DCSource(north_shift=50 * km,
                             east_shift=50 * km,
                             depth=edepth)

        store = engine.get_store(store_id)

        response = engine.process(source, targets)
        trs = []

        station_traces = defaultdict(list)
        station_targets = defaultdict(list)
        for source, target, tr in response.iter_results():
            tp = store.t('any_P', source, target)
            t = tp - 5 * tr.deltat + num.arange(11) * tr.deltat
            if False:
                gauss = trace.Trace(tmin=t[0],
                                    deltat=tr.deltat,
                                    ydata=num.exp(-((t - tp)**2) /
                                                  ((2 * tr.deltat)**2)))

                tr.ydata[:] = 0.0
                tr.add(gauss)

            trs.append(tr)
            station_traces[target.codes[:3]].append(tr)
            station_targets[target.codes[:3]].append(target)

        station_stalta_traces = {}
        for nsl, traces in station_traces.items():
            etr = None
            for tr in traces:
                sqr_tr = tr.copy(data=False)
                sqr_tr.ydata = tr.ydata**2
                if etr is None:
                    etr = sqr_tr
                else:
                    etr += sqr_tr

            autopick.recursive_stalta(swin, lwin, ks, kl, kd, etr)
            etr.set_codes(channel='C')

            station_stalta_traces[nsl] = etr

        trace.snuffle(trs + list(station_stalta_traces.values()))
        deltat = trs[0].deltat

        nnorth = 50
        neast = 50

        size = 200 * km

        north = num.linspace(-size, size, nnorth)
        north2 = num.repeat(north, neast)
        east = num.linspace(-size, size, neast)
        east2 = num.tile(east, nnorth)
        depth = 5 * km

        def tcal(target, i):
            try:
                return store.t(
                    'any_P',
                    gf.Location(north_shift=north2[i],
                                east_shift=east2[i],
                                depth=depth), target)

            except gf.OutOfBounds:
                return 0.0

        nsls = sorted(station_stalta_traces.keys())

        tts = num.fromiter((tcal(station_targets[nsl][0], i)
                            for i in range(nnorth * neast) for nsl in nsls),
                           dtype=float)

        arrays = [
            station_stalta_traces[nsl].ydata.astype(float) for nsl in nsls
        ]
        offsets = num.array([
            int(round(station_stalta_traces[nsl].tmin / deltat))
            for nsl in nsls
        ],
                            dtype=num.int32)
        shifts = -num.array([int(round(tt / deltat)) for tt in tts],
                            dtype=num.int32).reshape(nnorth * neast, nstations)
        weights = num.ones((nnorth * neast, nstations))

        print(shifts[25 * neast + 25] * deltat)

        print(offsets.dtype, shifts.dtype, weights.dtype)

        print('stack start')
        mat, ioff = parstack(arrays, offsets, shifts, weights, 1)
        print('stack stop')

        mat = num.reshape(mat, (nnorth, neast))

        from matplotlib import pyplot as plt

        fig = plt.figure()

        axes = fig.add_subplot(1, 1, 1, aspect=1.0)

        axes.contourf(east / km, north / km, mat)

        axes.plot(
            g(targets, 'east_shift') / km,
            g(targets, 'north_shift') / km, '^')
        axes.plot(source.east_shift / km, source.north_shift / km, 'o')
        plt.show()
Example #21
    def test_against_kiwi(self):
        engine = gf.get_engine()
        store_id = 'chile_70km_crust'
        try:
            store = engine.get_store(store_id)
        except gf.NoSuchStore:
            logger.warn('GF Store %s not available - skipping test' % store_id)
            return

        base_source = gf.RectangularSource(
            depth=15*km,
            strike=0.,
            dip=90.,
            rake=0.,
            magnitude=4.5,
            nucleation_x=-1.,
            length=10*km,
            width=0*km,
            stf=gf.BoxcarSTF(duration=1.0))

        base_event = base_source.pyrocko_event()

        channels = 'NEZ'
        nstations = 10
        stations = []
        targets = []
        for istation in range(nstations):
            dist = rand(40.*km, 900*km)
            azi = rand(-180., 180.)
            north_shift = dist * math.cos(azi*d2r)
            east_shift = dist * math.sin(azi*d2r)
            lat, lon = od.ne_to_latlon(0., 0., north_shift, east_shift)
            sta = 'S%02i' % istation
            station = model.Station(
                '', sta, '',
                lat=lat,
                lon=lon)

            station.set_channels_by_name('N', 'E', 'Z')
            stations.append(station)

            for cha in channels:
                target = gf.Target(
                    codes=station.nsl() + (cha,),
                    lat=lat,
                    lon=lon,
                    quantity='displacement',
                    interpolation='multilinear',
                    optimization='enable',
                    store_id=store_id)

                targets.append(target)

        from tunguska import glue

        nsources = 10

        # nprocs_max = multiprocessing.cpu_count()
        nprocs = 1

        try:
            seis = glue.start_seismosizer(
                gfdb_path=op.join(store.store_dir, 'db'),
                event=base_event,
                stations=stations,
                hosts=['localhost']*nprocs,
                balance_method='123321',
                effective_dt=0.5,
                verbose=False)

            ksource = to_kiwi_source(base_source)

            seis.set_source(ksource)

            recs = seis.get_receivers_snapshot(('syn',), (), 'plain')
            trs = []
            for rec in recs:
                for tr in rec.get_traces():
                    tr.set_codes(channel=transchan[tr.channel])
                    trs.append(tr)

            trs2 = engine.process(base_source, targets).pyrocko_traces()

            trace.snuffle(trs + trs2)

            seis.set_synthetic_reference()

            for sourcetype in ['point', 'rect']:
                sources = []
                for isource in range(nsources):
                    m = pmt.MomentTensor.random_dc()
                    strike, dip, rake = map(float, m.both_strike_dip_rake()[0])

                    if sourcetype == 'point':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            nucleation_x=0.0,
                            nucleation_y=0.0,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0),
                            stf=gf.BoxcarSTF(duration=1.0))

                    elif sourcetype == 'rect':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            length=10*km,
                            width=5*km,
                            nucleation_x=-1.,
                            nucleation_y=0,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0),
                            stf=gf.BoxcarSTF(duration=1.0))
                    else:
                        assert False

                    sources.append(source)

                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    resp = engine.process(sources, targets, nprocs=nprocs)
                    t1 = time.time()
                    if temperature == 'hot':
                        dur_pyrocko = t1 - t0

                    del resp

                ksources = list(map(to_kiwi_source, sources))

                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    seis.make_misfits_for_sources(
                        ksources, show_progress=False)
                    t1 = time.time()
                    if temperature == 'hot':
                        dur_kiwi = t1 - t0

                print('pyrocko %-5s %5.2fs  %5.1fx' % (
                    sourcetype, dur_pyrocko, 1.0))
                print('kiwi    %-5s %5.2fs  %5.1fx' % (
                    sourcetype, dur_kiwi, dur_pyrocko/dur_kiwi))

        finally:
            seis.close()
            del seis
Example #22
        def test_timeseries(store,
                            source,
                            dim,
                            ntargets,
                            interpolation,
                            nthreads,
                            niter=1,
                            random_itmin=False,
                            random_nsamples=False):

            source = gf.RectangularSource(lat=0.,
                                          lon=0.,
                                          depth=3 * km,
                                          length=dim,
                                          width=dim,
                                          anchor='top')

            targets = [
                gf.Target(lat=rstate.uniform(), lon=rstate.uniform())
                for x in range(ntargets)
            ]

            dsource = source.discretize_basesource(store, targets[0])
            source_coords_arr = dsource.coords5()
            mts_arr = dsource.m6s

            receiver_coords_arr = num.empty((len(targets), 5))
            for itarget, target in enumerate(targets):
                receiver = target.receiver(store)
                receiver_coords_arr[itarget, :] = \
                    [receiver.lat, receiver.lon, receiver.north_shift,
                     receiver.east_shift, receiver.depth]
            nsources = mts_arr.shape[0]
            delays = num.zeros(nsources)

            itmin = num.zeros(ntargets, dtype=num.int32)
            nsamples = num.full(ntargets, -1, dtype=num.int32)

            if random_itmin:
                itmin = num.random.randint(-20,
                                           5,
                                           size=ntargets,
                                           dtype=num.int32)

            if random_nsamples:
                nsamples = num.random.randint(10,
                                              100,
                                              size=ntargets,
                                              dtype=num.int32)

            @benchmark.labeled('calc_timeseries-%s' % interpolation)
            def calc_timeseries():
                return store_ext.store_calc_timeseries(
                    store.cstore, source_coords_arr, mts_arr, delays,
                    receiver_coords_arr, 'elastic10', interpolation, itmin,
                    nsamples, nthreads)

            @benchmark.labeled('sum_timeseries-%s' % interpolation)
            def sum_timeseries():
                results = []
                for itarget, target in enumerate(targets):
                    params = store_ext.make_sum_params(
                        store.cstore, source_coords_arr, mts_arr,
                        target.coords5[num.newaxis, :].copy(), 'elastic10',
                        interpolation, nthreads)
                    for weights, irecords in params:
                        d = num.zeros(irecords.shape[0], dtype=num.float32)
                        r = store_ext.store_sum(store.cstore, irecords, d,
                                                weights, int(itmin[itarget]),
                                                int(nsamples[itarget]))
                        results.append(r)

                return results

            for _ in range(niter):
                res_calc = calc_timeseries()
            for _ in range(niter):
                res_sum = sum_timeseries()

            for c, s in zip(res_calc, res_sum):
                num.testing.assert_equal(c[0], s[0], verbose=True)
                for cc, cs in zip(c[1:-1], s[1:]):
                    continue
                    assert cc == cs
Example #23
    def test_qseis_vs_ahfull(self):
        random.seed(23)

        vp = 5.8 * km
        vs = 3.46 * km

        mod = cake.LayeredModel.from_scanlines(
            cake.read_nd_model_str('''
  0. %(vp)g %(vs)g 2.6 1264. 600.
 20. %(vp)g %(vs)g 2.6 1264. 600.'''.lstrip() % dict(vp=vp / km, vs=vs / km)))

        store_id_qseis = 'homogeneous_qseis'
        store_id_ahfull = 'homogeneous_ahfull'

        qsconf = qseis.QSeisConfig()
        qsconf.qseis_version = '2006a'

        textra = 5.0

        qsconf.time_region = (gf.meta.Timing('{vel:%g}-%g' %
                                             (vp / km, textra)),
                              gf.meta.Timing('{vel:%g}+%g' %
                                             (vs / km, textra)))

        qsconf.cut = (gf.meta.Timing('{vel:%g}-%g' % (vp / km, textra)),
                      gf.meta.Timing('{vel:%g}+%g' % (vs / km, textra)))

        qsconf.relevel_with_fade_in = True

        qsconf.fade = (gf.meta.Timing('{vel:%g}-%g' % (vp / km, textra)),
                       gf.meta.Timing('{vel:%g}-%g' % (vp / km, 0.)),
                       gf.meta.Timing('{vel:%g}+%g' % (vs / km, 0.)),
                       gf.meta.Timing('{vel:%g}+%g' % (vs / km, textra)))

        qsconf.wavelet_duration_samples = 0.001
        qsconf.sw_flat_earth_transform = 0
        qsconf.filter_surface_effects = 1
        qsconf.wavenumber_sampling = 5.
        qsconf.aliasing_suppression_factor = 0.01

        sample_rate = 10.

        config = gf.meta.ConfigTypeA(
            id=store_id_qseis,
            sample_rate=sample_rate,
            receiver_depth=0. * km,
            source_depth_min=1. * km,
            source_depth_max=19 * km,
            source_depth_delta=6. * km,
            distance_min=2. * km,
            distance_max=20 * km,
            distance_delta=2 * km,
            modelling_code_id='qseis.2006a',
            earthmodel_1d=mod,
            tabulated_phases=[
                gf.meta.TPDef(id='begin', definition='p,P,p\\,P\\'),
                gf.meta.TPDef(id='end', definition='s,S,s\\,S\\'),
            ])

        config.validate()

        store_dir_qseis = mkdtemp(prefix=store_id_qseis)
        self.tempdirs.append(store_dir_qseis)

        gf.store.Store.create_editables(store_dir_qseis,
                                        config=config,
                                        extra={'qseis': qsconf})

        store = gf.store.Store(store_dir_qseis, 'r')
        store.make_ttt()
        store.close()

        try:
            qseis.build(store_dir_qseis, nworkers=1)
        except qseis.QSeisError as e:
            if str(e).find('could not start qseis') != -1:
                logger.warn('qseis not installed; '
                            'skipping test_pyrocko_gf_vs_qseis')
                return
            else:
                raise

        config = gf.meta.ConfigTypeA(
            id=store_id_ahfull,
            sample_rate=sample_rate,
            receiver_depth=0. * km,
            source_depth_min=1. * km,
            source_depth_max=19 * km,
            source_depth_delta=6. * km,
            distance_min=2. * km,
            distance_max=20 * km,
            distance_delta=2 * km,
            modelling_code_id='ahfullgreen',
            earthmodel_1d=mod,
            tabulated_phases=[
                gf.meta.TPDef(id='begin', definition='p,P,p\\,P\\'),
                gf.meta.TPDef(id='end', definition='s,S,s\\,S\\'),
            ])

        config.validate()

        store_dir_ahfull = mkdtemp(prefix=store_id_qseis)
        self.tempdirs.append(store_dir_ahfull)

        gf.store.Store.create_editables(store_dir_ahfull, config=config)

        store = gf.store.Store(store_dir_ahfull, 'r')
        store.make_ttt()
        store.close()

        ahfullgreen.build(store_dir_ahfull, nworkers=1)

        sdepth = rand(config.source_depth_min, config.source_depth_max)
        sdepth = round(
            (sdepth - config.source_depth_min)
            / config.source_depth_delta) * config.source_depth_delta \
            + config.source_depth_min

        source = gf.MTSource(lat=0., lon=0., depth=sdepth)

        source.m6 = tuple(rand(-1., 1.) for x in range(6))

        for ii in range(5):
            azi = random.random() * 365.
            dist = rand(config.distance_min, config.distance_max)
            dist = round(dist / config.distance_delta) * config.distance_delta

            dnorth = dist * math.cos(azi * d2r)
            deast = dist * math.sin(azi * d2r)

            targets = []
            for cha in 'rtz':
                target = gf.Target(quantity='displacement',
                                   codes=('', '0000', 'PG', cha),
                                   north_shift=dnorth,
                                   east_shift=deast,
                                   depth=config.receiver_depth,
                                   store_id=store_id_ahfull)

                dist = source.distance_to(target)
                azi, bazi = source.azibazi_to(target)

                if cha == 'r':
                    target.azimuth = bazi + 180.
                    target.dip = 0.
                elif cha == 't':
                    target.azimuth = bazi - 90.
                    target.dip = 0.
                elif cha == 'z':
                    target.azimuth = 0.
                    target.dip = 90.

                targets.append(target)

            runner = qseis.QSeisRunner()
            conf = qseis.QSeisConfigFull()
            conf.qseis_version = '2006a'
            conf.receiver_distances = [dist / km]
            conf.receiver_azimuths = [azi]
            conf.receiver_depth = config.receiver_depth / km
            conf.source_depth = source.depth / km

            distance_3d_max = math.sqrt(config.distance_max**2 +
                                        (config.source_depth_max -
                                         config.source_depth_min)**2)

            nsamples = trace.nextpow2(
                int(
                    math.ceil(distance_3d_max / vs * 2.0 + 2. * textra) *
                    config.sample_rate))

            conf.time_start = -textra
            conf.time_window = (nsamples - 1) / config.sample_rate
            conf.time_reduction_velocity = 0.0
            conf.nsamples = nsamples
            conf.source_mech = qseis.QSeisSourceMechMT(mnn=source.mnn,
                                                       mee=source.mee,
                                                       mdd=source.mdd,
                                                       mne=source.mne,
                                                       mnd=source.mnd,
                                                       med=source.med)
            conf.earthmodel_1d = mod

            conf.sw_flat_earth_transform = 0
            conf.filter_surface_effects = 1
            conf.wavenumber_sampling = 10.
            conf.wavelet_duration_samples = 0.001
            conf.aliasing_suppression_factor = 0.01

            conf.validate()

            runner.run(conf)

            trs = runner.get_traces()
            for tr in trs:
                tr.lowpass(4, config.sample_rate / 8., demean=False)
                tr.highpass(4, config.sample_rate / 80.)

            engine = gf.LocalEngine(
                store_dirs=[store_dir_ahfull, store_dir_qseis])

            trs2 = engine.process(source, targets).pyrocko_traces()
            for tr in trs2:
                tr.shift(config.deltat)
                tr.lowpass(4, config.sample_rate / 8., demean=False)
                tr.highpass(4, config.sample_rate / 80.)

            # trace.snuffle(trs+trs2)

            tmin = store.t('{vel:%g}' %
                           (vp / km), source, target) - textra * 0.2
            tmax = store.t('{vel:%g}' %
                           (vs / km), source, target) + textra * 0.2

            for tr in trs + trs2:
                tr.chop(tmin, tmax)

            denom = 0.0
            for cha in 'rtz':
                t1 = g(trs, cha)
                t2 = g(trs2, cha)
                denom += num.sum(t1.ydata**2) + num.sum(t2.ydata**2)

            ds = []
            for cha in 'rtz':
                t1 = g(trs, cha)
                t2 = g(trs2, cha)
                ds.append(2.0 * num.sum((t1.ydata - t2.ydata)**2) / denom)

            ds = num.array(ds)

            # if not num.all(ds < 0.05):
            #    trace.snuffle(trs+trs2)

            assert num.all(ds < 0.05)
Example #24
    def test_regional(self):
        engine = gf.get_engine()
        store_id = 'crust2_mf'
        try:
            engine.get_store(store_id)
        except gf.NoSuchStore:
            logger.warn('GF Store %s not available - skipping test' % store_id)
            return

        nsources = 10
        nstations = 10

        print('cache source channels par wallclock seismograms_per_second')
        nprocs_max = multiprocessing.cpu_count()

        for sourcetype, channels in [
                ['point', 'Z'],
                ['point', 'NEZ'],
                ['rect', 'Z'],
                ['rect', 'NEZ']]:

            for nprocs in [1, 2, 4, 8, 16, 32]:
                if nprocs > nprocs_max:
                    continue

                sources = []
                for isource in range(nsources):
                    m = pmt.MomentTensor.random_dc()
                    strike, dip, rake = map(float, m.both_strike_dip_rake()[0])

                    if sourcetype == 'point':
                        source = gf.DCSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0))

                    elif sourcetype == 'rect':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            length=10*km,
                            width=5*km,
                            nucleation_x=0.,
                            nucleation_y=-1.,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0))
                    else:
                        assert False

                    sources.append(source)

                targets = []
                for istation in range(nstations):
                    dist = rand(40.*km, 900*km)
                    azi = rand(-180., 180.)

                    north_shift = dist * math.cos(azi*d2r)
                    east_shift = dist * math.sin(azi*d2r)

                    for cha in channels:
                        target = gf.Target(
                            codes=('', 'S%04i' % istation, '', cha),
                            north_shift=north_shift,
                            east_shift=east_shift,
                            quantity='displacement',
                            interpolation='multilinear',
                            optimization='enable',
                            store_id=store_id)

                        targets.append(target)

                ntraces = len(targets) * len(sources)
                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    resp = engine.process(sources, targets, nprocs=nprocs)
                    # print resp.stats

                    t1 = time.time()
                    duration = t1 - t0
                    sps = ntraces / duration
                    if temperature == 'hot':
                        if nprocs == 1:
                            sps_ref = sps
                        print('%-5s %-6s %-8s %3i %9.3f %12.1f %12.1f' % (
                            temperature, sourcetype, channels, nprocs, t1-t0,
                            sps, sps/sps_ref))

                    del resp
Example #25
    def test_rect_source(self):

        store = self.dummy_homogeneous_store()

        depth = 10 * km
        # shear
        rect1 = gf.RectangularSource(depth=10 * km,
                                     magnitude=5.0,
                                     width=5 * km,
                                     length=5 * km)

        rect2 = gf.RectangularSource(
            depth=depth,
            slip=pmt.magnitude_to_moment(5.0) /
            (5 * km * 5 * km *
             store.config.earthmodel_1d.material(depth).shear_modulus()),
            width=5 * km,
            length=5 * km)

        self.assertAlmostEqual(
            rect1.get_magnitude(),
            rect2.get_magnitude(store,
                                gf.Target(interpolation='nearest_neighbor')))

        # tensile
        rect3 = gf.RectangularSource(depth=depth,
                                     magnitude=5.0,
                                     width=5 * km,
                                     length=5 * km,
                                     opening_fraction=1.)

        rect4 = gf.RectangularSource(
            depth=depth,
            slip=pmt.magnitude_to_moment(5.0) /
            (5 * km * 5 * km *
             store.config.earthmodel_1d.material(depth).bulk()),
            width=5 * km,
            length=5 * km,
            opening_fraction=1.)

        self.assertAlmostEqual(
            rect3.get_magnitude(),
            rect4.get_magnitude(store,
                                gf.Target(interpolation='nearest_neighbor')))

        # mixed
        of = -0.4
        rect5 = gf.RectangularSource(depth=depth,
                                     magnitude=5.0,
                                     width=5 * km,
                                     length=5 * km,
                                     opening_fraction=of)

        rect6 = gf.RectangularSource(
            depth=depth,
            slip=pmt.magnitude_to_moment(5.0) /
            (5 * km * 5 * km *
             (store.config.earthmodel_1d.material(depth).bulk() * abs(of) +
              store.config.earthmodel_1d.material(depth).shear_modulus() *
              (1 - abs(of)))),
            width=5 * km,
            length=5 * km,
            opening_fraction=of)

        self.assertAlmostEqual(
            rect5.get_magnitude(),
            rect6.get_magnitude(store,
                                gf.Target(interpolation='nearest_neighbor')))
Example #26
        source.m6 = tuple(rand(-1., 1.) for x in range(6))

        for ii in range(5):
            azi = random.random() * 365.
            dist = rand(config.distance_min, config.distance_max)
            dist = round(dist / config.distance_delta) * config.distance_delta

            dnorth = dist * math.cos(azi * d2r)
            deast = dist * math.sin(azi * d2r)

            targets = []
            for cha in 'rtz':
                target = gf.Target(quantity='displacement',
                                   codes=('', '0000', 'PG', cha),
                                   north_shift=dnorth,
                                   east_shift=deast,
                                   depth=config.receiver_depth,
                                   store_id=store_id_ahfull)

                dist = source.distance_to(target)
                azi, bazi = source.azibazi_to(target)

                if cha == 'r':
                    target.azimuth = bazi + 180.
                    target.dip = 0.
                elif cha == 't':
                    target.azimuth = bazi - 90.
                    target.dip = 0.
                elif cha == 'z':
                    target.azimuth = 0.
                    target.dip = 90.
Example #27
    def get_plain_targets(self, engine, source):
        d = dict((k, getattr(self, k)) for k in gf.Target.T.propnames)
        return [gf.Target(**d)]
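The one-liner above clones a Target by iterating over gf.Target.T.propnames, the declared property names of the Target class; the same idiom works on any standalone Target instance. A minimal sketch (the concrete property values are arbitrary):

# Clone an existing Target by copying every declared property into a new one.
t = gf.Target(lat=10., lon=20., codes=('', 'STA', '', 'Z'),
              interpolation='multilinear')
d = dict((k, getattr(t, k)) for k in gf.Target.T.propnames)
t_clone = gf.Target(**d)
assert t_clone.codes == t.codes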
Example #28
def model(
        engine,
        store_id,
        magnitude_min, magnitude_max,
        moment_tensor,
        stress_drop_min, stress_drop_max,
        rupture_velocity_min, rupture_velocity_max,
        depth_min, depth_max,
        distance_min, distance_max,
        measures,
        nsources=400,
        nreceivers=1,
        apply_source_response_via_spectra=True,
        debug=True):

    d2r = math.pi / 180.

    components = set()
    for measure in measures:
        if not measure.components:
            raise Exception('no components given in measurement rule')

        for component in measure.components:
            components.add(component)

    components = list(components)

    data = []
    nerrors = 0
    traces_debug = []
    markers_debug = []
    for isource in range(nsources):
        magnitude = num.random.uniform(
            magnitude_min, magnitude_max)
        stress_drop = num.random.uniform(
            stress_drop_min, stress_drop_max)
        rupture_velocity = num.random.uniform(
            rupture_velocity_min, rupture_velocity_max)

        radius = (pmt.magnitude_to_moment(magnitude) * 7./16. /
                  stress_drop)**(1./3.)

        duration = 1.5 * radius / rupture_velocity

        if moment_tensor is None:
            mt = pmt.MomentTensor.random_dc(magnitude=magnitude)
        else:
            mt = copy.deepcopy(moment_tensor)
            mt.magnitude = magnitude

        depth = num.random.uniform(depth_min, depth_max)
        if apply_source_response_via_spectra:
            source = gf.MTSource(
                m6=mt.m6(),
                depth=depth)

            extra_responses = [
                wmeasure.BruneResponse(duration=duration)]
        else:
            source = gf.MTSource(
                m6=mt.m6(),
                depth=depth,
                stf=gf.HalfSinusoidSTF(effective_duration=duration))

            extra_responses = []

        for ireceiver in range(nreceivers):
            angle = num.random.uniform(0., 360.)
            distance = num.exp(num.random.uniform(
                math.log(distance_min), math.log(distance_max)))

            targets = []
            for comp in components:
                targets.append(gf.Target(
                    quantity='displacement',
                    codes=('', '%i_%i' % (isource, ireceiver), '', comp),
                    north_shift=distance*math.cos(d2r*angle),
                    east_shift=distance*math.sin(d2r*angle),
                    depth=0.,
                    store_id=store_id))

            resp = engine.process(source, targets)
            amps = []
            for measure in measures:
                comp_to_tt = {}
                for (source, target, tr) in resp.iter_results():
                    comp_to_tt[target.codes[-1]] = (target, tr)

                targets, trs = zip(*(
                    comp_to_tt[c] for c in measure.components))

                try:
                    result = wmeasure.evaluate(
                        engine, source, targets, trs,
                        extra_responses,
                        debug=debug)

                    if not debug:
                        amps.append(result)
                    else:
                        amp, trs, marker = result
                        amps.append(amp)
                        traces_debug.extend(trs)
                        markers_debug.append(marker)

                except wmeasure.AmplitudeMeasurementFailed:
                    nerrors += 1
                    amps.append(None)

            data.append([magnitude, duration, depth, distance] + amps)

    if debug:
        trace.snuffle(traces_debug, markers=markers_debug)

    return num.array(data, dtype=float)
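The duration model in this function follows a Brune-type circular crack: the source radius is r = (7/16 * M0 / stress_drop)**(1/3) and the rupture duration is taken as 1.5 * r divided by the rupture velocity. A small worked sketch of that relation, assuming pyrocko is installed and using illustrative values for stress drop and rupture velocity:

from pyrocko import moment_tensor as pmt

magnitude = 4.0
stress_drop = 3.0e6          # [Pa], assumed value
rupture_velocity = 3000.     # [m/s], assumed value

moment = pmt.magnitude_to_moment(magnitude)               # [Nm]
radius = (moment * 7. / 16. / stress_drop) ** (1. / 3.)   # [m]
duration = 1.5 * radius / rupture_velocity                # [s]

print('radius [m]: %g, duration [s]: %g' % (radius, duration))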
Example n. 29
    def test_pyrocko_gf_vs_qseis(self):
        random.seed(2017)

        mod = cake.LayeredModel.from_scanlines(
            cake.read_nd_model_str('''
 0. 5.8 3.46 2.6 1264. 600.
 20. 5.8 3.46 2.6 1264. 600.
 20. 6.5 3.85 2.9 1283. 600.
 35. 6.5 3.85 2.9 1283. 600.
mantle
 35. 8.04 4.48 3.58 1449. 600.
 77.5 8.045 4.49 3.5 1445. 600.
 77.5 8.045 4.49 3.5 180.6 75.
 120. 8.05 4.5 3.427 180. 75.
 120. 8.05 4.5 3.427 182.6 76.06
 165. 8.175 4.509 3.371 188.7 76.55
 210. 8.301 4.518 3.324 201. 79.4
 210. 8.3 4.52 3.321 336.9 133.3
 410. 9.03 4.871 3.504 376.5 146.1
 410. 9.36 5.08 3.929 414.1 162.7
 660. 10.2 5.611 3.918 428.5 172.9
 660. 10.79 5.965 4.229 1349. 549.6'''.lstrip()))

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)

        qsconf = qseis.QSeisConfig()
        qsconf.qseis_version = '2006a'

        qsconf.time_region = (gf.meta.Timing('0'), gf.meta.Timing('end+100'))

        qsconf.cut = (gf.meta.Timing('0'), gf.meta.Timing('end+100'))

        qsconf.wavelet_duration_samples = 0.001
        qsconf.sw_flat_earth_transform = 0

        config = gf.meta.ConfigTypeA(id='qseis_test',
                                     sample_rate=0.25,
                                     receiver_depth=0. * km,
                                     source_depth_min=10 * km,
                                     source_depth_max=10 * km,
                                     source_depth_delta=1 * km,
                                     distance_min=550 * km,
                                     distance_max=560 * km,
                                     distance_delta=1 * km,
                                     modelling_code_id='qseis.2006a',
                                     earthmodel_1d=mod,
                                     tabulated_phases=[
                                         gf.meta.TPDef(
                                             id='begin',
                                             definition='p,P,p\\,P\\'),
                                         gf.meta.TPDef(id='end',
                                                       definition='2.5'),
                                     ])

        config.validate()
        gf.store.Store.create_editables(store_dir,
                                        config=config,
                                        extra={'qseis': qsconf})

        store = gf.store.Store(store_dir, 'r')
        store.make_ttt()
        store.close()

        try:
            qseis.build(store_dir, nworkers=1)
        except qseis.QSeisError as e:
            if str(e).find('could not start qseis') != -1:
                logger.warning('qseis not installed; '
                               'skipping test_pyrocko_gf_vs_qseis')
                return
            else:
                raise

        source = gf.MTSource(lat=0., lon=0., depth=10. * km)

        source.m6 = tuple(random.random() * 2. - 1. for x in range(6))

        azi = random.random() * 360.
        dist = 553. * km

        dnorth = dist * math.cos(azi * d2r)
        deast = dist * math.sin(azi * d2r)

        targets = []
        for cha in 'rtz':
            target = gf.Target(quantity='displacement',
                               codes=('', '0000', 'PG', cha),
                               north_shift=dnorth,
                               east_shift=deast,
                               depth=config.receiver_depth,
                               store_id='qseis_test')

            dist = source.distance_to(target)
            azi, bazi = source.azibazi_to(target)

            if cha == 'r':
                target.azimuth = bazi + 180.
                target.dip = 0.
            elif cha == 't':
                target.azimuth = bazi - 90.
                target.dip = 0.
            elif cha == 'z':
                target.azimuth = 0.
                target.dip = 90.

            targets.append(target)

        runner = qseis.QSeisRunner()
        conf = qseis.QSeisConfigFull()
        conf.qseis_version = '2006a'
        conf.receiver_distances = [dist / km]
        conf.receiver_azimuths = [azi]
        conf.source_depth = source.depth / km
        conf.time_start = 0.0
        conf.time_window = 508.
        conf.time_reduction_velocity = 0.0
        conf.nsamples = 128
        conf.source_mech = qseis.QSeisSourceMechMT(mnn=source.mnn,
                                                   mee=source.mee,
                                                   mdd=source.mdd,
                                                   mne=source.mne,
                                                   mnd=source.mnd,
                                                   med=source.med)
        conf.earthmodel_1d = mod

        conf.sw_flat_earth_transform = 0

        runner.run(conf)

        trs = runner.get_traces()
        for tr in trs:
            tr.shift(-config.deltat)
            tr.snap(interpolate=True)
            tr.lowpass(4, 0.05)
            tr.highpass(4, 0.01)

        engine = gf.LocalEngine(store_dirs=[store_dir])

        def process_wrap(nthreads=0):
            @benchmark.labeled('pyrocko.gf.process (nthreads-%d)' % nthreads)
            def process(nthreads):
                return engine.process(source, targets, nthreads=nthreads)\
                    .pyrocko_traces()

            return process(nthreads)

        for nthreads in range(1, cpu_count() + 1):
            trs2 = process_wrap(nthreads)
        # print benchmark

        for tr in trs2:
            tr.snap(interpolate=True)
            tr.lowpass(4, 0.05)
            tr.highpass(4, 0.01)

        # trace.snuffle(trs+trs2)

        for cha in 'rtz':
            t1 = g(trs, cha)
            t2 = g(trs2, cha)
            tmin = max(t1.tmin, t2.tmin)
            tmax = min(t1.tmax, t2.tmax)
            t1.chop(tmin, tmax)
            t2.chop(tmin, tmax)
            # normalized waveform misfit between the QSEIS reference (t1)
            # and the pyrocko GF synthetics (t2)
            d = 2.0 * num.sum((t1.ydata - t2.ydata)**2) / \
                (num.sum(t1.ydata**2) + num.sum(t2.ydata**2))

            assert d < 0.05
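The acceptance criterion above is a normalized least-squares misfit between the QSEIS reference and the pyrocko GF synthetics; it is zero for identical traces and about 2 for uncorrelated traces of equal energy. A standalone sketch of the same measure, assuming two equally sampled numpy arrays:

import numpy as num


def normalized_misfit(a, b):
    # 2 * sum((a - b)^2) / (sum(a^2) + sum(b^2)), as in the assertion above
    return 2.0 * num.sum((a - b)**2) / (num.sum(a**2) + num.sum(b**2))


# example: two slightly different sine signals pass the 0.05 threshold
t = num.linspace(0., 10., 1001)
assert normalized_misfit(num.sin(t), num.sin(t) * 1.01) < 0.05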
Example n. 30
    def call(self):
        '''Main work routine of the snuffling.'''
        self.cleanup()

        # get the viewer and the currently active event (if any)
        viewer = self.get_viewer()

        event = viewer.get_active_event()
        if event:
            event, stations = self.get_active_event_and_stations(
                missing='warn')
        else:
            # event = model.Event(lat=self.lat, lon=self.lon)
            event = model.Event(lat=0., lon=0.)
            stations = []

        stations = self.get_stations()

        s2c = {}
        for traces in self.chopper_selected_traces(fallback=True,
                                                   mode='visible'):
            for tr in traces:
                net, sta, loc, cha = tr.nslc_id
                ns = net, sta
                if ns not in s2c:
                    s2c[ns] = set()

                s2c[ns].add((loc, cha))

        if not stations:
            stations = []
            for (lat, lon) in [(5., 0.), (-5., 0.)]:
                s = model.Station(station='(%g, %g)' % (lat, lon),
                                  lat=lat,
                                  lon=lon)
                stations.append(s)

            viewer.add_stations(stations)

        for s in stations:
            ns = s.nsl()[:2]
            if ns not in s2c:
                s2c[ns] = set()

            for cha in 'NEZ':
                s2c[ns].add(('', cha))

        source = gf.RectangularSource(time=event.time + self.time,
                                      lat=event.lat,
                                      lon=event.lon,
                                      north_shift=self.north_km * km,
                                      east_shift=self.east_km * km,
                                      depth=self.depth_km * km,
                                      magnitude=self.magnitude,
                                      strike=self.strike,
                                      dip=self.dip,
                                      rake=self.rake,
                                      length=self.length,
                                      width=self.width,
                                      nucleation_x=self.nucleation_x,
                                      velocity=self.velocity,
                                      stf=self.get_stf())

        source.regularize()

        m = EventMarker(source.pyrocko_event())
        self.add_marker(m)

        targets = []

        if self.store_id == '<not loaded yet>':
            self.fail('Select a GF Store first')

        for station in stations:

            nsl = station.nsl()
            if nsl[:2] not in s2c:
                continue

            for loc, cha in s2c[nsl[:2]]:

                target = gf.Target(codes=(station.network, station.station,
                                          loc + '-syn', cha),
                                   quantity='displacement',
                                   lat=station.lat,
                                   lon=station.lon,
                                   depth=station.depth,
                                   store_id=self.store_id,
                                   optimization='enable',
                                   interpolation='nearest_neighbor')

                _, bazi = source.azibazi_to(target)

                if cha.endswith('T'):
                    dip = 0.
                    azi = bazi + 270.
                elif cha.endswith('R'):
                    dip = 0.
                    azi = bazi + 180.
                elif cha.endswith('1'):
                    dip = 0.
                    azi = 0.
                elif cha.endswith('2'):
                    dip = 0.
                    azi = 90.
                else:
                    dip = None
                    azi = None

                target.azimuth = azi
                target.dip = dip

                targets.append(target)

        req = gf.Request(sources=[source], targets=targets)

        req.regularize()

        try:
            resp = self.get_engine().process(req)
        except (gf.meta.OutOfBounds, gf.store_ext.StoreExtError) as e:
            self.fail(e)

        traces = resp.pyrocko_traces()

        if self.waveform_type.startswith('Velocity'):
            # displacement -> velocity by first differences
            for tr in traces:
                tr.set_ydata(num.diff(tr.ydata) / tr.deltat)

        elif self.waveform_type.startswith('Acceleration'):
            # displacement -> acceleration by second differences
            for tr in traces:
                tr.set_ydata(num.diff(num.diff(tr.ydata)) / tr.deltat**2)

        if self.waveform_type.endswith('[nm]') or \
                self.waveform_type.endswith('[nm/s]') or \
                self.waveform_type.endswith('[nm/s^2]'):

            for tr in traces:
                tr.set_ydata(tr.ydata * 1e9)

        self.add_traces(traces)
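The velocity and acceleration options above are obtained by finite differencing the displacement synthetics (num.diff shortens the trace by one sample per differentiation), followed by an optional scaling to nanometre units. A minimal sketch of the same conversion on a synthetic pyrocko trace, assuming pyrocko and numpy are available:

import numpy as num
from pyrocko import trace

deltat = 0.01
displacement = num.sin(num.linspace(0., 10., 1001))
tr = trace.Trace(station='SYN', channel='Z', deltat=deltat,
                 ydata=displacement)

# displacement -> velocity via first differences, as in the snuffling above
tr.set_ydata(num.diff(tr.ydata) / tr.deltat)

# scale to nanometres per second, mirroring the '[nm/s]' option
tr.set_ydata(tr.ydata * 1e9)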