Example #1
# Assumed setup: the snippet opens mid-listing in the source, so the imports,
# engine and receiver definition are restored here (store ID and receiver
# coordinates are illustrative):
from pyrocko.gf import LocalEngine, Target, DCSource
from pyrocko.gui.marker import PhaseMarker
from pyrocko import trace

store_id = 'iceland_reg_v2'
engine = LocalEngine(store_superdirs=['.'])

# One station with a three-component sensor serves as the receiver.
channel_codes = 'ENZ'
targets = [
    Target(lat=10.,
           lon=10.,
           store_id=store_id,
           codes=('', 'STA', '', channel_code))
    for channel_code in channel_codes
]

# Let's use a double couple source representation.
source_dc = DCSource(lat=11.,
                     lon=11.,
                     depth=10000.,
                     strike=20.,
                     dip=40.,
                     rake=60.,
                     magnitude=4.)

# Processing that data will return a pyrocko.gf.Response object.
response = engine.process(source_dc, targets)

# This will return a list of the requested traces:
synthetic_traces = response.pyrocko_traces()
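# A quick visual check is possible with pyrocko's built-in viewer (an
# interactive session is assumed here):
# trace.snuffle(synthetic_traces)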

# In addition to that it is also possible to extract interpolated travel times
# of phases which have been defined in the store's config file.
store = engine.get_store(store_id)

markers = []
for t in targets:
    dist = t.distance_to(source_dc)
    depth = source_dc.depth
    arrival_time = store.t('any_P', (depth, dist))
    # The call is completed here (phase name and trace codes are assumed):
    m = PhaseMarker(tmin=arrival_time,
                    tmax=arrival_time,
                    phasename='any_P',
                    nslc_ids=[t.codes])
    markers.append(m)
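# The collected markers could now be saved for later inspection alongside the
# traces, e.g. (assuming pyrocko.gui.marker.save_markers is available):
# save_markers(markers, 'phase_markers.txt')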
Example #2
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)
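    # Assumed format of settings.depths is 'start:stop:step' in kilometres;
    # e.g. '5:15:1' scans source depths from 5 km to 14 km (num.arange
    # excludes the stop value).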

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(filter(
        lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id),
        stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = (store.config.distance_min -
                                  closest_source.distance_to(t))
                max_dist_delta = (store.config.distance_max -
                                  farthest_source.distance_to(t))
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(
        zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
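    # Each synthetic trace is drawn at y = its source depth; amplitudes are
    # scaled to 2 per cent of the scanned depth range so that neighbouring
    # traces do not overlap.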
    for s, t, tr in request.iter_results():
        if quantity == 'velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t('begin',
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left=False,
                           right=False,
                           labelleft=False)

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time + onset + zoom_window[0],
                          tmax=event.time + onset + zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color='black',
                            alpha=0.5)
        ax.text(zoom_window[0] * 1.09,
                y_pos,
                '%1.1f' % (s.depth / 1000.),
                horizontalalignment='right')  #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                    distances=[s.distance_to(t) * cake.m2d],
                                    zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    #fontsize=12,
                    color='black',
                    verticalalignment='top',
                    horizontalalignment='right')

            except IndexError:
                logger.warning(
                    'no pP phase at d=%s z=%s stat=%s' %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))

    if len(traces) == 0:
        raise Exception('No Trace found!')
    if len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == 'displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
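        # Note: 's' is the last test source left over from the loop above.
        # All test sources share the same epicentre, so the epicentral
        # distance is identical for each of them.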
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left=False,
                           right=False,
                           labelleft=False)

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=onset + zoom_window[0] + correction,
                          tmax=onset + zoom_window[1] + correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0,
                0.6,
                'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure)  #, fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {
            'array-id': ''.join(station.nsl()),
            'event_name': event.name,
            'event_time': time_to_str(event.time)
        }
        ax.text(0.5,
                1.05,
                settings.title % params,
                horizontalalignment='center',
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        if settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        if settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '

        captions = {'filters': ''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % (
                f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1. / store.config.deltat
        cax.text(
            0,
            0,
            'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s'
            % captions,
            fontsize=12,
            transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
Example #3
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=settings_fn)
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(
                float(zstart) * km,
                float(zstop) * km,
                float(inkr) * km)
            traces = io.load(settings.trace_filename)
            event = model.load_events(settings.event_filename)
            assert len(event) == 1
            event = event[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = list(filter(
                lambda s: match_nslc('%s.%s.%s.*' % s.nsl(),
                                     traces[0].nslc_id), stations))
            if len(station) != 1:
                logger.error('no matching station found for %s' %
                             '.'.join(traces[0].nslc_id))
            else:
                station = station[0]
            targets = [
                station_to_target(station,
                                  quantity=settings.quantity,
                                  store_id=settings.store_id)
            ]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' %
                                   error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources,
                                             key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources,
                                              key=lambda s: s.distance_to(t))
                        min_dist_delta = (store.config.distance_min -
                                          closest_source.distance_to(t))
                        max_dist_delta = (store.config.distance_max -
                                          farthest_source.distance_to(t))
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets,
                                             sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces) == 1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(
                mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(
                mod, (event.depth, dist)) + event.time
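            # Cut the reference trace to the window spanned by the two
            # array-specific timings, both taken relative to the origin time.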
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            mesh_fwidth = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            misfits_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc - fc * 2. / 5.
                fr_max = fc + fc * 2. / 5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)
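                # For each centre frequency fc, candidate bandwidths between
                # 0.6*fc and 1.4*fc are scanned (fl_min/fr_max above).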

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(
                            mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(
                            mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc + width * 0.5),
                                                order=4,
                                                type='low'),
                            ButterworthResponse(corner=float(fc - width * 0.5),
                                                order=4,
                                                type='high')
                        ]
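                        # The low-pass at fc + width/2 and the high-pass at
                        # fc - width/2 together form a band-pass of width
                        # 'width' centred on fc.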
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(
                            candidate=candidate,
                            setup=settings.misfit_setup,
                            debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(
                                len(test_depths) + 1, 1, i + 1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m / n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits: \n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits * 100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
Example #4
def plot(settings, show=False):

    # align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = "P"
    zoom_window = list(settings.zoom)
    ampl_scaler = "4*standard deviation"

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(":")
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.0
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(
        filter(lambda s: match_nslc("%s.%s.%s.*" % s.nsl(), traces[0].nslc_id),
               stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning("%s  Using nearest neighbor instead." % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = (store.config.distance_min -
                                  closest_source.distance_to(t))
                max_dist_delta = (store.config.distance_max -
                                  farthest_source.distance_to(t))
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == "velocity":
            tr = integrate_differentiate(tr, "differentiate")

        onset = engine.get_store(t.store_id).t("begin",
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=event.time + onset + zoom_window[0],
            tmax=event.time + onset + zoom_window[1],
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c="black", linewidth=1.0, alpha=1.0)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color="black",
                            alpha=0.5)
        ax.text(
            zoom_window[0] * 1.09,
            y_pos,
            "%1.1f" % (s.depth / 1000.0),
            horizontalalignment="right",
        )  # , fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = "pP"
            arrivals = mod.arrivals(
                phases=[cake.PhaseDef(label)],
                distances=[s.distance_to(t) * cake.m2d],
                zstart=s.depth,
            )

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c="blue")

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    # fontsize=12,
                    color="black",
                    verticalalignment="top",
                    horizontalalignment="right",
                )

            except IndexError:
                logger.warning(
                    "no pP phase at d=%s z=%s stat=%s" %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))

    if len(traces) == 0:
        raise Exception("No Trace found!")
    if len(traces) > 1:
        raise Exception("More then one trace provided!")
    else:
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == "displacement":
            tr = integrate_differentiate(tr, "integrate")
        tr = settings.do_filter(tr)
        onset = (engine.get_store(targets[0].store_id).t(
            "begin", (event.depth, s.distance_to(targets[0]))) + event.time)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=onset + zoom_window[0] + correction,
            tmax=onset + zoom_window[1] + correction,
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.0)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel("Time [s]")
        ax.text(
            0.0,
            0.6,
            "Source depth [km]",
            rotation=90,
            horizontalalignment="left",
            transform=fig.transFigure,
        )  # , fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0.0, c="black")
        vline.set_linestyle("--")
    if settings.title:
        params = {
            "array-id": "".join(station.nsl()),
            "event_name": event.name,
            "event_time": time_to_str(event.time),
        }
        ax.text(
            0.5,
            1.05,
            settings.title % params,
            horizontalalignment="center",
            transform=ax.transAxes,
        )
    if settings.auto_caption:
        cax = fig.add_axes([0.0, 0.0, 1, 0.05], label="caption")
        cax.axis("off")
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == "displacement":
            quantity_info = "integrated velocity trace. "
        if settings.quantity == "velocity":
            quantity_info = "differentiated synthetic traces. "
        if settings.quantity == "restituted":
            quantity_info = "restituted traces. "

        captions = {"filters": ""}
        for f in settings.filters:
            captions["filters"] += "%s-pass, order %s, f$_c$=%s Hz. " % (
                f.type,
                f.order,
                f.corner,
            )
        captions["quantity_info"] = quantity_info
        captions["store_sampling"] = 1.0 / store.config.deltat
        cax.text(
            0,
            0,
            "Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s"
            % captions,
            fontsize=12,
            transform=cax.transAxes,
        )
        plt.subplots_adjust(hspace=0.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info("save as: %s " % settings.save_as)
        options = settings.__dict__
        options.update({"array-id": "".join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches="tight")
    if show:
        plt.show()
Example #5
def doCalc_syn(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
               TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
               syn_in, parameter):
    '''
    Method for calculating the semblance of one station array.
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT  : %f  MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()

    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    if cfg.UInt('forerun') > 0:
        ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))
                     / cfg.UInt('step'))
    else:
        ntimes = int(cfg.UInt('duration') / cfg.UInt('step'))
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()
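    # obspy_compat.plant() installs the ObsPy/pyrocko interoperability hooks;
    # the to_pyrocko_trace/to_obspy_trace conversions used below live in the
    # same module.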

    ############################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)

    store_id = syn_in.store()
    engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])

    targets = []
    for st in stations:
        target = Target(
                lat=st.lat,
                lon=st.lon,
                store_id=store_id,
                codes=(st.network, st.station, st.location, 'BHZ'),
                tmin=-1900,
                tmax=3900,
                interpolation='multilinear',
                quantity=cfg.quantity())
        targets.append(target)
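    # One vertical-component (BHZ) target per array station; all targets
    # request synthetics from the same Green's function store.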

    if syn_in.nsources() == 1:
        if syn_in.use_specific_stf() is True:
            stf = syn_in.stf()
            exec(stf)
        else:
            stf = STF()
        if syn_in.source() == 'RectangularSource':
            source = RectangularSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                width=syn_in.width_0() * 1000.,
                length=syn_in.length_0() * 1000.,
                nucleation_x=syn_in.nucleation_x_0(),
                slip=syn_in.slip_0(),
                nucleation_y=syn_in.nucleation_y_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()))
        if syn_in.source() == 'DCSource':
            source = DCSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()),
                magnitude=syn_in.magnitude_0())

    else:
        sources = []
        for i in range(syn_in.nsources()):
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)

            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(RectangularSource(
                    lat=float(syn_in.lat_1(i)),
                    lon=float(syn_in.lon_1(i)),
                    depth=syn_in.depth_syn_1(i) * 1000.,
                    strike=syn_in.strike_1(i),
                    dip=syn_in.dip_1(i),
                    rake=syn_in.rake_1(i),
                    width=syn_in.width_1(i) * 1000.,
                    length=syn_in.length_1(i) * 1000.,
                    nucleation_x=syn_in.nucleation_x_1(i),
                    slip=syn_in.slip_1(i),
                    nucleation_y=syn_in.nucleation_y_1(i),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_1(i))))

            if syn_in.source() == 'DCSource':
                sources.append(DCSource(
                    lat=float(syn_in.lat_1(i)),
                    lon=float(syn_in.lon_1(i)),
                    depth=syn_in.depth_1(i) * 1000.,
                    strike=syn_in.strike_1(i),
                    dip=syn_in.dip_1(i),
                    rake=syn_in.rake_1(i),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_1(i)),
                    magnitude=syn_in.magnitude_1(i)))
        source = CombiSource(subsources=sources)
    response = engine.process(source, targets)

    synthetic_traces = response.pyrocko_traces()
    if cfg.Bool('synthetic_test_add_noise') is True:
        from noise_addition import add_noise
        trs_orgs = []
        calcStreamMapsyn = calcStreamMap.copy()
        #from pyrocko import trace
        for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]):
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    tr_org.downsample_to(2.0)
                    trs_orgs.append(tr_org)
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        synthetic_traces = add_noise(trs_orgs, engine, source.pyrocko_event(),
                                     stations,
                                     store_id, phase_def='P')
    trs_org = []
    trs_orgs = []
    fobj = os.path.join(arrayfolder, 'shift.dat')
    xy = num.loadtxt(fobj, usecols=1, delimiter=',')
    calcStreamMapsyn = calcStreamMap.copy()
    #from pyrocko import trace
    for tracex in calcStreamMapsyn.keys():
        for trl in synthetic_traces:
            if str(trl.name()[4:12]) == str(tracex[4:]):
                mod = trl

                recordstarttime = \
                    calcStreamMapsyn[tracex].stats.starttime.timestamp
                recordendtime = \
                    calcStreamMapsyn[tracex].stats.endtime.timestamp
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapsyn[tracex])
                trs_orgs.append(tr_org)

                tr_org_add = mod.chop(recordstarttime, recordendtime,
                                      inplace=False)
                synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                calcStreamMapsyn[tracex] = synthetic_obs_tr
                trs_org.append(tr_org_add)
    calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') is True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted


    if cfg.Bool('shift_by_phase_onset') is True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(
                calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
           phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
           fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory,
                                                         'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
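        # BeamForming shifts each station trace to align the selected phase;
        # the array centre and the beam are written to the given files
        # (array_center.pf, beam.mseed).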
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for trace in calcStreamMapshifted.keys():
            recordstarttime = \
                calcStreamMapshifted[trace].stats.starttime.timestamp
            recordendtime = \
                calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime,
                                 inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace] = shifted_obs_tr
            i += 1

        calcStreamMap = calcStreamMapshifted


    weight = 0.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(
                calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory,
                                                         'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')

    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp

        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ############################################################################
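    # Flattened dimX*dimY geometry grids: one travel time, latitude and
    # longitude per grid node and per station, filled from TTTGridMap below.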
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ############################################################################


    c = 0
    streamCounter = 0

    for key in calcStreamMap.keys():
        streamID = key
        c2 = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o

                c2 += 1


        for ttkey in TTTGridMap.keys():
            if streamID == ttkey:
                traveltimes[streamCounter] = TTTGridMap[ttkey]


        if streamCounter not in traveltimes:
            continue  # hs: thread crashed before

        g = traveltimes[streamCounter]
        dimZ  = g.dimZ
        mint  = g.mint
        maxt  = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor

        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]

                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        #endfor

        c += 1
        streamCounter += 1

    #endfor


    ############################## CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ##################
    nsamp = winlen * new_frequence

    nstep = int(step * new_frequence)
    migpoints = dimX * dimY

    dimZ = 0
    new_frequence = cfg.newFrequency()
    maxp = int(Config['ncore'])


    Logfile.add('PROCESS %d  NTIMES: %d' % (flag, ntimes))

    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(Gmint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))
        print('traveltime', traveltime, type(traveltime))


#==================================semblance calculation========================================
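    # Station, sample and grid axes are flattened to 1-D arrays before being
    # handed to the semblance kernel below.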

    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)
    traveltime = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = True
    try:
        if USE_C_CODE:
            import Cm
            import CTrig
            start_time = time.time()
            k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                         Gmint, new_frequence, minSampleCount, latv, lonv,
                         traveltime, traces)
            print("--- %s seconds ---" % (time.time() - start_time))
        else:
            start_time = time.time()
            k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                      Gmint, new_frequence, minSampleCount, latv, lonv,
                      traveltime, traces)  # hs
            print("--- %s seconds ---" % (time.time() - start_time))
    except Exception:
        print('loaded tttgrid has probably wrong dimensions or stations, '
              'delete ttgrid or exchange')

    t2 = time.time()

    partSemb = k
    partSemb_syn = partSemb.reshape(ntimes, migpoints)

    return partSemb_syn
Example #6
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
           syn_in):
    '''
    method for calculating semblance of one station array
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT  : %f  MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)
    cfg_f = FilterCfg(Config)

    timeev = util.str_to_time(ev.time)
    dimX = cfg.dimX()
    dimY = cfg.dimY()
    winlen = cfg.winlen()
    step = cfg.step()

    new_frequence = cfg.newFrequency()
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    lats = []
    lons = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=float(il.lat),
                                    lon=float(il.lon),
                                    station=il.sta,
                                    network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele,
                                    location=il.loc)
                stations.append(szo)
                lats.append(float(il.lat))
                lons.append(float(il.lon))
    array_center = [num.mean(lats), num.mean(lons)]
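    # Approximate array centre: the mean of the station coordinates.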

    #==================================synthetic BeamForming======================

    if cfg.Bool('synthetic_test') is True:
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        recordstarttimes = []
        for tracex in calcStreamMap.keys():
            recordstarttimes.append(
                calcStreamMap[tracex].stats.starttime.timestamp)
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMap[tracex])
            tmin = tr_org.tmin

        #tmin= num.min(recordstarttimes)
        targets = []
        sources = []
        for st in stations:
            target = Target(lat=st.lat,
                            lon=st.lon,
                            store_id=store_id,
                            codes=(st.network, st.station, st.location, 'BHZ'),
                            tmin=-6900,
                            tmax=6900,
                            interpolation='multilinear',
                            quantity=cfg.quantity())
            targets.append(target)

        if syn_in.nsources() == 1:
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(
                    RectangularSource(
                        lat=float(syn_in.lat_0()),
                        lon=float(syn_in.lon_0()),
                        east_shift=float(syn_in.east_shift_0()) * 1000.,
                        north_shift=float(syn_in.north_shift_0()) * 1000.,
                        depth=syn_in.depth_syn_0() * 1000.,
                        strike=syn_in.strike_0(),
                        dip=syn_in.dip_0(),
                        rake=syn_in.rake_0(),
                        width=syn_in.width_0() * 1000.,
                        length=syn_in.length_0() * 1000.,
                        nucleation_x=syn_in.nucleation_x_0(),
                        slip=syn_in.slip_0(),
                        nucleation_y=syn_in.nucleation_y_0(),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_0())))
            if syn_in.source() == 'DCSource':
                sources.append(
                    DCSource(lat=float(syn_in.lat_0()),
                             lon=float(syn_in.lon_0()),
                             east_shift=float(syn_in.east_shift_0()) * 1000.,
                             north_shift=float(syn_in.north_shift_0()) * 1000.,
                             depth=syn_in.depth_syn_0() * 1000.,
                             strike=syn_in.strike_0(),
                             dip=syn_in.dip_0(),
                             rake=syn_in.rake_0(),
                             stf=stf,
                             time=util.str_to_time(syn_in.time_0()),
                             magnitude=syn_in.magnitude_0()))

        else:
            for i in range(syn_in.nsources()):
                if syn_in.use_specific_stf() is True:
                    stf = syn_in.stf()
                    exec(stf)

                else:
                    stf = STF()
                if syn_in.source() == 'RectangularSource':
                    sources.append(
                        RectangularSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            width=syn_in.width_1(i) * 1000.,
                            length=syn_in.length_1(i) * 1000.,
                            nucleation_x=syn_in.nucleation_x_1(i),
                            slip=syn_in.slip_1(i),
                            nucleation_y=syn_in.nucleation_y_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i))))

                if syn_in.source() == 'DCSource':
                    sources.append(
                        DCSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i)),
                            magnitude=syn_in.magnitude_1(i)))
            #source = CombiSource(subsources=sources)
        synthetic_traces = []
        for source in sources:
            response = engine.process(source, targets)
            synthetic_traces_source = response.pyrocko_traces()
            if not synthetic_traces:
                synthetic_traces = synthetic_traces_source
            else:
                for trsource, tr in zip(synthetic_traces_source,
                                        synthetic_traces):
                    tr.add(trsource)
            from pyrocko import trace as trld
            #trld.snuffle(synthetic_traces)
        timeev = util.str_to_time(syn_in.time_0())
        if cfg.Bool('synthetic_test_add_noise') is True:
            from noise_addition import add_noise
            trs_orgs = []
            calcStreamMapsyn = calcStreamMap.copy()
            #from pyrocko import trace
            for tracex in calcStreamMapsyn.keys():
                for trl in synthetic_traces:
                    if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                            trl.name()[3:13]) == str(tracex[3:]) or str(
                                trl.name()[3:11]) == str(tracex[3:]) or str(
                                    trl.name()[3:14]) == str(tracex[3:]):
                        tr_org = obspy_compat.to_pyrocko_trace(
                            calcStreamMapsyn[tracex])
                        tr_org.downsample_to(2.0)
                        trs_orgs.append(tr_org)
            store_id = syn_in.store()
            engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
            synthetic_traces = add_noise(trs_orgs,
                                         engine,
                                         source.pyrocko_event(),
                                         stations,
                                         store_id,
                                         phase_def='P')
        trs_org = []
        trs_orgs = []
        from pyrocko import trace
        fobj = os.path.join(arrayfolder, 'shift.dat')
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                        trl.name()[3:13]) == str(tracex[3:]) or str(
                            trl.name()[3:11]) == str(tracex[3:]) or str(
                                trl.name()[3:14]) == str(tracex[3:]):
                    mod = trl
                    recordstarttime = calcStreamMapsyn[
                        tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapsyn[
                        tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    if switch == 0:
                        tr_org.bandpass(4, cfg_f.flo(), cfg_f.fhi())
                    elif switch == 1:
                        tr_org.bandpass(4, cfg_f.flo2(), cfg_f.fhi2())
                    trs_orgs.append(tr_org)
                    tr_org_add = mod.chop(recordstarttime,
                                          recordendtime,
                                          inplace=False)
                    synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapsyn[tracex] = synthetic_obs_tr
                    trs_org.append(tr_org_add)
        calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') is True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted
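        # Note: PWS_stack is assumed to return the phase-weighted stack of
        # the array (weight=2 being the sharpness exponent); every entry of
        # the stream map is replaced by that single stacked beam here.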

    if cfg.Bool('shift_by_phase_cc') is True:
        from stacking import align_traces
        calcStreamMapshifted = calcStreamMap.copy()
        list_tr = []
        for trace in calcStreamMapshifted.keys():
            tr_org = calcStreamMapshifted[trace]
            list_tr.append(tr_org)
        shifts, ccs = align_traces(list_tr, 10, master=False)
        # apply each trace's own shift; shifts come back in the same order
        # as list_tr
        for shift, trace in zip(shifts, calcStreamMapshifted.keys()):
            tr_org = obspy_compat.to_pyrocko_trace(
                calcStreamMapshifted[trace])
            tr_org.shift(shift)
            shifted = obspy_compat.to_obspy_trace(tr_org)
            calcStreamMapshifted[trace] = shifted
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') is True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs

        event = model.Event(lat=float(ev.lat),
                            lon=float(ev.lon),
                            depth=ev.depth * 1000.,
                            time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for tracex in calcStreamMapshifted.keys():
            for trl in shifted_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]) or str(
                        trl.name()[3:13]) == str(tracex[3:]) or str(
                            trl.name()[3:11]) == str(tracex[3:]) or str(
                                trl.name()[3:14]) == str(tracex[3:]):
                    mod = trl
                    recordstarttime = calcStreamMapshifted[
                        tracex].stats.starttime.timestamp
                    recordendtime = calcStreamMapshifted[
                        tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapshifted[tracex])
                    tr_org_add = mod.chop(recordstarttime,
                                          recordendtime,
                                          inplace=False)
                    shifted_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapshifted[tracex] = shifted_obs_tr
        calcStreamMap = calcStreamMapshifted
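        # After beam forming, the observed records are replaced by the
        # phase-aligned traces, chopped to each station's original record
        # window.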

    weight = 1.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat),
                            lon=float(ev.lon),
                            depth=ev.depth * 1000.,
                            time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(
                                        directory, 'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces,
                         engine,
                         event,
                         stations,
                         100.,
                         store_id,
                         nwindows=1,
                         check_events=True,
                         phase_def='P')
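        # analyse() is assumed to return a scalar quality weight derived
        # from the noise level ahead of the P arrival on the beam; it scales
        # this array's contribution to the combined semblance.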

    if cfg.Bool('array_response') is True:
        from obspy.signal import array_analysis
        from obspy.core.stream import Stream
        # sliding window properties
        ntimesr = int((forerun + duration) / step)
        nsampr = int(winlen)
        nstepr = int(step)
        # slowness grid
        sll_x = -3.0
        slm_x = 3.0
        sll_y = -3.0
        slm_y = 3.0
        sl_s = 0.03
        # frequency properties
        frqlow = 1.0
        frqhigh = 8.0
        prewhiten = 0
        # restrict output
        semb_thres = -1e9
        vel_thres = -1e9
        stream_arr = Stream()
        for trace in calcStreamMapshifted.keys():
            stream_arr.append(calcStreamMapshifted[trace])
        results = array_analysis.array_processing(
            stream_arr, nsampr, nstepr, sll_x, slm_x, sll_y, slm_y, sl_s,
            semb_thres, vel_thres, frqlow, frqhigh, stime, etime, prewhiten)
        timestemp = results[:, 0]
        relative_relpow = results[:, 1]
        absolute_relpow = results[:, 2]
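        # Note: in ObsPy, array_processing() is expected to return one row
        # per window with columns (timestamp, relative power, absolute
        # power, backazimuth, slowness); only the first three are used here.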

    for trace in calcStreamMap.keys():
        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ###########################################################################

    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)

    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ###########################################################################

    c = 0
    streamCounter = 0

    for key in calcStreamMap.keys():
        streamID = key
        c2 = 0

        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1

        for gkey in TTTGridMap.keys():
            if streamID == gkey:
                traveltimes[streamCounter] = TTTGridMap[gkey]

        if streamCounter not in traveltimes:
            continue  # hs: thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon

        c += 1
        streamCounter += 1

    #endfor
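    # Layout: traces is (nstations, minSampleCount); traveltime flattens the
    # search grid row-major, so index x * dimY + y maps to the (lat, lon)
    # node stored in latv/lonv at the same position.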

    ################ CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ########
    nsamp = winlen * new_frequence

    nstep = step * new_frequence
    migpoints = dimX * dimY

    dimZ = 0
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d  NTIMES: %d' % (flag, ntimes))

    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(Gmint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))

#===================compressed sensing=================================
    try:
        cs = cfg.cs()
    except Exception:
        cs = 0
    if cs == 1:
        csmaxvaluev = num.ndarray(ntimes, dtype=float)
        csmaxlatv = num.ndarray(ntimes, dtype=float)
        csmaxlonv = num.ndarray(ntimes, dtype=float)
        folder = Folder['semb']
        fobjcsmax = open(os.path.join(folder, 'csmax_%s.txt' % switch), 'w')
        traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
        traveltime2 = toMatrix(traveltimes, dimX * dimY)  # for relstart
        traveltime = traveltime.reshape(dimX * dimY, nostat)
        import scipy.fftpack as spfft
        import cvxpy as cvx
        A = spfft.idct(traveltime, norm='ortho', axis=0)
        vx = cvx.Variable(dimX * dimY)
        res = cvx.Variable(1)
        objective = cvx.Minimize(cvx.norm(res, 1))
        back2 = num.zeros([dimX, dimY])
        nsampcs = int(nsamp)
        fobj = open(os.path.join(
            folder,
            '%s-%s_%03d.cs' % (switch, Origin['depth'], nsampcs)), 'w')
        for i in range(ntimes):
            ydata = []
            try:
                for tr in traces:
                    relstart = int((dimX * dimY - mint) * new_frequence
                                   + 0.5) + i * nstep
                    tr = spfft.idct(
                        tr[relstart + i:relstart + i + dimX * dimY],
                        norm='ortho', axis=0)
                    ydata.append(tr)
                ydata = num.asarray(ydata)
                ydata = ydata.reshape(dimX * dimY, nostat)

                constraints = [
                    res == cvx.sum_entries(0 + num.sum(
                        [ydata[:, x] - A[:, x] * vx
                         for x in range(nostat)]))]

                prob = cvx.Problem(objective, constraints)
                result = prob.solve(verbose=False, max_iters=200)

                x = num.squeeze(num.array(vx.value))
                back1 = x.reshape(dimX, dimY)
                back2 = back2 + back1
                max_cs = num.max(back1)
                idx = num.where(back1 == back1.max())
                csmaxvaluev[i] = max_cs
                csmaxlatv[i] = latv[idx[0]]
                csmaxlonv[i] = lonv[idx[1]]
                fobj.write('%.5f %.5f %.20f\n'
                           % (latv[idx[0]], lonv[idx[1]], max_cs))
                fobjcsmax.write('%.5f %.5f %.20f\n'
                                % (latv[idx[0]], lonv[idx[1]], max_cs))
            except Exception:
                continue
        fobj.close()
        fobjcsmax.close()
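    # The block above sketches a compressed-sensing variant of the imaging
    # step: the semblance map is assumed sparse under a DCT basis, and an
    # L1 objective solved with cvxpy recovers it from the projected traces.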

#==================================semblance calculation========================================

    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)

    traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
    USE_C_CODE = False
    #try:
    if USE_C_CODE:
        import Cm
        import CTrig
        start_time = time.time()
        k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                     new_frequence, minSampleCount, latv, lonv, traveltimes,
                     traces)
        print("--- %s seconds ---" % (time.time() - start_time))
    else:
        start_time = time.time()
        ntimes = int((forerun + duration) / step)
        nsamp = int(winlen)
        nstep = int(step)
        Gmint = cfg.Int('forerun')
        k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                  new_frequence, minSampleCount, latv, lonv, traveltimes,
                  traces, calcStreamMap, timeev)
        print("--- %s seconds ---" % (time.time() - start_time))
    #except ValueError:
    #    k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
    #                 new_frequence, minSampleCount, latv, lonv, traveltimes,
    #                 traces)
    #    print("loaded tttgrid has probably wrong dimensions or stations, "
    #          "delete ttgrid or exchange is recommended")

    t2 = time.time()

    Logfile.add('%s took %0.3f s' % ('CALC:', (t2 - t1)))

    partSemb = k
    partSemb = partSemb.reshape(ntimes, migpoints)

    return partSemb, weight, array_center
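# Hedged sketch (not from the source): how the per-array results returned
# above could be combined into one weighted semblance stack;
# `results_per_array` is an assumed list of (partSemb, weight,
# array_center) tuples, one entry per processed array.
def stack_partial_semblances(results_per_array):
    total = None
    for part_semb, weight, _center in results_per_array:
        contrib = part_semb * weight          # weight down noisy arrays
        total = contrib if total is None else total + contrib
    return total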
Example #7
def get_parallel_mtqt(i,
                      targets,
                      store_id,
                      post,
                      pre,
                      stations,
                      mod,
                      params,
                      strikes,
                      dips,
                      rakes,
                      vs,
                      ws,
                      store_dirs,
                      batch_loading=192,
                      npm=0,
                      dump_full=False,
                      seiger1f=False,
                      path_count=0,
                      paths_disks=None,
                      con_line=True,
                      max_rho=0.,
                      mag=-6):

    engine = LocalEngine(store_superdirs=[store_dirs])
    store = engine.get_store(store_id)
    lat, lon, depth = params
    traces_uncuts = []
    tracess = []
    sources = []
    mtqt_ps = []
    count = 0
    npm = len(strikes) * len(dips) * len(rakes) * len(vs) * len(ws)
    npm_rem = npm
    data_events = []
    labels_events = []
    events = []
    if seiger1f is True:
        current_path = paths_disks[path_count]
    k = 0
    for strike in strikes:
        for dip in dips:
            for rake in rakes:
                for v in vs:
                    for w in ws:
                        name = "scenario" + str(i)
                        event = model.Event(name=name,
                                            lat=lat,
                                            lon=lon,
                                            magnitude=mag,
                                            depth=depth)
                        kappa = strike
                        sigma = rake
                        h = dip

                        source_mtqt = MTQTSource(lon=lon,
                                                 lat=lat,
                                                 depth=depth,
                                                 w=w,
                                                 v=v,
                                                 kappa=kappa,
                                                 sigma=sigma,
                                                 h=h,
                                                 magnitude=mag)

                        response = engine.process(source_mtqt, targets)
                        traces_synthetic = response.pyrocko_traces()

                        event.moment_tensor = \
                            source_mtqt.pyrocko_moment_tensor()

                        if dump_full is True:
                            # keep an uncut copy of the synthetics
                            traces_uncut = copy.deepcopy(traces_synthetic)
                            traces_uncuts.append(traces_uncut)
                        traces = []
                        for tr in traces_synthetic:
                            for st in stations:
                                if st.station == tr.station:
                                    dist = (orthodrome.distance_accurate50m(
                                        source_mtqt.lat, source_mtqt.lon,
                                        st.lat, st.lon) + st.elevation
                                            )  # * cake.m2d
                                    depth = source_mtqt.depth
                                    arrival = store.t('P', (depth, dist))
                                    tr.chop(arrival - pre, arrival + post)
                                    traces.append(tr)
                            nsamples = len(tr.ydata)

                        rho = 1
                        mtqt_ps = [[rho, v, w, kappa, sigma, h]]

                        data_event, nsamples = prepare_waveforms([traces])
                        label_event = prepare_labels([event], mtqt_ps)
                        data_events.append(data_event)
                        labels_events.append(label_event)
                        events.append(event)

                        if count == batch_loading or npm_rem < batch_loading:
                            npm_rem = npm_rem - batch_loading
                            k = k + 1
                            if seiger1f is True:
                                # approximate free space in bytes on the
                                # current disk
                                stv = os.statvfs(current_path)
                                free = stv.f_bsize * stv.f_bavail
                                if free < 80000:
                                    current_path = paths_disks[path_count + 1]
                                    path_count = path_count + 1
                                f = open(
                                    current_path +
                                    "grid_%s_%s_%s_%s/batch_%s_grid_%s_SDR%s_%s_%s_%s_%s"
                                    % (store_id, lat, lon, int(depth), count,
                                       i, strike, dip, rake, v, w), 'ab')
                                pickle.dump([
                                    data_events, labels_events,
                                    len(stations), nsamples, events
                                ], f)
                                f.close()
                            else:
                                util.ensuredir(
                                    "grids/grid_%s_%s_%s_%s/" %
                                    (store_id, lat, lon, int(depth)))
                                f = open(
                                    "grids/grid_%s_%s_%s_%s/batch_%s_grid_%s_SDR%s_%s_%s_%s_%s"
                                    % (store_id, lat, lon, int(depth), count,
                                       i, strike, dip, rake, v, w), 'ab')
                                pickle.dump([
                                    data_events, labels_events, nsamples,
                                    events
                                ], f)
                                f.close()

                            count = 0
                            data_events = []
                            labels_events = []
                            events = []
                            traces_uncuts = []
                            mtqt_ps = []

                        else:
                            count = count + 1
    return []
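# Hedged sketch of the parameter grid passed to get_parallel_mtqt above.
# The v/w ranges follow the Tape & Tape lune parameterization
# (v in [-1/3, 1/3], w in [-3*pi/8, 3*pi/8]); everything else is an
# illustrative assumption.
import numpy as num

strikes = num.linspace(0., 330., 12)
dips = num.linspace(10., 80., 8)
rakes = num.linspace(-170., 170., 12)
vs = num.linspace(-1. / 3., 1. / 3., 5)
ws = num.linspace(-3. * num.pi / 8., 3. * num.pi / 8., 5)
npm = len(strikes) * len(dips) * len(rakes) * len(vs) * len(ws)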
Example #8
class patch:
    def __init__(self,
                 name,
                 ss,
                 ds,
                 east,
                 north,
                 down,
                 length,
                 width,
                 strike,
                 dip,
                 sig_ss,
                 sig_ds,
                 sig_east,
                 sig_north,
                 sig_down,
                 sig_length,
                 sig_width,
                 sig_strike,
                 sig_dip,
                 dist='Unif',
                 connectivity=False,
                 conservation=False):

        self.name = name
        self.ss, self.sss = ss, sig_ss
        self.ds, self.sds = ds, sig_ds
        self.x1, self.sx1 = north, sig_north  # north top-left corner
        self.x2, self.sx2 = east, sig_east  # east top-left corner
        self.x3, self.sx3 = down, sig_down
        self.l, self.sl = length, sig_length
        self.w, self.sw = width, sig_width
        self.strike, self.sstrike = strike, sig_strike
        self.dip, self.sdip = dip, sig_dip
        self.dist = dist
        self.connectivity = connectivity
        self.conservation = conservation
        # initiate variable
        self.connectindex = 0
        # set uncertainties to 0 for connected patches
        if self.connectivity is not False:
            self.sstrike, self.sx3, self.sx2, self.sx1 = 0, 0, 0, 0

        # create model vector
        self.param = [
            '{} strike slip'.format(self.name),
            '{} dip slip'.format(self.name),
            '{} north'.format(self.name),
            '{} east'.format(self.name),
            '{} down'.format(self.name),
            '{} length'.format(self.name),
            '{} width'.format(self.name),
            '{} strike'.format(self.name),
            '{} dip'.format(self.name)]

        self.m = self.tolist()
        self.sigmam = self.sigtolist()
        # self.mmin = list(map(operator.sub, self.m, self.sigmam))
        # self.mmax = list(map(operator.add, self.m, self.sigmam))

        # number of parameters per patch
        self.Mpatch = len(self.m)

    def connect(self, seg):
        # set strike
        self.strike = seg.strike

        # compute vertical distance and depth
        self.x3 = seg.x3 - self.w * math.sin(np.deg2rad(self.dip))

        # compute horizontal distance
        yp = math.cos(np.deg2rad(self.dip)) * self.w
        east_shift = -math.cos(np.deg2rad(seg.strike)) * yp
        north_shift = math.sin(np.deg2rad(seg.strike)) * yp
        self.x2, self.x1 = seg.x2 + east_shift, seg.x1 + north_shift

        # set uncertainties to 0
        # self.sstrike, self.sx3, self.sx2, self.sx1 = 0, 0, 0, 0

        # update m vector !!!! dangerous !!!!
        self.m = self.tolist()
        # self.sigmam = self.sigtolist()

    def build_prior(self):
        self.sampled = []
        self.fixed = []
        self.priors = []
        self.mmin, self.mmax = [], []

        for name, m, sig in zip(self.param, self.m, self.sigmam):
            if sig > 0.:
                # print name, m-sig, m+sig
                self.mmin.append(m - sig)
                self.mmax.append(m + sig)
                if self.dist == 'Normal':
                    p = pymc.Normal(name, mu=m, sd=sig)
                elif self.dist == 'Unif':
                    p = pymc.Uniform(name,
                                     lower=m - sig,
                                     upper=m + sig,
                                     value=m)
                else:
                    print(
                        'Problem with prior distribution definition of '
                        'parameter {}'.format(name))
                    sys.exit(1)
                self.sampled.append(name)
                self.priors.append(p)
            elif sig == 0:
                self.fixed.append(name)
            else:
                print('Problem with prior definition of parameter {}'.format(
                    name))
                sys.exit(1)
        # number of free parameters per patch
        self.Mfree = len(self.sampled)

    def info(self):
        print("name segment:", self.name)
        print("# ss     ds     x1(km)     x2(km)     x3(km)    "
              "length(km)     width(km)   strike   dip  ")
        print(' {:.2f}   {:.2f}   {:.1f}   {:.1f}   {:.2f}   {:.2f}   '
              '{:.2f}    {:d}     {:d}'.format(*(self.tolist())))
        print("#sigma_ss   sigma_ds   sigma_x1  sigma_x2  sigma_x3  "
              "sigma_length  sigma_width   sigma_strike  sigma_dip  ")
        print('  {:.2f}   {:.2f}   {:.1f}   {:.1f}   {:.2f}   {:.2f}   '
              '{:.2f}    {:d}     {:d}'.format(*(self.sigtolist())))
        print()

    def tolist(self):
        return [
            self.ss, self.ds, self.x1, self.x2, self.x3, self.l, self.w,
            int(self.strike),
            int(self.dip)
        ]

    def sigtolist(self):
        return [
            self.sss, self.sds, self.sx1, self.sx2, self.sx3, self.sl, self.sw,
            int(self.sstrike),
            int(self.sdip)
        ]

    def loadEngine(self, store, store_path):
        # load engine
        self.eng = LocalEngine(store_superdirs=store_path,
                               default_store_id=store)

    def engine(self, target, ref):

        # print store_path, store
        # print ref[0], ref[1]
        # print self.x1*1000., self.x2*1000., self.x3*1000., self.w*1000., self.l*1000., self.dip,
        # print np.rad2deg(math.atan2(self.ds,self.ss)), self.strike, self.time, (self.ss**2+self.ds**2)**0.5
        # print

        # print self.time
        self.source = RectangularSource(
            lon=ref[0],
            lat=ref[1],
            # distances in meters
            north_shift=float(self.x1 * 1000.),
            east_shift=float(self.x2 * 1000.),
            depth=float(self.x3 * 1000.),
            width=float(self.w * 1000.),
            length=float(self.l * 1000.),
            # angles in degrees
            dip=float(self.dip),
            rake=float(np.rad2deg(math.atan2(self.ds, self.ss))),
            strike=float(self.strike),
            slip=float((self.ss**2 + self.ds**2)**0.5),
            time=self.time,
            anchor='top')
        # print self.source

        return self.eng.process(self.source, target)
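# Hedged usage sketch of the `patch` class above; all numbers are
# illustrative assumptions, not values from the source. A second segment
# is tied to the first and inherits its strike and position via connect().
seg1 = patch('seg1', ss=1.0, ds=0.2, east=0., north=0., down=5.,
             length=10., width=5., strike=30, dip=60,
             sig_ss=0.5, sig_ds=0.2, sig_east=1., sig_north=1.,
             sig_down=1., sig_length=2., sig_width=2., sig_strike=10,
             sig_dip=5)
seg2 = patch('seg2', ss=0.5, ds=0.1, east=0., north=0., down=0.,
             length=10., width=5., strike=30, dip=45,
             sig_ss=0.5, sig_ds=0.1, sig_east=0., sig_north=0.,
             sig_down=0., sig_length=2., sig_width=2., sig_strike=0,
             sig_dip=5, connectivity='seg1')
seg2.connect(seg1)      # reposition seg2 along seg1's dip direction
seg2.build_prior()      # builds pymc priors for the free parameters
print(seg2.sampled)     # names of the sampled parameters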
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=pjoin(settings_fn))
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)
            traces = io.load(settings.trace_filename)
            event = model.load_events(settings.event_filename)
            assert len(event)==1
            event = event[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = list(filter(
                lambda s: match_nslc('%s.%s.%s.*' % s.nsl(),
                                     traces[0].nslc_id), stations))
            if len(station) != 1:
                logger.error('no matching station found for %s'
                             % '.'.join(traces[0].nslc_id))
                continue
            else:
                station = station[0]
            targets = [station_to_target(station, quantity=settings.quantity, store_id=settings.store_id)]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' % error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets, sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                if True:
                    tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces)==1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            mesh_fwidth = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            misfits_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc-fc*2./5.
                fr_max = fc+fc*2./5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc+width*0.5), order=4, type='low'),
                            ButterworthResponse(corner=float(fc-width*0.5), order=4, type='high')]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(candidate=candidate, setup=settings.misfit_setup, debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(len(test_depths)+1, 1, i+1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m/n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits: \n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits*100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return 

    event = model.load_events(settings.event_filename)
    assert len(event)==1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(filter(lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id), stations))
    assert len(station) == 1
    station = station[0] 
    targets = [station_to_target(station, quantity=quantity, store_id=settings.store_id)]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz-minz)*0.02
    for s, t, tr in request.iter_results():
        if quantity=='velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t(
            'begin', (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left='off', right='off',
                           labelleft='off')

        y_pos = s.depth
        xdata = tr.get_xdata()-onset-s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time+onset+zoom_window[0],
                          tmax=event.time+onset+zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata/ampl_scale)*relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color='black', alpha=0.5)
        ax.text(zoom_window[0]*1.09, y_pos, '%1.1f' % (s.depth/1000.), horizontalalignment='right') #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                      distances=[s.distance_to(t)*cake.m2d],
                                      zstart=s.depth)

            try:
                t_pP = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t_pP - onset] * 2
                y = [y_pos-(maxz-minz)*0.025, y_pos+(maxz-minz)*0.025]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(x_marker[1]-x_marker[1]*0.005, y[1], label,
                        #fontsize=12,
                        color='black',
                        verticalalignment='top',
                        horizontalalignment='right')

            except IndexError:
                logger.warning('no pP phase at d=%s z=%s stat=%s' % (s.distance_to(t)*cake.m2d,
                                                                     s.depth, station.station))

    if len(traces) == 0:
        raise Exception('No trace found!')
    if len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity=='displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left='off', right='off',
                           labelleft='off')

        y_pos = event.depth
        xdata = tr.get_xdata()-onset+correction
        tr_ydata = tr.get_ydata() *-1
        visible = tr.chop(tmin=onset+zoom_window[0]+correction,
                          tmax=onset+zoom_window[1]+correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata/ampl_scale * settings.gain*settings.gain_record)*relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin-zrange*0.2, zmax+zrange*0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0, 0.6, 'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure) #, fontsize=12.)

    if fill:
        ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color=settings.color, alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax+zrange*0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {'array-id': ''.join(station.nsl()),
                  'event_name': event.name,
                  'event_time': time_to_str(event.time)}
        ax.text(0.5, 1.05, settings.title % params,
                horizontalalignment='center', 
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        quantity_info = ''
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        if settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        if settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '

        captions = {'filters':''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. '%(f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1./store.config.deltat
        cax.text(0, 0, 'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s' % captions,
                 fontsize=12, transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
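# Hedged usage sketch (assumed filename, mirroring how invert() above
# locates its per-array settings file):
if __name__ == '__main__':
    settings = PlotSettings.load(filename='array_data/A1/plot_settings.yaml')
    plot(settings, show=True)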
Example #12
def doCalc_syn(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
               TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
               syn_in, parameter):
    '''
    Method for calculating the semblance of one station array.
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT  : %f  MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    dimX = cfg.dimX()                    # ('dimx')
    dimY = cfg.dimY()                    # ('dimy')
    winlen = cfg.winlen()                # ('winlen')
    step = cfg.step()                    # ('step')

    new_frequence = cfg.newFrequency()   # ('new_frequence')
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')

    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)
    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ############################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)  # right number of stations?


    store_id = syn_in.store()
    engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])

    targets = []
    for st in stations:
        target = Target(
            lat=st.lat,
            lon=st.lon,
            store_id=store_id,
            codes=(st.network, st.station, st.location, 'BHZ'),
            tmin=-1900,
            tmax=3900,
            interpolation='multilinear',
            quantity=cfg.quantity())
        targets.append(target)

    if syn_in.nsources() == 1:
        if syn_in.use_specific_stf() is True:
            stf = syn_in.stf()
            exec(stf)
        else:
            stf = STF()
        if syn_in.source() == 'RectangularSource':
            source = RectangularSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                width=syn_in.width_0() * 1000.,
                length=syn_in.length_0() * 1000.,
                nucleation_x=syn_in.nucleation_x_0(),
                slip=syn_in.slip_0(),
                nucleation_y=syn_in.nucleation_y_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()))
        if syn_in.source() == 'DCSource':
            source = DCSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()),
                magnitude=syn_in.magnitude_0())

    else:
        sources = []
        for i in range(syn_in.nsources()):
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(RectangularSource(
                    lat=float(syn_in.lat_1(i)),
                    lon=float(syn_in.lon_1(i)),
                    depth=syn_in.depth_syn_1(i) * 1000.,
                    strike=syn_in.strike_1(i),
                    dip=syn_in.dip_1(i),
                    rake=syn_in.rake_1(i),
                    width=syn_in.width_1(i) * 1000.,
                    length=syn_in.length_1(i) * 1000.,
                    nucleation_x=syn_in.nucleation_x_1(i),
                    slip=syn_in.slip_1(i),
                    nucleation_y=syn_in.nucleation_y_1(i),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_1(i))))
            if syn_in.source() == 'DCSource':
                sources.append(DCSource(
                    lat=float(syn_in.lat_1(i)),
                    lon=float(syn_in.lon_1(i)),
                    depth=syn_in.depth_1(i) * 1000.,
                    strike=syn_in.strike_1(i),
                    dip=syn_in.dip_1(i),
                    rake=syn_in.rake_1(i),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_1(i)),
                    magnitude=syn_in.magnitude_1(i)))
        source = CombiSource(subsources=sources)
    response = engine.process(source, targets)

    synthetic_traces = response.pyrocko_traces()
    if cfg.Bool('synthetic_test_add_noise') is True:
        from noise_addition import add_noise
        trs_orgs = []
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]):
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    tr_org.downsample_to(2.0)
                    trs_orgs.append(tr_org)
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        synthetic_traces = add_noise(trs_orgs, engine,
                                     source.pyrocko_event(), stations,
                                     store_id, phase_def='P')
    trs_org = []
    trs_orgs = []
    fobj = os.path.join(arrayfolder, 'shift.dat')
    xy = num.loadtxt(fobj, usecols=1, delimiter=',')
    calcStreamMapsyn = calcStreamMap.copy()
    for tracex in calcStreamMapsyn.keys():
        for trl in synthetic_traces:
            if str(trl.name()[4:12]) == str(tracex[4:]):
                mod = trl
                recordstarttime = calcStreamMapsyn[
                    tracex].stats.starttime.timestamp
                recordendtime = calcStreamMapsyn[
                    tracex].stats.endtime.timestamp
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapsyn[tracex])
                trs_orgs.append(tr_org)
                tr_org_add = mod.chop(recordstarttime, recordendtime,
                                      inplace=False)
                synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                calcStreamMapsyn[tracex] = synthetic_obs_tr
                trs_org.append(tr_org_add)
    calcStreamMap = calcStreamMapsyn