Example #1
    def finish(self, reference_nsl, fband, taper):
        for tr in self.traces:
            if len(tr.ydata) > 0 and num.max(num.abs(tr.get_ydata())) != 0:
                tr_backup = tr.copy()
                tr_backup.set_location('B')
                dtype = type(tr.ydata[0])
                tr.ydata -= dtype(tr.get_ydata().mean())
                tr.highpass(fband['order'], fband['corner_hp'])
                tr.taper(taper, chop=False)
                tr.lowpass(fband['order'], fband['corner_lp'])
                if num.max(num.abs(tr.get_ydata())) != 0:
                    self.max_tr[tr.nslc_id] = num.max(num.abs(tr.get_ydata()))

        if reference_nsl is not True:
            reference_nslc = list(
                filter(
                    lambda x: util.match_nslc(
                        guess_nsl_template(reference_nsl), x),
                    self.max_tr.keys()))
            self.____reference_nslc = reference_nslc
            if len(reference_nslc) != 1:
                logger.info('no reference trace available. '
                            'remains unfinished: %s' % self.event)
                self.reference_scale = 1.
                # self.set_relative_scalings()
                self.finished = False
            else:
                self.reference_scale = self.max_tr[reference_nslc[0]]
                self.set_relative_scalings()
                self.finished = True
        else:
            self.reference_scale = 1.
            self.set_relative_scalings()
            self.finished = True
Example #2
    def finish(self, reference_nsl, fband, taper):
        for tr in self.traces:
            tr_backup = tr.copy()
            tr_backup.set_location('B')
            tr.ydata -= tr.get_ydata().mean()
            tr.highpass(fband['order'], fband['corner_hp'])
            tr.taper(taper, chop=False)
            tr.lowpass(fband['order'], fband['corner_lp'])
            self.max_tr[tr.nslc_id] = num.max(num.abs(tr.get_ydata()))

        if reference_nsl is not True:
            reference_nslc = list(filter(
                lambda x: util.match_nslc(
                    guess_nsl_template(reference_nsl), x),
                self.max_tr.keys()))
            self.____reference_nslc = reference_nslc

            if len(reference_nslc) != 1:
                logger.info('no reference trace available. '
                            'remains unfinished: %s' % self.event)
                self.reference_scale = 1.
                #self.set_relative_scalings()
                self.finished = False
            else:
                self.reference_scale = self.max_tr[reference_nslc[0]]
                self.set_relative_scalings()
                self.finished = True
        else:
            self.reference_scale = 1.
            self.set_relative_scalings()
            self.finished = True
Example #3
    def nslc_to_index(self):
        '''Returns a dictionary which maps nslc codes to trace indices.'''
        d = OrderedDict()
        idx = 0
        for nslc in self.config.channels:
            if not util.match_nslc(self.config.blacklist, nslc):
                d[nslc] = idx
                idx += 1
        return d
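
All of these examples lean on the same helper, so here is a minimal, self-contained sketch of the `util.match_nslc` pattern semantics; the network and station codes are made up for illustration:

from pyrocko import util

# A pattern is an fnmatch-style 'NET.STA.LOC.CHA' string; the code to
# match may be given as a tuple of codes or as an already joined string.
print(util.match_nslc('GE.STA1.*.BHZ', ('GE', 'STA1', '', 'BHZ')))     # True
print(util.match_nslc('GE.*', 'II.STA3..BHZ'))                         # False

# A list of patterns matches if any one of them does:
print(util.match_nslc(['GE.*', 'GR.*'], ('GR', 'STA2', '00', 'HHZ')))  # True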
Example #4
    def __call__(self, trs):
        matched = []
        for tr in trs:
            nslc = '.'.join(tr.nslc_id)

            if self.white_list and nslc in self.white_list:
                matched.append(tr)
                continue
            if self.white_list_regex and util.match_nslc(
                    self.white_list_regex, nslc):
                matched.append(tr)
        return matched
Example #5
def make_targets(pile, stations):
    targets = []
    for nslc_id in pile.nslc_ids.keys():
        for s in stations:
            if util.match_nslc('%s.*'%(s.nsl_string()), nslc_id):
                targets.append(Target(lat=s.lat,
                                     lon=s.lon,
                                     depth=s.depth,
                                     elevation=s.elevation,
                                     codes=nslc_id))
            else:
                continue
    return targets
Example #7
    def get_weights(self, nsls):
        if self.weights is None:
            return num.ones(len(nsls), dtype=float)

        else:
            weights = num.empty(len(nsls))
            selectors = self.weights.keys()
            for insl, nsl in enumerate(nsls):
                weights[insl] = 1.
                for selector in selectors:
                    if util.match_nslc(selector, nsl):
                        weights[insl] = self.weights[selector]
                        break
            return weights
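
As a usage illustration of the pattern-based weighting above, a self-contained sketch; the weight mapping and NSL codes are invented:

import numpy as num
from pyrocko import util

# pattern -> weight; as in get_weights, the first matching selector wins
weights = {'GE.STA1.*': 0.5, 'GR.*': 2.0}
nsls = [('GE', 'STA1', ''), ('GR', 'STA2', '00'), ('II', 'STA3', '')]

resolved = num.ones(len(nsls))
for insl, nsl in enumerate(nsls):
    for selector, weight in weights.items():
        if util.match_nslc(selector, nsl):
            resolved[insl] = weight
            break

print(resolved)  # [0.5 2.  1. ]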
Example #8
def get_average_scaling(trs_dict, reference_nsl_pattern):

    scalings = {}
    for event, trs in trs_dict.items():
        refs = [
            tr for tr in trs
            if util.match_nslc(reference_nsl_pattern, tr.nslc_id)
        ]
        for ref in refs:
            for tr in trs:
                if tr.channel == ref.channel:
                    append_to_dict(scalings, tr.nslc_id,
                                   power(tr) / power(ref))

    for k, v in scalings.items():
        scalings[k] = num.mean(v)

    return scalings
Example #9
def get_targets(stations, data_pile, store_id=None):
    targets = []
    # work on a copy: entries are removed from nslc_ids while iterating below
    nslc_ids = list(data_pile.nslc_ids.keys())
    for station in stations:
        for nslc_id in list(nslc_ids):
            if match_nslc('%s.*' % station.nsl_string(), nslc_id):
                targets.append(
                    Target(
                        codes=nslc_id,
                        lat=station.lat,
                        lon=station.lon,
                        elevation=station.elevation,
                        depth=station.depth,
                        store_id=store_id)
                    )

                nslc_ids.remove(nslc_id)


    return targets
Example #10
    def finish(self, method, fband, taper, ev_counter):
        self.logs.info('METHOD %s' % method)
        self.logs.debug(len(method))

        if len(method) == 2:
            reference_nsl = method[1][1]
            reference_nslc = list(
                filter(
                    lambda x: util.match_nslc(
                        guess_nsl_template(reference_nsl), x),
                    self.max_tr.keys()))

            self.____reference_nslc = reference_nslc

            if len(reference_nslc) != 1:
                self.logs.info('no reference trace available. ' +
                               'remains unfinished: %s' % self.event)
                self.finished = False
            else:
                self.reference_scale = self.max_tr[reference_nslc[0]]
                self.set_relative_scalings()
                self.finished = True

        elif method == 'scale_one' or method == 'median_all_avail':
            self.reference_scale = 1.
            self.set_relative_scalings()
            self.finished = True

        elif method == 'syn_comp':
            for nslc_id, maxA in self.max_tr.items():
                try:
                    self.relative_scalings[nslc_id] = maxA / self.max_tr_syn[
                        nslc_id[0:2]]
                except KeyError:
                    self.logs.warning(
                        'syn data missing: %s' % str(nslc_id[0:2]))
            self.finished = True
Example #11
    def process(self, fband, taper):
        for event in self.candidates:
            section = Section(event, self.stations)
            skipped = 0
            unskipped = 0
            for i_s, s in enumerate(self.stations):
                dist = distance_accurate50m(event, s)
                arrival = self.phaser.t(
                    self.phase_selection, (event.depth, dist))
                if arrival is None:
                    skipped += 1
                    logger.debug(
                        'skipping event %s at station %s. '
                        'Reason: no phase arrival' % (event, s))
                    continue
                else:
                    unskipped += 1
                selector = lambda tr: util.match_nslc(
                    '%s.*%s' % (s.nsl_string(), self.component), tr.nslc_id)
                window_min, window_max = self.window.t()
                tr = self.data_pile.chopper(tmin=event.time+arrival - window_min,
                                            tmax=event.time+arrival + window_max,
                                            trace_selector=selector)
                _tr = next(tr)
                try:
                    assert len(_tr) in (0, 1)
                    self.all_nslc_ids.add(_tr[0].nslc_id)
                    section.extend(_tr)
                except IndexError:
                    continue
                try:
                    next(tr)
                    raise Exception('More than one trace returned')
                except StopIteration:
                    continue

            logger.debug('skipped %s/%s'%(skipped, unskipped))

            section.finish(self.reference_nsl, fband, taper)
            self.sections.append(section)
Example #12
    def process(self, fband, taper):
        for event in self.candidates:
            section = Section(event, self.stations)
            skipped = 0
            unskipped = 0
            for i_s, s in enumerate(self.stations):
                dist = distance_accurate50m(event, s)
                arrival = self.phaser.t(self.phase_selection,
                                        (event.depth, dist))
                if arrival is None:
                    skipped += 1
                    logger.debug(
                        'skipping event %s at station %s. '
                        'Reason: no phase arrival' % (event, s))
                    continue
                else:
                    unskipped += 1
                selector = lambda tr: util.match_nslc(
                    '%s.*%s' % (s.nsl_string(), self.component), tr.nslc_id)
                window_min, window_max = self.window.t()
                tr = self.data_pile.chopper(
                    tmin=event.time + arrival - window_min,
                    tmax=event.time + arrival + window_max,
                    trace_selector=selector)
                _tr = next(tr, False)  # does not work without try and except?!
                if not _tr or len(_tr) != 1:
                    continue
                if not _tr[0].ydata.size:
                    continue
                self.all_nslc_ids.add(_tr[0].nslc_id)

                section.extend(_tr)

            logger.debug('skipped %s/%s' % (skipped, unskipped))

            section.finish(self.reference_nsl, fband, taper)
            self.sections.append(section)
Example #13
    def selector(tr):
        return util.match_nslc('%s.%s.%s.*' % nsl_id, tr.nslc_id)
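
Selectors like this are meant to be handed to a pile's chopper, as in Examples #11 and #12; a hedged sketch (`data_pile`, `tmin` and `tmax` are assumptions, not part of the example):

# nsl_id is a (network, station, location) tuple closed over by selector:
nsl_id = ('GE', 'STA1', '')
for trs in data_pile.chopper(tmin=tmin, tmax=tmax, trace_selector=selector):
    ...  # only traces of the selected station arrive here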
Example #14
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=pjoin(settings_fn))
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(
                float(zstart) * km,
                float(zstop) * km,
                float(inkr) * km)
            traces = io.load(settings.trace_filename)
            event = model.load_events(settings.event_filename)
            assert len(event) == 1
            event = event[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = list(filter(
                lambda s: match_nslc('%s.%s.%s.*' % s.nsl(),
                                     traces[0].nslc_id), stations))
            if len(station) != 1:
                logger.error('expected exactly one matching station, '
                             'found %i' % len(station))
            else:
                station = station[0]
            targets = [
                station_to_target(station,
                                  quantity=settings.quantity,
                                  store_id=settings.store_id)
            ]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' %
                                   error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources,
                                             key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources,
                                              key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(
                            t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(
                            t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets,
                                             sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces) == 1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(
                mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(
                mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            mesh_fwidth = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            misfits_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc - fc * 2. / 5.
                fr_max = fc + fc * 2. / 5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(
                            mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(
                            mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc + width * 0.5),
                                                order=4,
                                                type='low'),
                            ButterworthResponse(corner=float(fc - width * 0.5),
                                                order=4,
                                                type='high')
                        ]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(
                            candidate=candidate,
                            setup=settings.misfit_setup,
                            debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(
                                len(test_depths) + 1, 1, i + 1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m / n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits: \n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits * 100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
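
For readability, the (fc, width) grid that the bandpass sweep in `invert` iterates over, extracted as a standalone snippet:

import numpy as num

center_freqs = num.arange(1., 9., 4.)  # -> [1., 5.]
num_f_widths = len(center_freqs)
for fc in center_freqs:
    # bandwidths sweep from 3/5*fc to 7/5*fc, as in the loop above
    widths = num.linspace(fc - fc * 2. / 5., fc + fc * 2. / 5., num_f_widths)
    print(fc, widths)  # 1.0 [0.6 1.4], then 5.0 [3. 7.]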
Example #15
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(filter(
        lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id),
        stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(
                    t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(
                    t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(
        zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == 'velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t('begin',
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left='off',
                           right='off',
                           labelleft='off')

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time + onset + zoom_window[0],
                          tmax=event.time + onset + zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color='black',
                            alpha=0.5)
        ax.text(zoom_window[0] * 1.09,
                y_pos,
                '%1.1f' % (s.depth / 1000.),
                horizontalalignment='right')  #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                    distances=[s.distance_to(t) * cake.m2d],
                                    zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    #fontsize=12,
                    color='black',
                    verticalalignment='top',
                    horizontalalignment='right')

            except IndexError:
                logger.warning(
                    'no pP phase at d=%s z=%s stat=%s' %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception('No Trace found!')
    if len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == 'displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left='off',
                           right='off',
                           labelleft='off')

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=onset + zoom_window[0] + correction,
                          tmax=onset + zoom_window[1] + correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0,
                0.6,
                'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure)  #, fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {
            'array-id': ''.join(station.nsl()),
            'event_name': event.name,
            'event_time': time_to_str(event.time)
        }
        ax.text(0.5,
                1.05,
                settings.title % params,
                horizontalalignment='center',
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        if settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        if settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '

        captions = {'filters': ''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % (
                f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1. / store.config.deltat
        cax.text(
            0,
            0,
            'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s'
            % captions,
            fontsize=12,
            transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
Example #16
    def process(self,
                event,
                timing,
                bazi=None,
                slow=None,
                restitute=False,
                *args,
                **kwargs):
        '''
        :param timing: CakeTiming. Uses the definition without the offset.
        :param fn_dump_center: filename to where center stations shall be
            dumped
        :param fn_beam: filename of beam trace
        :param model: earthmodel to use (optional)
        :param network: network code (optional)
        :param station: station code (optional)
        '''
        logger.debug('start beam forming')
        stations = self.stations
        network_code = kwargs.get('network', '')
        station_code = kwargs.get('station', 'STK')
        c_station_id = (network_code, station_code)
        t_shifts = []
        lat_c, lon_c, z_c = self.c_lat_lon_z

        self.station_c = Station(lat=float(lat_c),
                                 lon=float(lon_c),
                                 elevation=float(z_c),
                                 depth=0.,
                                 name='Array Center',
                                 network=c_station_id[0],
                                 station=c_station_id[1][:5])
        fn_dump_center = kwargs.get('fn_dump_center', 'array_center.pf')
        fn_beam = kwargs.get('fn_beam', 'beam.mseed')
        if event:
            mod = cake.load_model(crust2_profile=(event.lat, event.lon))
            dist = ortho.distance_accurate50m(event, self.station_c)
            ray = timing.t(mod, (event.depth, dist), get_ray=True)

            if ray is None:
                logger.error(
                    'None of defined phases available at beam station:\n %s' %
                    self.station_c)
                return
            else:
                b = ortho.azimuth(self.station_c, event)
                if b >= 0.:
                    self.bazi = b
                elif b < 0.:
                    self.bazi = 360. + b
                self.slow = ray.p / (cake.r2d * cake.d2m)
        else:
            self.bazi = bazi
            self.slow = slow

        logger.info(
            'stacking %s with slowness %1.4f s/km at back azimuth %1.1f '
            'degrees' %
            ('.'.join(c_station_id), self.slow * cake.km, self.bazi))

        lat0 = num.array([lat_c] * len(stations))
        lon0 = num.array([lon_c] * len(stations))
        lats = num.array([s.lat for s in stations])
        lons = num.array([s.lon for s in stations])
        ns, es = ortho.latlon_to_ne_numpy(lat0, lon0, lats, lons)
        theta = float(self.bazi * num.pi / 180.)
        R = num.array([[num.cos(theta), -num.sin(theta)],
                       [num.sin(theta), num.cos(theta)]])
        distances = R.dot(num.vstack((es, ns)))[1]
        channels = set()
        self.stacked = {}
        num_stacked = {}
        self.t_shifts = {}
        self.shifted_traces = []
        taperer = trace.CosFader(xfrac=0.05)
        if self.diff_dt_treat == 'downsample':
            self.traces.sort(key=lambda x: x.deltat)
        elif self.diff_dt_treat == 'oversample':
            dts = [t.deltat for t in self.traces]
            for tr in self.traces:
                tr.resample(min(dts))

        for tr in self.traces:
            if tr.nslc_id[:2] == c_station_id:
                continue
            tr = tr.copy(data=True)
            tr.ydata = tr.ydata.astype(
                num.float64) - tr.ydata.mean(dtype=num.float64)
            tr.taper(taperer)
            try:
                stack_trace = self.stacked[tr.channel]
                num_stacked[tr.channel] += 1
            except KeyError:
                stack_trace = tr.copy(data=True)
                stack_trace.set_ydata(num.zeros(len(stack_trace.get_ydata())))

                stack_trace.set_codes(network=c_station_id[0],
                                      station=c_station_id[1],
                                      location='',
                                      channel=tr.channel)

                self.stacked[tr.channel] = stack_trace
                channels.add(tr.channel)
                num_stacked[tr.channel] = 1

            nslc_id = tr.nslc_id

            try:
                stats = list(
                    filter(
                        lambda x: util.match_nslc('%s.%s.%s.*' % x.nsl(),
                                                  nslc_id), stations))
                stat = stats[0]
            except IndexError:
                break

            i = stations.index(stat)
            d = distances[i]
            t_shift = d * self.slow
            t_shifts.append(t_shift)
            tr.shift(t_shift)
            self.t_shifts[tr.nslc_id[:2]] = t_shift
            if self.normalize_std:
                tr.ydata = tr.ydata / tr.ydata.std()

            if num.abs(tr.deltat - stack_trace.deltat) > 0.000001:
                if self.diff_dt_treat == 'downsample':
                    stack_trace.downsample_to(tr.deltat)
                elif self.diff_dt_treat == 'upsample':
                    raise Exception(
                        'something went wrong with the upsampling, previously')
            stack_trace.add(tr)
            self.shifted_traces.append(tr)

        if self.post_normalize:
            for ch, tr in self.stacked.items():
                tr.set_ydata(tr.get_ydata() / num_stacked[ch])

        self.save_station(fn_dump_center)
        self.checked_nslc([stack_trace])
        self.save(stack_trace, fn_beam)
        return self.shifted_traces, stack_trace, t_shifts
Example #18
    def get_targets(self, ds, event, default_path='none'):
        logger.debug('Selecting waveform targets...')
        origin = event
        targets = []

        for st in ds.get_stations():
            for cha in self.channels:

                nslc = st.nsl() + (cha, )

                target = WaveformMisfitTarget(
                    quantity='displacement',
                    codes=nslc,
                    lat=st.lat,
                    lon=st.lon,
                    depth=st.depth,
                    interpolation=self.interpolation,
                    store_id=self.store_id,
                    misfit_config=self.misfit_config,
                    manual_weight=self.weight,
                    normalisation_family=self.normalisation_family,
                    path=self.path or default_path)

                if ds.is_blacklisted(nslc):
                    log_exclude(target, 'excluded by dataset')
                    continue

                if util.match_nslc(nslcs_to_patterns(self.exclude), nslc):
                    log_exclude(target, 'excluded by target group')
                    continue

                if self.include is not None and not util.match_nslc(
                        nslcs_to_patterns(self.include), nslc):
                    log_exclude(target, 'excluded by target group')
                    continue

                if self.distance_min is not None and \
                   target.distance_to(origin) < self.distance_min:
                    log_exclude(target, 'distance < distance_min')
                    continue

                if self.distance_max is not None and \
                   target.distance_to(origin) > self.distance_max:
                    log_exclude(target, 'distance > distance_max')
                    continue

                if self.distance_3d_min is not None and \
                   target.distance_3d_to(origin) < self.distance_3d_min:
                    log_exclude(target, 'distance_3d < distance_3d_min')
                    continue

                if self.distance_3d_max is not None and \
                   target.distance_3d_to(origin) > self.distance_3d_max:
                    log_exclude(target, 'distance_3d > distance_3d_max')
                    continue

                if self.depth_min is not None and \
                   target.depth < self.depth_min:
                    log_exclude(target, 'depth < depth_min')
                    continue

                if self.depth_max is not None and \
                   target.depth > self.depth_max:
                    log_exclude(target, 'depth > depth_max')
                    continue

                azi, _ = target.azibazi_to(origin)
                if cha == 'R':
                    target.azimuth = azi - 180.
                    target.dip = 0.
                elif cha == 'T':
                    target.azimuth = azi - 90.
                    target.dip = 0.
                elif cha == 'Z':
                    target.azimuth = 0.
                    target.dip = -90.

                target.set_dataset(ds)
                targets.append(target)

        if self.limit:
            return weed(origin, targets, self.limit)[0]
        else:
            return targets
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return 

    event = model.load_events(settings.event_filename)
    assert len(event)==1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = filter(lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id), stations)
    assert len(station) == 1
    station = station[0] 
    targets = [station_to_target(station, quantity=quantity, store_id=settings.store_id)]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz-minz)*0.02
    for s, t, tr in request.iter_results():
        if quantity=='velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t(
            'begin', (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left='off', right='off',
                           labelleft='off')

        y_pos = s.depth
        xdata = tr.get_xdata()-onset-s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time+onset+zoom_window[0],
                          tmax=event.time+onset+zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata/ampl_scale)*relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color='black', alpha=0.5)
        ax.text(zoom_window[0]*1.09, y_pos, '%1.1f' % (s.depth/1000.), horizontalalignment='right') #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                      distances=[s.distance_to(t)*cake.m2d],
                                      zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t-onset]*2
                y = [y_pos-(maxz-minz)*0.025, y_pos+(maxz-minz)*0.025]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(x_marker[1]-x_marker[1]*0.005, y[1], label,
                        #fontsize=12,
                        color='black',
                        verticalalignment='top',
                        horizontalalignment='right')

            except IndexError:
                logger.warning('no pP phase at d=%s z=%s stat=%s' % (s.distance_to(t)*cake.m2d,
                                                                     s.depth, station.station))
                pass

    if len(traces)==0:
        raise Exception('No Trace found!')
    if len(traces)>1:
        raise Exception('More then one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity=='displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left='off', right='off',
                           labelleft='off')

        y_pos = event.depth
        xdata = tr.get_xdata()-onset+correction
        tr_ydata = tr.get_ydata() *-1
        visible = tr.chop(tmin=onset+zoom_window[0]+correction,
                          tmax=onset+zoom_window[1]+correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata/ampl_scale * settings.gain*settings.gain_record)*relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin-zrange*0.2, zmax+zrange*0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0, 0.6, 'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure) #, fontsize=12.)

    if fill:
        ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color=settings.color, alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax+zrange*0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {'array-id': ''.join(station.nsl()),
                  'event_name': event.name,
                  'event_time': time_to_str(event.time)}
        ax.text(0.5, 1.05, settings.title % params,
                horizontalalignment='center', 
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        elif settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        elif settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '
        else:
            quantity_info = ''

        captions = {'filters': ''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % (
                f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1./store.config.deltat
        cax.text(0, 0, 'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s' % captions,
                 fontsize=12, transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
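Every example on this page ultimately funnels into pyrocko.util.match_nslc. A minimal sketch of its matching semantics as used here, assuming current pyrocko behavior: patterns are fnmatch-style globs over the '.'-joined NET.STA.LOC.CHA code, and a list of patterns matches if any entry does.

from pyrocko import util

# nslc may be given as a tuple; it is joined with '.' before matching.
assert util.match_nslc('GE.*.*.BHZ', ('GE', 'APE', '', 'BHZ'))
assert not util.match_nslc(['XX.*.*.*'], ('GE', 'APE', '', 'BHZ'))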
Example #20
0
 def trace_selector(x):
     return util.match_nslc('*.*.*.%s' % self.want_channel, x.nslc_id)
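A hedged sketch of how a selector like this is typically wired into a pile chopper; the data directory and channel name are assumptions for illustration only.

from pyrocko import pile, util

p = pile.make_pile('data/')   # hypothetical waveform directory
want_channel = 'BHZ'          # hypothetical channel of interest

def trace_selector(x):
    return util.match_nslc('*.*.*.%s' % want_channel, x.nslc_id)

for traces in p.chopper(trace_selector=trace_selector):
    for tr in traces:
        print(tr.nslc_id)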
Example #21
0
def check(
        config,
        event_names=None,
        target_string_ids=None,
        show_waveforms=False,
        n_random_synthetics=10,
        stations_used_path=None):

    markers = []
    stations_used = {}
    erroneous = []
    for ievent, event_name in enumerate(event_names):
        ds = config.get_dataset(event_name)
        event = ds.get_event()
        trs_all = []
        try:
            problem = config.get_problem(event)

            _, nfamilies = problem.get_family_mask()
            logger.info('Problem: %s' % problem.name)
            logger.info('Number of target families: %i' % nfamilies)
            logger.info('Number of targets (total): %i' % len(problem.targets))

            if target_string_ids:
                problem.targets = [
                    target for target in problem.targets
                    if util.match_nslc(target_string_ids, target.string_id())]

            logger.info(
                'Number of targets (selected): %i' % len(problem.targets))

            check_problem(problem)

            results_list = []
            sources = []
            if n_random_synthetics == 0:
                x = problem.get_reference_model()
                sources.append(problem.base_source)
                results = problem.evaluate(x)
                results_list.append(results)

            else:
                for i in range(n_random_synthetics):
                    x = problem.get_random_model()
                    sources.append(problem.get_source(x))
                    results = problem.evaluate(x)
                    results_list.append(results)

            if show_waveforms:
                engine = config.engine_config.get_engine()
                times = []
                tdata = []
                for target in problem.targets:
                    tobs_shift_group = []
                    tcuts = []
                    for source in sources:
                        tmin_fit, tmax_fit, tfade, tfade_taper = \
                            target.get_taper_params(engine, source)

                        times.extend((tmin_fit-tfade*2., tmax_fit+tfade*2.))

                        tobs, tsyn = target.get_pick_shift(engine, source)
                        if None not in (tobs, tsyn):
                            tobs_shift = tobs - tsyn
                        else:
                            tobs_shift = 0.0

                        tcuts.append(target.get_cutout_timespan(
                            tmin_fit+tobs_shift, tmax_fit+tobs_shift, tfade))

                        tobs_shift_group.append(tobs_shift)

                    tcuts = num.array(tcuts, dtype=float)

                    tdata.append((
                        tfade,
                        num.mean(tobs_shift_group),
                        (num.min(tcuts[:, 0]), num.max(tcuts[:, 1]))))

                tmin = min(times)
                tmax = max(times)

                tmax += (tmax-tmin)*2

                for (tfade, tobs_shift, tcut), target in zip(
                        tdata, problem.targets):

                    store = engine.get_store(target.store_id)

                    deltat = store.config.deltat

                    freqlimits = list(target.get_freqlimits())
                    freqlimits[2] = 0.45/deltat
                    freqlimits[3] = 0.5/deltat
                    freqlimits = tuple(freqlimits)

                    try:
                        trs_projected, trs_restituted, trs_raw, _ = \
                            ds.get_waveform(
                                target.codes,
                                tmin=tmin+tobs_shift,
                                tmax=tmax+tobs_shift,
                                tfade=tfade,
                                freqlimits=freqlimits,
                                deltat=deltat,
                                backazimuth=target.
                                get_backazimuth_for_waveform(),
                                debug=True)

                    except NotFound as e:
                        logger.warning(str(e))
                        continue

                    trs_projected = copy.deepcopy(trs_projected)
                    trs_restituted = copy.deepcopy(trs_restituted)
                    trs_raw = copy.deepcopy(trs_raw)

                    for trx in trs_projected + trs_restituted + trs_raw:
                        trx.shift(-tobs_shift)
                        trx.set_codes(
                            network='',
                            station=target.string_id(),
                            location='')

                    for trx in trs_projected:
                        trx.set_codes(location=trx.location + '2_proj')

                    for trx in trs_restituted:
                        trx.set_codes(location=trx.location + '1_rest')

                    for trx in trs_raw:
                        trx.set_codes(location=trx.location + '0_raw')

                    trs_all.extend(trs_projected)
                    trs_all.extend(trs_restituted)
                    trs_all.extend(trs_raw)

                    for source in sources:
                        tmin_fit, tmax_fit, tfade, tfade_taper = \
                            target.get_taper_params(engine, source)

                        markers.append(pmarker.Marker(
                            nslc_ids=[('', target.string_id(), '*_proj', '*')],
                            tmin=tmin_fit, tmax=tmax_fit))

                    markers.append(pmarker.Marker(
                        nslc_ids=[('', target.string_id(), '*_raw', '*')],
                        tmin=tcut[0]-tobs_shift, tmax=tcut[1]-tobs_shift,
                        kind=1))

            else:
                for itarget, target in enumerate(problem.targets):

                    nok = 0
                    for results in results_list:
                        result = results[itarget]
                        if not isinstance(result, gf.SeismosizerError):
                            nok += 1

                    if nok == 0:
                        sok = 'not used'
                    elif nok == len(results_list):
                        sok = 'ok'
                        try:
                            s = ds.get_station(target)
                            stations_used[s.nsl()] = s
                        except (NotFound, InvalidObject):
                            pass
                    else:
                        sok = 'not used (%i/%i ok)' % (nok, len(results_list))

                    logger.info('%-40s %s' % (
                        (target.string_id() + ':', sok)))

        except GrondError as e:
            logger.error('Event %i, "%s": %s' % (
                ievent,
                event.name or util.time_to_str(event.time),
                str(e)))

            erroneous.append(event)

        if show_waveforms:
            trace.snuffle(trs_all, stations=ds.get_stations(), markers=markers)

        if stations_used_path:
            stations = list(stations_used.values())
            stations.sort(key=lambda s: s.nsl())
            model.dump_stations(stations, stations_used_path)

    if erroneous:
        raise GrondError(
            'Check failed for events: %s'
            % ', '.join(ev.name for ev in erroneous))
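The freqlimits manipulation in the waveform branch above pins the two upper taper corners to the Green's function store's sampling rate, so that restituted data carries no energy that would alias when resampled to deltat. A short sketch of the idea, with a hypothetical sampling interval and lower corners:

deltat = 0.5                        # hypothetical GF sampling interval [s]
f_nyquist = 0.5 / deltat            # Nyquist frequency of the store
freqlimits = (0.01, 0.05,           # lower corners (hypothetical)
              0.45 / deltat,        # roll-off starts at 0.9 * Nyquist
              f_nyquist)            # zero response above Nyquist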
Example #22
0
    def call(self):

        self.cleanup()
        viewer = self.get_viewer()

        vtmin, vtmax = viewer.get_time_range()
        pile = self.get_pile()
        traces = list(
            pile.chopper(tmin=vtmin,
                         tmax=vtmax,
                         trace_selector=viewer.trace_selector))
        event, _ = self.get_active_event_and_stations()
        traces = [tr for trs in traces for tr in trs]
        stations = []
        for tr in traces:
            if tr.nslc_id[:2] in viewer.stations.keys():
                stations.append(viewer.get_station(viewer.station_key(tr)))
        distances = [ortho.distance_accurate50m(event, s) for s in stations]
        distances = [d / 1000. for d in distances]
        maxd = max(distances)
        mind = min(distances)
        distances = dict(zip([s.nsl() for s in stations], distances))
        matching_traces = [
            x for x in traces
            if util.match_nslc(self.get_station_patterns(stations), x.nslc_id)
        ]
        if self.add_markers:
            markers = self.get_markers()
            markers = [
                m for m in markers
                if m.tmax <= vtmax and m.tmin >= vtmin and m.selected
            ]
            markers = dict(zip([tuple(m.nslc_ids) for m in markers], markers))

        if self.fig is None or self.fframe.closed or not self._live_update:
            self.fframe = self.pylab(get='figure_frame')
            self.fig = self.fframe.gcf()

        if self._live_update:
            self.fig.clf()

        ymin = mind - 0.06 * (maxd - mind)
        ymax = maxd + 0.06 * (maxd - mind)
        ax = self.fig.add_subplot(111)
        xmin = 9E9
        xmax = -xmin
        texts = []
        manual_scale = 0.1 * (maxd - mind) * self.yscale

        if self.ampl_scaler == 'total min/max':
            max_trace = max(matching_traces,
                            key=lambda x: max(abs(x.get_ydata())))
            tr_maxy = max(abs(max_trace.get_ydata()))
            ampl_scale = float(tr_maxy)

        for tr in matching_traces:
            if viewer.highpass:
                tr.highpass(4, viewer.highpass)
            if viewer.lowpass:
                tr.lowpass(4, viewer.lowpass)
            if tr.nslc_id[:3] not in distances.keys():
                continue

            if self.t_red:
                red = distances[tr.nslc_id[:3]] / self.t_red
            else:
                red = 0.
            y_pos = distances[tr.nslc_id[:3]]
            xdata = tr.get_xdata() - red - event.time
            xmin = min(xmin, min(xdata))
            xmax = max(xmax, max(xdata))
            tr_ydata = tr.get_ydata()
            if self.ampl_scaler == 'trace min/max':
                ampl_scale = float(max(abs(tr_ydata)))
            elif self.ampl_scaler == 'standard deviation':
                ampl_scale = float(num.std(tr_ydata))
            ydata = (tr_ydata / ampl_scale * manual_scale) + y_pos
            ax.plot(xdata, ydata, c='black', linewidth=0.2)
            if self.fill_between:
                ax.fill_between(xdata,
                                y_pos,
                                ydata,
                                where=ydata > y_pos,
                                color='black',
                                alpha=0.5)
            texts.append(
                ax.text(xmax,
                        y_pos,
                        '%s.%s.%s.%s' % tr.nslc_id,
                        horizontalalignment='right',
                        fontsize=6.))
            if self.add_markers:
                for ids, m in markers.items():
                    if m.match_nslc(tr.nslc_id) or ids == ():
                        c = m.select_color(m.color_b)
                        c = [ci / 255. for ci in c]
                        t = m.tmin
                        x = [t - red - event.time, t - red - event.time]
                        y = [
                            y_pos - (maxd - mind) * 0.025,
                            y_pos + (maxd - mind) * 0.025
                        ]
                        ax.plot(x, y, linewidth=1, color=c)
                        label = m.get_label()
                        if not label:
                            label = ''

                        ax.text(x[1] - x[1] * 0.005,
                                y[1],
                                label,
                                color=c,
                                fontsize=6,
                                verticalalignment='top',
                                horizontalalignment='right')
        for txt in texts:
            txt.set_x(xmax)
        vred_str = '= ' + str(round(self.t_red, 2)) + 'km/s' if self.t_red \
            else 'off'
        ax.text(0.5,
                0.01,
                'time window: %s - %s  |   Reduction velocity %s' %
                (util.tts(vtmin), util.tts(vtmax), vred_str),
                verticalalignment='bottom',
                horizontalalignment='center',
                transform=self.fig.transFigure)

        ax.set_ylim([ymin, ymax])
        ax.set_xlim([xmin, xmax])
        ax.set_ylabel('Distance [km]')
        ax.set_xlabel('(red.) Time [s]')
        self.fig.canvas.draw()
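The record section above plots traces against reduced time. A minimal sketch of that reduction, with hypothetical numbers: each trace is shifted left by distance divided by the reduction velocity, so arrivals travelling at that velocity line up vertically.

import numpy as num

v_red = 8.0                              # reduction velocity [km/s]
d = 400.0                                # epicentral distance [km]
event_time = 0.0                         # hypothetical origin time
xdata = num.arange(0., 60., 0.1)         # absolute trace times [s]
xdata_reduced = xdata - d / v_red - event_time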
Example #24
0
def load_response_information(
        filename, format, nslc_patterns=None, fake_input_units=None):

    from pyrocko import pz, trace
    from pyrocko.io import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(
            zeros=zeros, poles=poles, constant=constant)

        resps.append(resp)
        labels.append(filename)

    elif format == 'pf':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain response files')

        resp = guts.load(filename=filename)
        resps.append(resp)
        labels.append(filename)

    elif format == 'resp':
        for resp in list(fresp.iload_filename(filename)):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (
                        fake_input_units or s.input_units.name,
                        s.output_units.name)

            resps.append(resp.response.get_pyrocko_response(
                resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                (filename, ) + resp.codes +
                (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (
                        network.code,
                        station.code,
                        channel.location_code,
                        channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    if not channel.response:
                        logger.warning(
                            'no response for channel %s.%s.%s.%s given.'
                            % nslc)
                        continue

                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (
                                fake_input_units or s.input_units.name,
                                s.output_units.name)

                    resps.append(channel.response.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

                    labels.append(
                        '%s (%s.%s.%s.%s, %s - %s%s)' % (
                            (filename, ) + nslc +
                            (tts(channel.start_date),
                             tts(channel.end_date),
                             units)))

    return resps, labels
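A hypothetical call to the loader above; the file name and the nslc pattern are assumptions, restricting a RESP file to vertical broadband channels:

resps, labels = load_response_information(
    'station.resp', 'resp', nslc_patterns=['*.*.*.BHZ'])
for label in labels:
    print(label)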
Example #25
0
    def call(self):
        self.cleanup()

        try:
            viewer = self.get_viewer()
            cli_mode = False
        except NoViewerSet:
            viewer = None
            cli_mode = True
        
        if not cli_mode:
            if self.only_active:
                active_event, active_stations = \
                    self.get_active_event_and_stations()
            else:
                active_event = None
                active_stations = viewer.stations.values()
        elif cli_mode:
            active_stations = self.stations

        station_list = []
        if active_stations:
            for stat in active_stations:
                if (viewer and not util.match_nslc(
                        viewer.blacklist, stat.nsl())) or cli_mode:
                    xml_station_marker = XMLStationMarker(
                        nsl='.'.join(stat.nsl()),
                        longitude=float(stat.lon),
                        latitude=float(stat.lat),
                        active='yes')

                    station_list.append(xml_station_marker)

        else:
            station_list = []
        active_station_list = StationMarkerList(stations=station_list)

        if self.only_active:
            markers = [viewer.get_active_event_marker()]
        else:
            if not cli_mode:
                tmin, tmax = self.get_selected_time_range(fallback=True)
                markers = [m for m in viewer.get_markers()
                           if isinstance(m, gui_util.EventMarker)
                           and m.tmin >= tmin and m.tmax <= tmax]

            else:
                markers = self.markers

        ev_marker_list = []
        for m in markers:
            xmleventmarker = convert_event_marker(m)
            ev_marker_list.append(xmleventmarker)

        event_list = EventMarkerList(events=ev_marker_list)
        event_station_list = MarkerLists(
            station_marker_list=active_station_list,
            event_marker_list=event_list)

        event_station_list.validate()

        tempdir = tempfile.mkdtemp(dir=self.tempdir())

        if self.map_kind == 'Google Maps':
            map_fn = 'map_googlemaps.html'
        elif self.map_kind == 'OpenStreetMap':
            map_fn = 'map_osm.html'

        url = 'file://' + tempdir + '/' + map_fn

        for entry in ['loadxmldoc.js', map_fn]:
            if cli_mode:
                snuffling_dir = os.environ['HOME'] + '/.snufflings/map/'
            else:
                snuffling_dir = self.module_dir()

            shutil.copy(os.path.join(snuffling_dir, entry),
                        os.path.join(tempdir, entry))

        markers_fn = os.path.join(tempdir, 'markers.xml')
        dump_xml(event_station_list, filename=markers_fn)

        if self.open_external:
            QDesktopServices.openUrl(QUrl(url))
        else:
            global g_counter
            g_counter += 1
            self.web_frame(
                url,
                name='Map %i (%s)' % (g_counter, self.map_kind))
Example #26
0
    def call(self):
        if not self.viewer_connected:
            self.get_viewer().about_to_close.connect(
                self.file_serving_worker.stop)
            self.viewer_connected = True
        try:
            from OpenGL import GL  # noqa
        except ImportError:
            logger.warning('Could not find package OpenGL, '
                           'if the map does not work try installing OpenGL\n'
                           'e.g. sudo pip install PyOpenGL')

        self.cleanup()

        try:
            viewer = self.get_viewer()
            cli_mode = False
        except NoViewerSet:
            viewer = None
            cli_mode = True

        if not cli_mode:
            if self.only_active:
                _, active_stations = \
                    self.get_active_event_and_stations()
            else:
                active_stations = viewer.stations.values()
        elif cli_mode:
            active_stations = self.stations

        station_list = []
        if active_stations:
            for stat in active_stations:
                is_blacklisted = util.match_nslc(viewer.blacklist, stat.nsl())
                if (viewer and not is_blacklisted) or cli_mode:
                    xml_station_marker = XMLStationMarker(
                        nsl='.'.join(stat.nsl()),
                        longitude=float(stat.lon),
                        latitude=float(stat.lat),
                        active='yes')

                    station_list.append(xml_station_marker)

        active_station_list = StationMarkerList(stations=station_list)

        if self.only_active:
            markers = [viewer.get_active_event_marker()]
        else:
            if cli_mode:
                markers = self.markers
            else:
                markers = self.get_selected_markers()
                if len(markers) == 0:
                    tmin, tmax = self.get_selected_time_range(fallback=True)
                    markers = [
                        m for m in viewer.get_markers()
                        if isinstance(m, gui_util.EventMarker)
                        and m.tmin >= tmin and m.tmax <= tmax
                    ]

        ev_marker_list = []
        for m in markers:
            if not isinstance(m, gui_util.EventMarker):
                continue
            xmleventmarker = convert_event_marker(m)
            if xmleventmarker is None:
                continue
            ev_marker_list.append(xmleventmarker)

        event_list = EventMarkerList(events=ev_marker_list)
        event_station_list = MarkerLists(
            station_marker_list=active_station_list,
            event_marker_list=event_list)

        event_station_list.validate()
        if self.map_kind != 'GMT':
            tempdir = self.marker_tempdir
            if self.map_kind == 'Google Maps':
                map_fn = 'map_googlemaps.html'
            elif self.map_kind == 'OpenStreetMap':
                map_fn = 'map_osm.html'

            url = 'http://localhost:' + str(self.port) + '/%s' % map_fn

            files = ['loadxmldoc.js', 'map_util.js', 'plates.kml', map_fn]
            snuffling_dir = op.dirname(op.abspath(__file__))
            for entry in files:
                shutil.copy(os.path.join(snuffling_dir, entry),
                            os.path.join(tempdir, entry))
            logger.debug('copied data to %s' % tempdir)
            markers_fn = os.path.join(self.marker_tempdir, 'markers.xml')
            self.data_proxy.content_to_serve.emit(self.port)
            dump_xml(event_station_list, filename=markers_fn)

            if self.open_external:
                qg.QDesktopServices.openUrl(qc.QUrl(url))
            else:
                global g_counter
                g_counter += 1
                self.web_frame(url,
                               name='Map %i (%s)' % (g_counter, self.map_kind))
        else:
            lats_all = []
            lons_all = []

            slats = []
            slons = []
            slabels = []
            for s in active_stations:
                slats.append(s.lat)
                slons.append(s.lon)
                slabels.append('.'.join(s.nsl()))

            elats = []
            elons = []
            psmeca_input = []
            markers = self.get_selected_markers()
            for m in markers:
                if isinstance(m, gui_util.EventMarker):
                    e = m.get_event()
                    elats.append(e.lat)
                    elons.append(e.lon)
                    if e.moment_tensor is not None:
                        mt = e.moment_tensor.m6()
                        psmeca_input.append((e.lon, e.lat, e.depth / 1000.,
                                             mt[0], mt[1], mt[2], mt[3], mt[4],
                                             mt[5], 1., e.lon, e.lat, e.name))
                    else:
                        if e.magnitude is None:
                            moment = -1.
                        else:
                            moment = moment_tensor.magnitude_to_moment(
                                e.magnitude)
                            psmeca_input.append(
                                (e.lon, e.lat, e.depth / 1000., moment / 3.,
                                 moment / 3., moment / 3., 0., 0., 0., 1.,
                                 e.lon, e.lat, e.name))

            lats_all.extend(elats)
            lons_all.extend(elons)
            lats_all.extend(slats)
            lons_all.extend(slons)

            lats_all = num.array(lats_all)
            lons_all = num.array(lons_all)

            if len(lats_all) == 0:
                return

            center_lat, center_lon = ortho.geographic_midpoint(
                lats_all, lons_all)
            ntotal = len(lats_all)
            clats = num.ones(ntotal) * center_lat
            clons = num.ones(ntotal) * center_lon
            dists = ortho.distance_accurate50m_numpy(clats, clons, lats_all,
                                                     lons_all)

            maxd = num.max(dists) or 0.
            m = Map(lat=center_lat,
                    lon=center_lon,
                    radius=max(10000., maxd) * 1.1,
                    width=35,
                    height=25,
                    show_grid=True,
                    show_topo=True,
                    color_dry=(238, 236, 230),
                    topo_cpt_wet='light_sea_uniform',
                    topo_cpt_dry='light_land_uniform',
                    illuminate=True,
                    illuminate_factor_ocean=0.15,
                    show_rivers=False,
                    show_plates=False)

            m.gmt.psxy(in_columns=(slons, slats), S='t15p', G='black', *m.jxyr)
            for i in range(len(active_stations)):
                m.add_label(slats[i], slons[i], slabels[i])

            m.gmt.psmeca(in_rows=psmeca_input,
                         S='m1.0',
                         G='red',
                         C='5p,0/0/0',
                         *m.jxyr)

            tmpdir = self.tempdir()

            self.outfn = os.path.join(tmpdir, '%i.png' % self.figcount)
            m.save(self.outfn)
            f = self.pixmap_frame(self.outfn)  # noqa
    def call(self):
        if not self.viewer_connected:
            self.get_viewer().about_to_close.connect(
                self.file_serving_worker.stop)
            self.viewer_connected = True
        try:
            from OpenGL import GL  # noqa
        except ImportError:
            logger.warning(
                'Could not find package OpenGL, '
                'if the map does not work try installing OpenGL\n'
                'e.g. sudo pip install PyOpenGL')

        self.cleanup()

        try:
            viewer = self.get_viewer()
            cli_mode = False
        except NoViewerSet:
            viewer = None
            cli_mode = True

        if not cli_mode:
            if self.only_active:
                _, active_stations = \
                    self.get_active_event_and_stations()
            else:
                active_stations = viewer.stations.values()
        elif cli_mode:
            active_stations = self.stations

        station_list = []
        if active_stations:
            for stat in active_stations:
                is_blacklisted = util.match_nslc(viewer.blacklist, stat.nsl())
                if (viewer and not is_blacklisted) or cli_mode:
                    xml_station_marker = XMLStationMarker(
                        nsl='.'.join(stat.nsl()),
                        longitude=float(stat.lon),
                        latitude=float(stat.lat),
                        active='yes')

                    station_list.append(xml_station_marker)

        active_station_list = StationMarkerList(stations=station_list)

        if self.only_active:
            markers = [viewer.get_active_event_marker()]
        else:
            if cli_mode:
                markers = self.markers
            else:
                markers = self.get_selected_markers()
                if len(markers) == 0:
                    tmin, tmax = self.get_selected_time_range(fallback=True)
                    markers = [m for m in viewer.get_markers()
                               if isinstance(m, gui_util.EventMarker) and
                               m.tmin >= tmin and m.tmax <= tmax]

        ev_marker_list = []
        for m in markers:
            if not isinstance(m, gui_util.EventMarker):
                continue
            xmleventmarker = convert_event_marker(m)
            if xmleventmarker is None:
                continue
            ev_marker_list.append(xmleventmarker)

        event_list = EventMarkerList(events=ev_marker_list)
        event_station_list = MarkerLists(
            station_marker_list=active_station_list,
            event_marker_list=event_list)

        event_station_list.validate()
        if self.map_kind != 'GMT':
            tempdir = self.marker_tempdir
            if self.map_kind == 'Google Maps':
                map_fn = 'map_googlemaps.html'
            elif self.map_kind == 'OpenStreetMap':
                map_fn = 'map_osm.html'

            url = 'http://localhost:' + str(self.port) + '/%s' % map_fn

            files = ['loadxmldoc.js', 'map_util.js', 'plates.kml', map_fn]
            snuffling_dir = op.dirname(op.abspath(__file__))
            for entry in files:
                shutil.copy(os.path.join(snuffling_dir, entry),
                            os.path.join(tempdir, entry))
            logger.debug('copied data to %s' % tempdir)
            markers_fn = os.path.join(self.marker_tempdir, 'markers.xml')
            self.data_proxy.content_to_serve.emit(self.port)
            dump_xml(event_station_list, filename=markers_fn)

            if self.open_external:
                qg.QDesktopServices.openUrl(qc.QUrl(url))
            else:
                global g_counter
                g_counter += 1
                self.web_frame(
                    url,
                    name='Map %i (%s)' % (g_counter, self.map_kind))
        else:
            lats_all = []
            lons_all = []

            slats = []
            slons = []
            slabels = []
            for s in active_stations:
                slats.append(s.lat)
                slons.append(s.lon)
                slabels.append('.'.join(s.nsl()))

            elats = []
            elons = []
            psmeca_input = []
            markers = self.get_selected_markers()
            for m in markers:
                if isinstance(m, gui_util.EventMarker):
                    e = m.get_event()
                    elats.append(e.lat)
                    elons.append(e.lon)
                    if e.moment_tensor is not None:
                        mt = e.moment_tensor.m6()
                        psmeca_input.append(
                            (e.lon, e.lat, e.depth/1000., mt[0], mt[1],
                             mt[2], mt[3], mt[4], mt[5],
                             1., e.lon, e.lat, e.name))
                    else:
                        if e.magnitude is None:
                            moment = -1.
                        else:
                            moment = moment_tensor.magnitude_to_moment(
                                e.magnitude)
                            psmeca_input.append(
                                (e.lon, e.lat, e.depth/1000.,
                                 moment/3., moment/3., moment/3.,
                                 0., 0., 0., 1., e.lon, e.lat, e.name))

            lats_all.extend(elats)
            lons_all.extend(elons)
            lats_all.extend(slats)
            lons_all.extend(slons)

            lats_all = num.array(lats_all)
            lons_all = num.array(lons_all)

            if len(lats_all) == 0:
                return

            center_lat, center_lon = ortho.geographic_midpoint(
                lats_all, lons_all)
            ntotal = len(lats_all)
            clats = num.ones(ntotal) * center_lat
            clons = num.ones(ntotal) * center_lon
            dists = ortho.distance_accurate50m_numpy(
                clats, clons, lats_all, lons_all)

            maxd = num.max(dists) or 0.
            m = Map(
                lat=center_lat, lon=center_lon,
                radius=max(10000., maxd) * 1.1,
                width=35, height=25,
                show_grid=True,
                show_topo=True,
                color_dry=(238, 236, 230),
                topo_cpt_wet='light_sea_uniform',
                topo_cpt_dry='light_land_uniform',
                illuminate=True,
                illuminate_factor_ocean=0.15,
                show_rivers=False,
                show_plates=False)

            m.gmt.psxy(in_columns=(slons, slats), S='t15p', G='black', *m.jxyr)
            for i in range(len(active_stations)):
                m.add_label(slats[i], slons[i], slabels[i])

            m.gmt.psmeca(
                in_rows=psmeca_input, S='m1.0', G='red', C='5p,0/0/0', *m.jxyr)

            tmpdir = self.tempdir()

            self.outfn = os.path.join(tmpdir, '%i.png' % self.figcount)
            m.save(self.outfn)
            f = self.pixmap_frame(self.outfn)  # noqa
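When an event has no moment tensor, the GMT branch above substitutes an isotropic pseudo-tensor built from the magnitude. A short sketch of that fallback (the magnitude value is hypothetical):

from pyrocko import moment_tensor

magnitude = 5.5                      # hypothetical event magnitude
moment = moment_tensor.magnitude_to_moment(magnitude)
# isotropic source: scalar moment split over the diagonal entries
mnn = mee = mdd = moment / 3.
mne = mnd = med = 0.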
Example #28
0
        action="store_true",
    )
    parser.add_argument(
        "--show",
        help="show figure at the end",
        default=False,
        required=False,
        action="store_true",
    )
    args = parser.parse_args()

    stations = model.load_stations(args.stations)

    if args.usestations:
        stations = [
            s for s in stations if util.match_nslc(args.usestations, s.nsl())
        ]

    events = []
    if args.events:
        events.extend(model.load_events(args.events))
    if args.markers:
        markers = gui_util.load_markers(args.markers)
        events.extend([m.get_event() for m in markers])
    get_bounds(
        stations,
        events=events,
        usestations=args.usestations,
        printall=args.printall,
        show_fig=args.show,
    )
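A minimal sketch of the --usestations filter above, outside of argparse; the station file name and patterns are assumptions:

from pyrocko import model, util

stations = model.load_stations('stations.txt')   # hypothetical file
usestations = ['GE.*.*']                         # hypothetical patterns
stations = [
    s for s in stations if util.match_nslc(usestations, s.nsl())]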
Example #29
0
 def match_nslc(self, nslc):
     '''See documentation of :py:func:`pyrocko.util.match_nslc`'''
     patterns = ['.'.join(x) for x in self.nslc_ids]
     return util.match_nslc(patterns, nslc)
Example #30
0
 def reject_blacklisted(self, tr):
     '''Return `False` if the nslc codes of `tr` match any of the
     blacklisting patterns, otherwise return `True`.'''
     return not util.match_nslc(self.config.blacklist, tr.nslc_id)
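Standalone sketch of the same blacklist test, with hypothetical patterns; a trace is kept only if none of the patterns match its codes:

from pyrocko import util

blacklist = ['XX.*.*.*', '*.STA1.*.*']          # hypothetical patterns
nslc_id = ('GE', 'APE', '', 'BHZ')
keep = not util.match_nslc(blacklist, nslc_id)  # True here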
    def call(self):
        self.cleanup()
        c_station_id = ('_', 'STK')
        if self.unit == 's/deg':
            slow_factor = 1. / onedeg
        elif self.unit == 's/km':
            slow_factor = 1. / 1000.

        slow = self.slow * slow_factor
        if self.stacked_traces is not None:
            self.add_traces(self.stacked_traces)
        viewer = self.get_viewer()
        if self.station_c:
            viewer.stations.pop(c_station_id)

        stations = self.get_stations()
        if len(stations) == 0:
            self.fail('No station meta information found')

        traces = list(self.chopper_selected_traces(fallback=True))
        traces = [tr for trs in traces for tr in trs]
        visible_nslcs = [tr.nslc_id for tr in traces]
        stations = [
            x for x in stations
            if util.match_nslcs("%s.%s.%s.*" % x.nsl(), visible_nslcs)
        ]
        if not self.lat_c or not self.lon_c or not self.z_c:
            self.lat_c, self.lon_c, self.z_c = self.center_lat_lon(stations)
            self.set_parameter('lat_c', self.lat_c)
            self.set_parameter('lon_c', self.lon_c)

        self.station_c = Station(lat=float(self.lat_c),
                                 lon=float(self.lon_c),
                                 elevation=float(self.z_c),
                                 depth=0.,
                                 name='Array Center',
                                 network=c_station_id[0],
                                 station=c_station_id[1])

        viewer.add_stations([self.station_c])
        lat0 = num.array([self.lat_c] * len(stations))
        lon0 = num.array([self.lon_c] * len(stations))
        lats = num.array([s.lat for s in stations])
        lons = num.array([s.lon for s in stations])
        ns, es = ortho.latlon_to_ne_numpy(lat0, lon0, lats, lons)
        theta = float(self.bazi * num.pi / 180.)
        R = num.array([[num.cos(theta), -num.sin(theta)],
                       [num.sin(theta), num.cos(theta)]])
        distances = R.dot(num.vstack((es, ns)))[1]
        channels = set()
        self.stacked = {}
        num_stacked = {}
        self.t_shifts = {}
        shifted_traces = []
        taperer = trace.CosFader(xfrac=0.05)
        if self.diff_dt_treat == 'downsample':
            traces.sort(key=lambda x: x.deltat)
        elif self.diff_dt_treat == 'oversample':
            dts = [t.deltat for t in traces]
            for tr in traces:
                tr.resample(min(dts))

        for tr in traces:
            if tr.nslc_id[:2] == c_station_id:
                continue
            tr = tr.copy(data=True)
            tr.ydata = tr.ydata.astype(num.float64)
            tr.ydata -= tr.ydata.mean(dtype=num.float64)
            tr.taper(taperer)
            try:
                stack_trace = self.stacked[tr.channel]
                num_stacked[tr.channel] += 1
            except KeyError:
                stack_trace = tr.copy(data=True)
                stack_trace.set_ydata(num.zeros(len(stack_trace.get_ydata())))

                stack_trace.set_codes(network=c_station_id[0],
                                      station=c_station_id[1],
                                      location='',
                                      channel=tr.channel)

                self.stacked[tr.channel] = stack_trace
                channels.add(tr.channel)
                num_stacked[tr.channel] = 1

            nslc_id = tr.nslc_id

            try:
                stats = [
                    x for x in stations
                    if util.match_nslc('%s.%s.%s.*' % x.nsl(), nslc_id)
                ]

                stat = stats[0]
            except IndexError:
                break

            i = stations.index(stat)
            d = distances[i]
            t_shift = d * slow
            tr.shift(t_shift)
            stat = viewer.get_station(tr.nslc_id[:2])
            self.t_shifts[stat] = t_shift
            if self.normalize_std:
                tr.ydata = tr.ydata / tr.ydata.std()

            if num.abs(tr.deltat - stack_trace.deltat) > 0.000001:
                if self.diff_dt_treat == 'downsample':
                    stack_trace.downsample_to(tr.deltat)
                elif self.diff_dt_treat == 'upsample':
                    print(
                        'something went wrong with the upsampling, previously')
            stack_trace.add(tr)

            if self.add_shifted:
                tr.set_station('%s_s' % tr.station)
                shifted_traces.append(tr)

        if self.post_normalize:
            for ch, tr in self.stacked.items():
                tr.set_ydata(tr.get_ydata() / num_stacked[ch])

        self.cleanup()

        for ch, tr in self.stacked.items():
            if num_stacked[ch] > 1:
                self.add_trace(tr)

        if self.add_shifted:
            self.add_traces(shifted_traces)
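The delay-and-sum core of the snuffling above: station east/north offsets are rotated by the back-azimuth, the second row of the rotated coordinates gives the along-beam distances, and multiplying by the slowness yields per-station time shifts. A self-contained sketch with hypothetical values:

import numpy as num

bazi = 30.0                        # back-azimuth [deg], hypothetical
slow = 0.05                        # slowness [s/km], hypothetical
es = num.array([1.2, -3.4])        # east offsets [km]
ns = num.array([0.5, 2.1])         # north offsets [km]
theta = float(bazi * num.pi / 180.)
R = num.array([[num.cos(theta), -num.sin(theta)],
               [num.sin(theta), num.cos(theta)]])
distances = R.dot(num.vstack((es, ns)))[1]   # along-beam distances [km]
t_shifts = distances * slow                  # shifts applied via tr.shift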
Example #32
0
 def match_nslc(self, nslc):
     '''See documentation of :py:func:`pyrocko.util.match_nslc`'''
     patterns = ['.'.join(x) for x in self.nslc_ids]
     return util.match_nslc(patterns, nslc)
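The method above turns stored nslc_id tuples into glob patterns before delegating to util.match_nslc. A standalone equivalent with a hypothetical id list:

from pyrocko import util

nslc_ids = [('GE', 'APE', '*', 'BH?')]           # hypothetical ids
patterns = ['.'.join(x) for x in nslc_ids]       # -> ['GE.APE.*.BH?']
assert util.match_nslc(patterns, ('GE', 'APE', '', 'BHZ'))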
Example #33
0
    def call(self):

        self.cleanup()
        viewer = self.get_viewer()

        vtmin, vtmax = viewer.get_time_range()
        pile = self.get_pile()
        traces = [
            tr for tr in pile.chopper(
                tmin=vtmin, tmax=vtmax, trace_selector=viewer.trace_selector)]

        event, stations = self.get_active_event_and_stations()
        traces = [tr for trs in traces for tr in trs]

        stations_by_nsl = {s.nsl(): s for s in self.get_stations()}
        stations = [
            stations_by_nsl.get(station_key(tr), None) for tr in traces]

        distances = [
            ortho.distance_accurate50m(event, s) for s in stations if
            s is not None]

        distances = [d/1000. for d in distances]
        maxd = max(distances)
        mind = min(distances)
        distances = dict(zip([s.nsl() for s in stations], distances))
        matching_traces = [x for x in traces if util.match_nslc(
                            self.get_station_patterns(stations), x.nslc_id)]
        if self.add_markers:
            markers = self.get_markers()
            markers = [
                m for m in markers if m.tmax <= vtmax and
                m.tmin >= vtmin and m.selected]

            markers = dict(zip([tuple(m.nslc_ids) for m in markers], markers))

        if self.fig is None or self.fframe.closed or not self._live_update:
            self.fframe = self.pylab(get='figure_frame')
            self.fig = self.fframe.gcf()

        if self._live_update:
            self.fig.clf()

        ymin = mind-0.06*(maxd-mind)
        ymax = maxd+0.06*(maxd-mind)
        ax = self.fig.add_subplot(111)
        xmin = 9E9
        xmax = -xmin
        texts = []
        manual_scale = 0.1 * (maxd-mind)*self.yscale

        if self.ampl_scaler == 'total min/max':
            max_trace = max(
                matching_traces, key=lambda x: max(abs(x.get_ydata())))

            tr_maxy = max(abs(max_trace.get_ydata()))
            ampl_scale = float(tr_maxy)

        for tr in matching_traces:
            if viewer.highpass:
                tr.highpass(4, viewer.highpass)
            if viewer.lowpass:
                tr.lowpass(4, viewer.lowpass)
            if tr.nslc_id[:3] not in distances.keys():
                continue

            if self.t_red:
                red = distances[tr.nslc_id[:3]]/self.t_red
            else:
                red = 0.
            y_pos = distances[tr.nslc_id[:3]]
            xdata = tr.get_xdata()-red-event.time
            xmin = min(xmin, min(xdata))
            xmax = max(xmax, max(xdata))
            tr_ydata = tr.get_ydata()
            if self.ampl_scaler == 'trace min/max':
                ampl_scale = float(max(abs(tr_ydata)))
            elif self.ampl_scaler == 'standard deviation':
                ampl_scale = float(num.std(tr_ydata))
            ydata = (tr_ydata/ampl_scale * manual_scale) + y_pos
            ax.plot(xdata, ydata, c='black', linewidth=0.2)

            if self.fill_between:
                ax.fill_between(
                    xdata, y_pos, ydata, where=ydata > y_pos, color='black',
                    alpha=0.5)

            texts.append(
                ax.text(
                    xmax, y_pos, '%s.%s.%s.%s' % tr.nslc_id,
                    horizontalalignment='right', fontsize=6.))

            if self.add_markers:
                for ids, m in markers.items():
                    if m.match_nslc(tr.nslc_id) or ids == ():
                        c = m.select_color(m.color_b)
                        c = [ci/255. for ci in c]
                        t = m.tmin
                        x = [t-red-event.time, t-red-event.time]
                        y = [y_pos-(maxd-mind)*0.025, y_pos+(maxd-mind)*0.025]
                        ax.plot(x, y, linewidth=1, color=c)
                        label = m.get_label()
                        if not label:
                            label = ''

                        ax.text(x[1]-x[1]*0.005, y[1], label, color=c,
                                fontsize=6,
                                verticalalignment='top',
                                horizontalalignment='right')

        for txt in texts:
            txt.set_x(xmax)

        vred_str = '= '+str(round(self.t_red, 2)) + 'km/s' if self.t_red \
            else 'off'

        ax.text(0.5, 0.01, 'time window: %s - %s  |   Reduction velocity %s' %
                (util.tts(vtmin), util.tts(vtmax), vred_str),
                verticalalignment='bottom', horizontalalignment='center',
                transform=self.fig.transFigure)

        ax.set_ylim([ymin, ymax])
        ax.set_xlim([xmin, xmax])
        ax.set_ylabel('Distance [km]')
        ax.set_xlabel('(red.) Time [s]')
        self.fig.canvas.draw()
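A sketch of the 'standard deviation' amplitude scaling used above, with synthetic sample values: each trace is divided by its standard deviation, rescaled to a fixed fraction of the distance range, and hung at its epicentral distance on the y-axis.

import numpy as num

tr_ydata = num.random.normal(size=1000)   # hypothetical trace samples
mind, maxd, yscale = 20.0, 120.0, 1.0     # distance range [km], gain
manual_scale = 0.1 * (maxd - mind) * yscale
ampl_scale = float(num.std(tr_ydata))
y_pos = 75.0                              # station distance [km]
ydata = tr_ydata / ampl_scale * manual_scale + y_pos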
Example #34
0
    def call(self):

        self.cleanup()
        if self.stacked_traces is not None:
            self.add_traces(self.stacked_traces)
        viewer = self.get_viewer()
        if self.station_c:
            viewer.stations.pop(('', 'STK'))

        stations = self.get_stations()

        if not self.lat_c or not self.lon_c or not self.z_c:
            self.lat_c, self.lon_c, self.z_c = self.center_lat_lon(stations)
            self.set_parameter('lat_c', self.lat_c)
            self.set_parameter('lon_c', self.lon_c)

        self.station_c = Station(lat=float(self.lat_c),
                                 lon=float(self.lon_c),
                                 elevation=float(self.z_c),
                                 depth=0.,
                                 name='Array Center',
                                 network='',
                                 station='STK')

        viewer.add_stations([self.station_c])
        lat0 = num.array([self.lat_c]*len(stations))
        lon0 = num.array([self.lon_c]*len(stations))
        lats = num.array([s.lat for s in stations])
        lons = num.array([s.lon for s in stations])
        ns, es = ortho.latlon_to_ne_numpy(lat0, lon0, lats, lons)
        theta = float(self.bazi*num.pi/180.)
        R = num.array([[num.cos(theta), -num.sin(theta)],
                       [num.sin(theta), num.cos(theta)]])
        distances = R.dot(num.vstack((es, ns)))[1]
        channels = set()
        self.stacked = {}
        num_stacked = {}
        self.t_shifts = {}
        shifted_traces = []
        traces = list(self.chopper_selected_traces(fallback=True))
        traces = [tr for trs in traces for tr in trs]
        taperer = trace.CosFader(xfrac=0.05)
        if self.diff_dt_treat == 'downsample':
            traces.sort(key=lambda x: x.deltat)
        elif self.diff_dt_treat == 'oversample':
            dts = [t.deltat for t in traces]
            for tr in traces:
                tr.resample(min(dts))

        for tr in traces:
            if tr.nslc_id[:3] == ('_', 'STK', ''):
                continue
            tr = tr.copy(data=True)
            tr.ydata = tr.ydata.astype(num.float64)
            tr.ydata -= tr.ydata.mean(dtype=num.float64)
            tr.taper(taperer)
            try:
                stack_trace = self.stacked[tr.channel]
                num_stacked[tr.channel] += 1
            except KeyError:
                stack_trace = tr.copy(data=True)
                stack_trace.set_ydata(num.zeros(
                    len(stack_trace.get_ydata())))

                stack_trace.set_codes(network='_',
                                      station='STK',
                                      location='',
                                      channel=tr.channel)

                self.stacked[tr.channel] = stack_trace
                channels.add(tr.channel)
                num_stacked[tr.channel] = 1

            nslc_id = tr.nslc_id

            try:
                stats = list(filter(lambda x: util.match_nslc(
                    '%s.%s.%s.*' % x.nsl(), nslc_id), stations))

                stat = stats[0]
            except IndexError:
                break

            i = stations.index(stat)
            d = distances[i]
            t_shift = d*self.slow/1000.
            tr.shift(t_shift)
            stat = viewer.get_station(tr.nslc_id[:2])
            self.t_shifts[stat] = t_shift
            if self.normalize_std:
                tr.ydata = tr.ydata/tr.ydata.std()

            if num.abs(tr.deltat - stack_trace.deltat) > 0.000001:
                if self.diff_dt_treat == 'downsample':
                    stack_trace.downsample_to(tr.deltat)
                elif self.diff_dt_treat == 'upsample':
                    print(
                        'something went wrong with the upsampling, previously')
            stack_trace.add(tr)

            if self.add_shifted:
                tr.set_station('%s_s' % tr.station)
                shifted_traces.append(tr)

        if self.post_normalize:
            for ch, tr in self.stacked.items():
                tr.set_ydata(tr.get_ydata() / num_stacked[ch])
        self.stacked_traces = list(self.stacked.values())
        self.cleanup()
        self.add_traces(self.stacked_traces)
        if self.add_shifted:
            self.add_traces(shifted_traces)
 def selector(tr):
     return util.match_nslc("%s.%s.%s.*" % nsl_id, tr.nslc_id)
                        help='name of file containing station information',
                        required=True)
    parser.add_argument('--events',
                        help='name of file containing event catalog',
                        default=False,
                        required=False)
    parser.add_argument('--printall',
                        help='Print all results to terminal',
                        default=True,
                        required=False,
                        action='store_true')
    parser.add_argument('--show',
                        help='show figure at the end',
                        default=False,
                        required=False,
                        action='store_true')
    args = parser.parse_args()

    stations = model.load_stations(args.stations)

    if args.usestations:
        stations = [
            s for s in stations
            if util.match_nslc(args.usestations, s.nsl())]

    events = []
    if args.events:
        events.extend(model.load_events(args.events))
    if args.markers:
        markers = gui_util.load_markers(args.markers)
        events.extend([m.get_event() for m in markers])
    get_bounds(
        stations, events=events, usestations=args.usestations,
        printall=args.printall, show_fig=args.show)
def plot(settings, show=False):

    # align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = "P"
    zoom_window = list(settings.zoom)
    ampl_scaler = "4*standard deviation"

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(":")
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.0
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(
        filter(lambda s: match_nslc("%s.%s.%s.*" % s.nsl(), traces[0].nslc_id),
               stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning("%s  Using nearest neighbor instead." % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(
                    t)
                max_dist_delta = (store.config.distance_max -
                                  farthest_source.distance_to(t))
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == "velocity":
            tr = integrate_differentiate(tr, "differentiate")

        onset = engine.get_store(t.store_id).t("begin",
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=event.time + onset + zoom_window[0],
            tmax=event.time + onset + zoom_window[1],
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c="black", linewidth=1.0, alpha=1.0)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color="black",
                            alpha=0.5)
        ax.text(
            zoom_window[0] * 1.09,
            y_pos,
            "%1.1f" % (s.depth / 1000.0),
            horizontalalignment="right",
        )  # , fontsize=12.)
        if False:  # disabled: mark the theoretical pP onset on each trace
            mod = store.config.earthmodel_1d
            label = "pP"
            arrivals = mod.arrivals(
                phases=[cake.PhaseDef(label)],
                distances=[s.distance_to(t) * cake.m2d],
                zstart=s.depth,
            )

            try:
                # use a dedicated name to avoid shadowing the target `t`
                # from the enclosing loop
                t_pP = arrivals[0].t
                marker_length = (maxz - minz) * 0.025
                x_marker = [t_pP - onset] * 2
                y = [y_pos - marker_length, y_pos + marker_length]
                ax.plot(x_marker, y, linewidth=1, c="blue")

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    # fontsize=12,
                    color="black",
                    verticalalignment="top",
                    horizontalalignment="right",
                )

            except IndexError:
                logger.warning(
                    "no pP phase at d=%s z=%s stat=%s" %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))

    if len(traces) == 0:
        raise Exception("No trace found!")
    elif len(traces) > 1:
        raise Exception("More than one trace provided!")
    else:
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == "displacement":
            tr = integrate_differentiate(tr, "integrate")
        tr = settings.do_filter(tr)
        # note: `s` still holds the last synthetic source from the loop
        # above; the test sources differ only in depth, so the distance
        # to the target is the same for each of them
        onset = (engine.get_store(targets[0].store_id).t(
            "begin", (event.depth, s.distance_to(targets[0]))) + event.time)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=onset + zoom_window[0] + correction,
            tmax=onset + zoom_window[1] + correction,
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.0)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel("Time [s]")
        ax.text(
            0.0,
            0.6,
            "Source depth [km]",
            rotation=90,
            horizontalalignment="left",
            transform=fig.transFigure,
        )  # , fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0.0, c="black")
        vline.set_linestyle("--")
    if settings.title:
        params = {
            "array-id": "".join(station.nsl()),
            "event_name": event.name,
            "event_time": time_to_str(event.time),
        }
        ax.text(
            0.5,
            1.05,
            settings.title % params,
            horizontalalignment="center",
            transform=ax.transAxes,
        )
    if settings.auto_caption:
        cax = fig.add_axes([0.0, 0.0, 1, 0.05], label="caption")
        cax.axis("off")
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == "displacement":
            quantity_info = "integrated velocity trace. "
        if settings.quantity == "velocity":
            quantity_info = "differentiated synthetic traces. "
        if settings.quantity == "restituted":
            quantity_info = "restituted traces. "

        captions = {"filters": ""}
        for f in settings.filters:
            captions["filters"] += "%s-pass, order %s, f$_c$=%s Hz. " % (
                f.type,
                f.order,
                f.corner,
            )
        captions["quantity_info"] = quantity_info
        captions["store_sampling"] = 1.0 / store.config.deltat
        cax.text(
            0,
            0,
            "Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s"
            % captions,
            fontsize=12,
            transform=cax.transAxes,
        )
        plt.subplots_adjust(hspace=0.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info("save as: %s " % settings.save_as)
        options = dict(settings.__dict__)  # copy, don't mutate settings
        options.update({"array-id": "".join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches="tight")
    if show:
        plt.show()
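
A note on the two amplitude scalers used above: 'trace min/max' normalizes by the largest absolute amplitude of the visible window, '4*standard deviation' by four sigma. The following is a minimal sketch of that logic on a plain numpy array; ampl_scale_factor is a hypothetical helper name, not part of the code above.

import numpy as num

def ampl_scale_factor(ydata, ampl_scaler, gain=1.0):
    # 'trace min/max' scales by the largest absolute amplitude,
    # '4*standard deviation' by four sigma; any other value leaves
    # the amplitudes unscaled. Dividing by the gain amplifies.
    if ampl_scaler == 'trace min/max':
        ampl_scale = float(num.max(num.abs(ydata)))
    elif ampl_scaler == '4*standard deviation':
        ampl_scale = 4. * float(num.std(ydata))
    else:
        ampl_scale = 1.0
    return ampl_scale / gain

ydata = num.sin(num.linspace(0., 10., 1000))
ydata /= ampl_scale_factor(ydata, 'trace min/max', gain=2.0)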
Example #38
0
def load_response_information(filename,
                              format,
                              nslc_patterns=None,
                              fake_input_units=None):

    from pyrocko import pz, trace
    from pyrocko.fdsn import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(zeros=zeros,
                                      poles=poles,
                                      constant=constant)

        resps.append(resp)
        labels.append(filename)

    elif format == 'resp':
        for resp in fresp.iload_filename(filename):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (fake_input_units
                                            or s.input_units.name,
                                            s.output_units.name)

            resps.append(
                resp.response.get_pyrocko_response(
                    resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' %
                          ((filename, ) + resp.codes +
                           (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (network.code, station.code, channel.location_code,
                            channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (fake_input_units
                                                    or s.input_units.name,
                                                    s.output_units.name)

                    resps.append(
                        channel.response.get_pyrocko_response(
                            nslc, fake_input_units=fake_input_units))

                    labels.append('%s (%s.%s.%s.%s, %s - %s%s)' %
                                  ((filename, ) + nslc +
                                   (tts(channel.start_date),
                                    tts(channel.end_date), units)))

    return resps, labels
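
A hedged usage sketch for load_response_information; the file name and the NSLC pattern below are made-up placeholders, not taken from the example.

# hypothetical StationXML file and channel pattern, for illustration only
resps, labels = load_response_information(
    'responses.xml', 'stationxml', nslc_patterns=['GE.APE..BH?'])
for resp, label in zip(resps, labels):
    print(label)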
    def process(self, event, timing, bazi=None, slow=None, restitute=False,
                *args, **kwargs):
        '''
        :param timing: CakeTiming. Uses the definition without the offset.
        :param bazi: back azimuth (degrees), used if no event is given
        :param slow: slowness (s/m), used if no event is given
        :param fn_dump_center: filename to which the center station is dumped
        :param fn_beam: filename of the beam trace
        :param network: network code (optional)
        :param station: station code (optional)
        '''
        logger.debug('start beam forming')
        stations = self.stations
        network_code = kwargs.get('network', '')
        station_code = kwargs.get('station', 'STK')
        c_station_id = (network_code, station_code)

        lat_c, lon_c, z_c = self.c_lat_lon_z

        self.station_c = Station(lat=float(lat_c),
                                 lon=float(lon_c),
                                 elevation=float(z_c),
                                 depth=0.,
                                 name='Array Center',
                                 network=c_station_id[0],
                                 station=c_station_id[1][:5])
        fn_dump_center = kwargs.get('fn_dump_center', 'array_center.pf')
        fn_beam = kwargs.get('fn_beam', 'beam.mseed')
        if event:
            mod = cake.load_model(crust2_profile=(event.lat, event.lon))
            dist = ortho.distance_accurate50m(event, self.station_c)
            ray = timing.t(mod, (event.depth, dist), get_ray=True)
            if ray is None:
                logger.error(
                    'none of the defined phases available at beam '
                    'station:\n %s' % self.station_c)
                return
            else:
                b = ortho.azimuth(self.station_c, event)
                self.bazi = b if b >= 0. else 360. + b
                self.slow = ray.p / (cake.r2d * cake.d2m)
        else:
            self.bazi = bazi
            self.slow = slow

        logger.info(
            'stacking %s with slowness %1.4f s/km at back azimuth %1.1f '
            'degrees' % ('.'.join(c_station_id), self.slow * cake.km,
                         self.bazi))

        lat0 = num.array([lat_c] * len(stations))
        lon0 = num.array([lon_c] * len(stations))
        lats = num.array([s.lat for s in stations])
        lons = num.array([s.lon for s in stations])
        ns, es = ortho.latlon_to_ne_numpy(lat0, lon0, lats, lons)
        # num.float was removed in recent numpy; the builtin float suffices
        theta = float(self.bazi * num.pi / 180.)
        R = num.array([[num.cos(theta), -num.sin(theta)],
                       [num.sin(theta), num.cos(theta)]])
        distances = R.dot(num.vstack((es, ns)))[1]
        channels = set()
        self.stacked = {}
        num_stacked = {}
        self.t_shifts = {}
        self.shifted_traces = []
        taperer = trace.CosFader(xfrac=0.05)
        if self.diff_dt_treat == 'downsample':
            self.traces.sort(key=lambda x: x.deltat)
        elif self.diff_dt_treat == 'oversample':
            dts = [t.deltat for t in self.traces]
            for tr in self.traces:
                tr.resample(min(dts))

        for tr in self.traces:
            if tr.nslc_id[:2] == c_station_id:
                continue
            tr = tr.copy(data=True)
            tr.ydata = (tr.ydata.astype(num.float64)
                        - tr.ydata.mean(dtype=num.float64))
            tr.taper(taperer)
            try:
                stack_trace = self.stacked[tr.channel]
                num_stacked[tr.channel] += 1
            except KeyError:
                stack_trace = tr.copy(data=True)
                stack_trace.set_ydata(num.zeros(
                    len(stack_trace.get_ydata())))

                stack_trace.set_codes(network=c_station_id[0],
                                      station=c_station_id[1],
                                      location='',
                                      channel=tr.channel)

                self.stacked[tr.channel] = stack_trace
                channels.add(tr.channel)
                num_stacked[tr.channel] = 1

            nslc_id = tr.nslc_id

            try:
                # Python 3: filter() returns an iterator, so wrap it in
                # list() before indexing
                stats = list(filter(lambda x: util.match_nslc(
                    '%s.%s.%s.*' % x.nsl(), nslc_id), stations))
                stat = stats[0]
            except IndexError:
                # no station matches this trace; stop stacking
                break

            i = stations.index(stat)
            d = distances[i]
            t_shift = d*self.slow
            tr.shift(t_shift)
            #stat = viewer.get_station(tr.nslc_id[:2])
            self.t_shifts[tr.nslc_id[:2]] = t_shift
            if self.normalize_std:
                tr.ydata = tr.ydata / tr.ydata.std()

            if num.abs(tr.deltat - stack_trace.deltat) > 0.000001:
                if self.diff_dt_treat == 'downsample':
                    stack_trace.downsample_to(tr.deltat)
                elif self.diff_dt_treat == 'oversample':
                    raise Exception(
                        'something went wrong with the oversampling, '
                        'previously')
            stack_trace.add(tr)

            tr.set_station('%s_s' % tr.station)
            self.shifted_traces.append(tr)

        if self.post_normalize:
            for ch, tr in self.stacked.items():
                tr.set_ydata(tr.get_ydata() / num_stacked[ch])
        #for ch, tr in self.stacked.items():
        #    if num_stacked[ch]>1:
        #        self.add_trace(tr)
        self.save_station(fn_dump_center)
        self.checked_nslc([stack_trace])
        self.save(stack_trace, fn_beam)
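
A usage sketch for the beam former above, assuming beam is an instance of the enclosing array class with stations and traces already attached. Without an event, back azimuth (degrees) and slowness (s/m) have to be passed explicitly and timing is unused; all values below are illustrative.

# `beam` is assumed to be set up elsewhere; values are made up
beam.process(event=None, timing=None, bazi=45., slow=1.e-4,
             fn_dump_center='array_center.pf', fn_beam='beam.mseed')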