Example #1
    def add_events(self, events=None, filename=None):
        if events is not None:
            self.events.extend(events)

        if filename is not None:
            logger.debug('Loading events from file %s' % filename)
            self.events.extend(model.load_events(filename))
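A minimal usage sketch for this method: `catalog` stands in for an instance of whatever class defines add_events(), and the file paths are illustrative.

from pyrocko import model

catalog.add_events(filename='events.txt')  # read events from a file
catalog.add_events(events=model.load_events('more_events.txt'))  # or pass a list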
Example #2
def plot_medians_meca(events, eventsclusters, clusters, conf, resdir, plotdir):

    nclusters = len(clusters)

    f = plt.figure(figsize=(10., 4.))
    f.subplots_adjust(left=0., right=1., bottom=0., top=1.)
    axes = f.add_subplot(1, 1, 1)

    for icl, cl in enumerate(clusters):
        medians = model.load_events(
            os.path.join(resdir, 'median_cluster' + str(cl) + '.pf'))
        median = medians[0]
        if median.moment_tensor is not None:
            median_mt = median.moment_tensor
            beachball.plot_beachball_mpl(median_mt,
                                         axes,
                                         beachball_type='full',
                                         size=150.,
                                         position=((10. * (icl + 0.5) /
                                                    nclusters), 2.),
                                         color_t=cluster_to_color(cl),
                                         alpha=1.0,
                                         linewidth=1.0)

    axes.set_xlim(0., 10.)
    axes.set_ylim(0., 4.)
    axes.set_axis_off()
    figname = os.path.join(plotdir, 'medians_meca.' + conf.figure_format)
    f.savefig(figname)
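For reference, a minimal standalone version of the same beachball call for a single event. The input path is illustrative and the import paths assume a recent pyrocko release.

import matplotlib.pyplot as plt
from pyrocko import model
from pyrocko.plot import beachball, mpl_color

events = model.load_events('median_cluster0.pf')  # hypothetical input file
mt = events[0].moment_tensor

fig = plt.figure(figsize=(4., 4.))
axes = fig.add_subplot(1, 1, 1)
axes.set_xlim(0., 1.)
axes.set_ylim(0., 1.)
beachball.plot_beachball_mpl(
    mt, axes,
    beachball_type='full',
    size=150.,
    position=(0.5, 0.5),
    color_t=mpl_color('scarletred2'),
    alpha=1.0,
    linewidth=1.0)
axes.set_axis_off()
plt.show()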
Example #3
    def get_events(self):
        if self.events_path is None:
            return None

        if self._events is None:
            self._events = model.load_events(self.expand_path(
                self.events_path))

        return self._events
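The same lazy-load-and-cache pattern can be written more compactly with functools.cached_property (Python 3.8+); a sketch, assuming the attribute only needs read access:

import functools

from pyrocko import model


class EventSource(object):

    def __init__(self, events_path):
        self.events_path = events_path

    @functools.cached_property
    def events(self):
        # loaded once on first access, then cached on the instance
        return model.load_events(self.events_path)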
Example #4
File: data.py Project: chaoshunh/pinky
    def setup(self):
        self.data_pile = pile.make_pile(
            self.data_paths, fileformat=self.data_format)

        if self.data_pile.is_empty():
            sys.exit('Data pile is empty!')

        self.deltat_want = self.config.deltat_want or \
                min(self.data_pile.deltats.keys())

        self.n_samples = int(
                (self.config.sample_length + self.config.tpad) / self.deltat_want)

        logger.debug('loading marker file %s' % self.fn_markers)

        # loads just plain markers:
        markers = marker.load_markers(self.fn_markers)

        if self.fn_events:
            markers.extend(
                [marker.EventMarker(e) for e in
                 load_events(self.fn_events)])

        if self.sort_markers:
            logger.info('sorting markers!')
            markers.sort(key=lambda x: x.tmin)
        marker.associate_phases_to_events(markers)

        markers_by_nsl = {}
        for m in markers:
            if not m.match_nsl(self.config.reference_target.codes[:3]):
                continue

            if m.get_phasename().upper() != self.align_phase:
                continue

            markers_by_nsl.setdefault(m.one_nslc()[:3], []).append(m)

        assert len(markers_by_nsl) == 1

        self.markers = list(markers_by_nsl.values())[0]

        if not self.labeled:
            dummy_event = Event(lat=0., lon=0., depth=0.)
            for m in self.markers:
                if not m.get_event():
                    m.set_event(dummy_event)

        # filter markers that do not have an event assigned:
        self.markers = [m for m in self.markers if m.get_event() is not None]

        if not len(self.markers):
            raise Exception('No markers left in dataset')

        self.config.channels = list(self.data_pile.nslc_ids.keys())
        self.config.channels.sort()
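A self-contained sketch of the marker/event bookkeeping used in setup() above. File paths are illustrative and the import path assumes a recent pyrocko.

from pyrocko import model
from pyrocko.gui import marker

markers = marker.load_markers('picks.txt')  # hypothetical marker file
markers.extend(
    marker.EventMarker(e) for e in model.load_events('events.txt'))
marker.associate_phases_to_events(markers)
phase_markers = [m for m in markers if isinstance(m, marker.PhaseMarker)]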
Example #5
def process(args, scenario_folder, n_tests=1, show=True):
    nstart = 8
    array_centers = []
    from .guesstimate_depth_v02 import PlotSettings, plot

    events = []
    stations = []
    mod = insheim_layered_model()

    for i in range(nstart, nstart + 1):
        print("%s/scenario_%s/event.txt" % (scenario_folder, i))

        events.append(model.load_events("%s/scenario_%s/event.txt" % (scenario_folder, i))[0])
        stations.append(model.load_stations("%s/scenario_%s/stations.pf" % (scenario_folder, i)))
        traces = io.load(pjoin("%sscenario_%s/" % (scenario_folder, i), 'traces.mseed'))

        event = events[0]
        stations = stations[0]
        min_dist = min(
            [ortho.distance_accurate50m(s, event) for s in stations])
        max_dist = max(
            [ortho.distance_accurate50m(s, event) for s in stations])
        tmin = CakeTiming(phase_selection='first(p|P|PP)-10', fallback_time=0.001)
        tmax = CakeTiming(phase_selection='first(p|P|PP)+52', fallback_time=1000.)
        timing = (tmin, tmax)

        fns = ['.']
        array_id = "INS"

        settings_fn = pjoin("%sscenario_%s/" % (scenario_folder, i), 'plot_settings.yaml')
        settings = PlotSettings.from_argument_parser(args)

        if not settings.trace_filename:
            settings.trace_filename = pjoin("%sscenario_%s/" % (scenario_folder, i), 'beam.mseed')
        if not settings.station_filename:
            fn_array_center = pjoin("%sscenario_%s/" % (scenario_folder, i), 'array_center.pf')
            settings.station_filename = fn_array_center
            station = model.load_stations(fn_array_center)

            settings.store_id = 'landau_100hz'


        settings.event_filename = pjoin("%sscenario_%s/" % (scenario_folder, i), "event.txt")
        settings.save_as = pjoin("%sscenario_%s/" % (scenario_folder, i), "depth_%(array-id)s.png")
        plot(settings)
        if args.overwrite_settings:
            settings.dump(filename=settings_fn)
        if show is True:
            plt.show()
Example #6
    def get_event_names(self):
        def extra(path):
            return expand_template(path, dict(event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        events = []
        for fn in glob.glob(fp(self.events_path)):
            events.extend(model.load_events(filename=fn))

        event_names = [ev.name for ev in events]
        return event_names
Example #7
File: data.py Project: zhengjing8628/pinky
    def setup(self):
        self.data_pile = pile.make_pile(
            self.data_paths, fileformat=self.data_format)

        if self.data_pile.is_empty():
            sys.exit('Data pile is empty!')

        self.deltat_want = self.config.deltat_want or \
                min(self.data_pile.deltats.keys())

        self.n_samples = int(
                (self.config.sample_length + self.config.tpad) / self.deltat_want)

        logger.debug('loading marker file %s' % self.fn_markers)

        # loads just plain markers:
        markers = marker.load_markers(self.fn_markers)

        if self.fn_events:
            markers.extend(
                [marker.EventMarker(e) for e in
                 load_events(self.fn_events)])

        marker.associate_phases_to_events(markers)
        markers = [m for m in markers if isinstance(m, marker.PhaseMarker)]

        markers_dict = defaultdict(list)
        for m in markers:
            if m.get_phasename().upper() != self.align_phase:
                continue

            markers_dict[m.get_event()].append(m)

        self.markers = []
        for e, _markers in markers_dict.items():
            first = min(_markers, key=lambda x: x.tmin)
            self.markers.append(first)

        if not self.labeled:
            dummy_event = Event(lat=0., lon=0., depth=0.)
            for m in self.markers:
                if not m.get_event():
                    m.set_event(dummy_event)

        self.markers = [m for m in self.markers if m.get_event() is not None]

        if not len(self.markers):
            raise Exception('No markers left in dataset')

        self.config.channels = list(self.data_pile.nslc_ids.keys())
        self.config.channels.sort()
Example #8
    def add_events(self, events=None, filename=None):
        if events is not None:
            self.events.extend(events)

        if filename is not None:
            logger.debug('Loading events from file "%s"...' % filename)
            try:
                events = model.load_events(filename)
                self.events.extend(events)
                logger.info('Loading events from %s: %i events found.' %
                            (filename, len(events)))
            except Exception:
                logger.warning('Could not load events from %s!', filename)
                raise
Example #9
    def load_comparison(self):
        '''
        For comparison in synthetic tests.
        '''
        fn = self.input_filename(caption='Select an event catalog')
        kind_compare = 4
        compare_events = model.load_events(fn)
        markers = [
            gui_util.EventMarker(event=e, kind=kind_compare)
            for e in compare_events
        ]

        self.markers_compare = markers
        self.add_markers(self.markers_compare)
Example #10
def read_data(event_fn):
    events = model.load_events(event_fn)
    moment_tensors = []
    for e in events:
        if e.moment_tensor is not None:
            moment_tensors.append(e.moment_tensor)
        else:
            moment_tensors.append(None)

    normals = moment_tensors2normals(moment_tensors)
    centers = to_cartesian(events)
    colors = to_colors(moment_tensors)
    
    return normals, centers, colors
Example #11
def associate_single(ev, data_dir, store_id, store,
                     stations=None, pre=0.5,
                     post=3, reference_event=None, min_len=420,
                     pick_sigma=0.02):
    events = []
    waveforms = []
    labels = []
    gf_freq = store.config.sample_rate
    mod = store.config.earthmodel_1d
    found = False
    pathlist = Path(data_dir).glob('ev_*/')
    for path in sorted(pathlist):
        targets = []
        path = str(path)+"/"
        try:
            event = model.load_events(path+"event.txt")[0]
            if abs(event.time - ev.time) < 10:
                traces_loaded = io.load(path+"/waveforms/rest/traces.mseed")
                stations_unsorted = model.load_stations(data_dir+"stations.pf")
                for st in stations_unsorted:
                    st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                              event.lat,
                                                              event.lon)
                    st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat,
                                                event.lon)
                stations = sorted(stations_unsorted, key=lambda x: x.dist,
                                  reverse=True)

                traces_processed = []
                traces = wp.check_traces(traces_loaded, stations, min_len=min_len)

                traces_processed, nsamples = wp.process_loaded_waveforms(traces,
                                                                         stations,
                                                                         ev,
                                                                         gf_freq,
                                                                         mod,
                                                                         pre,
                                                                         post)
                if found is False:
                    events.append(event)
                    waveforms.append(traces_processed)
                    found = True
        except Exception:
            # skip event directories with missing or unreadable data
            pass
    data_events, nsamples = wp.prepare_waveforms(waveforms)
    return data_events, nsamples, event
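A self-contained sketch of the distance/azimuth sorting step used above, assuming `stations` and `event` are already loaded pyrocko objects:

from pyrocko import orthodrome

for st in stations:
    st.dist = orthodrome.distance_accurate50m(
        st.lat, st.lon, event.lat, event.lon)
    st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat, event.lon)

# farthest station first, as in the example above
stations = sorted(stations, key=lambda s: s.dist, reverse=True)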
Example #12
def prep_data_batch(data_dir, store_id, stations=None, pre=0.5,
                    post=3, reference_event=None, min_len=420,
                    pick_sigma=0.02):
    engine = LocalEngine(store_superdirs=['/home/asteinbe/gf_stores'])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    gf_freq = store.config.sample_rate
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    events = []
    waveforms = []
    waveforms_shifted = []
    events = scedc_util.scedc_fm_to_pyrocko(file)
    labels = labels_from_events(events)
    pathlist = Path(data_dir).glob('ev_0/')
    for path in sorted(pathlist):
        try:
            targets = []
            path = str(path)+"/"
            event = model.load_events(path+"event.txt")[0]
            traces_loaded = io.load(path+"traces.mseed")
            stations_unsorted = model.load_stations(data_dir+"stations.pf")
            for st in stations_unsorted:
                st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                          event.lat,
                                                          event.lon)
                st.azi = orthodrome.azimuth(st.lat, st.lon, event.lat,
                                            event.lon)
            stations = sorted(stations_unsorted, key=lambda x: x.dist,
                              reverse=True)

            traces_processed = []
            traces = check_traces(traces_loaded, stations, min_len=min_len)
            traces_processed, nsamples = wp.process_loaded_waveforms(
                traces, stations, event, gf_freq, mod, pre, post)
            events.append(event)
            waveforms.append(traces_processed)
        except Exception:
            pass
    return waveforms, nsamples, events, waveforms_shifted
Example #13
def subset_events_dist_cat(catalog, mag_min, mag_max,
                           tmin, tmax, st_lat, st_lon,
                           dist_min=None, dist_max=None):
    """
    Extract a subset of events from an event catalog.

    :param catalog: event catalog in pyrocko format
    :param mag_min: minimum magnitude
    :param mag_max: maximum magnitude
    :param tmin: UTC time string ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param tmax: UTC time string ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param st_lat: station latitude
    :param st_lon: station longitude
    :param dist_min: minimum epicentral distance (km)
    :param dist_max: maximum epicentral distance (km)

    :returns: list of events
    """

    use_events = []
    events = model.load_events(catalog)
    for ev in events:
        if mag_min < ev.magnitude < mag_max and \
                util.str_to_time(tmin) < ev.time < util.str_to_time(tmax):
            if dist_min or dist_max:
                dist = orthodrome.distance_accurate50m_numpy(
                    ev.lat, ev.lon, st_lat, st_lon) / 1000.

                if dist_min and dist_max and dist_min < dist < dist_max:
                    use_events.append(ev)

                if dist_min and not dist_max and dist > dist_min:
                    use_events.append(ev)

                if dist_max and not dist_min and dist < dist_max:
                    use_events.append(ev)

            else:
                # no distance constraint given: keep the event
                use_events.append(ev)

    return use_events
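A hedged usage sketch; the catalog path and station coordinates are illustrative:

selected = subset_events_dist_cat(
    'catalog.pf',
    mag_min=3.0, mag_max=6.0,
    tmin='2010-01-01 00:00:00.000',
    tmax='2015-01-01 00:00:00.000',
    st_lat=52.0, st_lon=11.0,
    dist_min=10., dist_max=300.)
print('%i events selected' % len(selected))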
Example #14
    def get_event_names(self):
        def extra(path):
            return expand_template(path, dict(event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        events = []
        events_path = fp(self.events_path)
        fns = glob.glob(events_path)
        if not fns:
            raise DatasetError('No event files matching "%s".' % events_path)

        for fn in fns:
            events.extend(model.load_events(filename=fn))

        event_names = [ev.name for ev in events]
        event_names.sort()
        return event_names
Example #15
    def get_event_names(self):
        logger.info('Loading events ...')

        def extra(path):
            return expand_template(path, dict(
                event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        def check_events(events, fn):
            for ev in events:
                if not ev.name:
                    logger.warning('Event in %s has no name!', fn)
                    return
                if not ev.lat or not ev.lon:
                    logger.warning('Event %s has inconsistent coordinates!',
                                   ev.name)
                if not ev.depth:
                    logger.warning('Event %s has no depth!', ev.name)
                if not ev.time:
                    logger.warning('Event %s has no time!', ev.name)

        events = []
        events_path = fp(self.events_path)
        fns = glob.glob(events_path)
        if not fns:
            raise DatasetError('No event files matching "%s".' % events_path)

        for fn in fns:
            logger.debug('Loading from file %s' % fn)
            ev = model.load_events(filename=fn)
            check_events(ev, fn)

            events.extend(ev)

        event_names = [ev_.name for ev_ in events]
        event_names.sort()
        return event_names
Example #16
def read_data(event_fn=None, events=None, get=None):
    if event_fn is not None and events is None:
        events = model.load_events(event_fn)
    moment_tensors = []
    for e in events:
        if e.moment_tensor is not None:
            moment_tensors.append(e.moment_tensor)
        else:
            moment_tensors.append(None)

    centers = to_cartesian(events)
    colors = to_colors(moment_tensors)
    normals = []
    if get == 'rupture_plane':
        normals.append(get_rupture_planes(moment_tensors, centers))
    else:
        for g in get:
            normals.append(moment_tensors2normals(moment_tensors, g))

    return normals, centers, colors
Example #18
def beam(scenario_folder, n_tests=1, show=False):
    nstart = 8
    array_centers = []

    events = []
    stations = []
    mod = insheim_layered_model()

    for i in range(nstart, n_tests):
        print("%s/scenario_%s/event.txt" % (scenario_folder, i))

        events.append(model.load_events("%s/scenario_%s/event.txt" % (scenario_folder, i))[0])
        stations.append(model.load_stations("%s/scenario_%s/stations.pf" % (scenario_folder, i)))
        traces = io.load(pjoin("%sscenario_%s/" % (scenario_folder, i), 'traces.mseed'))

        event = events[0]
        stations = stations[0]
        min_dist = min(
            [ortho.distance_accurate50m(s, event) for s in stations])
        max_dist = max(
            [ortho.distance_accurate50m(s, event) for s in stations])
        tmin = CakeTiming(phase_selection='first(p|P|PP)-10', fallback_time=0.001)
        tmax = CakeTiming(phase_selection='first(p|P|PP)+52', fallback_time=1000.)
        timing = (tmin, tmax)
        tstart = timing[0].t(mod, (event.depth, min_dist))
        tend = timing[1].t(mod, (event.depth, max_dist))

        normalize = True
        bf = BeamForming(stations, traces, normalize=normalize)
        bf.process(event=event,
                   timing=tmin,
                   fn_dump_center=pjoin("%sscenario_%s/" % (scenario_folder, i), 'array_center.pf'),
                   fn_beam=pjoin("%sscenario_%s/" % (scenario_folder, i), 'beam.mseed'),
                   station="INS")
        if show is True:
            bf.plot(fn=pjoin("%sscenario_%s/" % (scenario_folder, i), 'beam_shifts.png'))

        array_centers.append(bf.station_c)
Example #19
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return 

    events = model.load_events(settings.event_filename)
    assert len(events) == 1
    event = events[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = [s for s in stations
               if match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id)]
    assert len(station) == 1
    station = station[0]
    targets = [station_to_target(station, quantity=quantity, store_id=settings.store_id)]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz-minz)*0.02
    for s, t, tr in request.iter_results():
        if quantity == 'velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t(
            'begin', (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left=False, right=False,
                           labelleft=False)

        y_pos = s.depth
        xdata = tr.get_xdata()-onset-s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time+onset+zoom_window[0],
                          tmax=event.time+onset+zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata/ampl_scale)*relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color='black', alpha=0.5)
        ax.text(zoom_window[0]*1.09, y_pos, '%1.1f' % (s.depth/1000.), horizontalalignment='right') #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                      distances=[s.distance_to(t)*cake.m2d],
                                      zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t-onset]*2
                y = [y_pos-(maxz-minz)*0.025, y_pos+(maxz-minz)*0.025]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(x_marker[1]-x_marker[1]*0.005, y[1], label,
                        #fontsize=12,
                        color='black',
                        verticalalignment='top',
                        horizontalalignment='right')

            except IndexError:
                logger.warning('no pP phase at d=%s z=%s stat=%s' % (s.distance_to(t)*cake.m2d,
                                                                     s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception('No trace found!')
    elif len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == 'displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata()/max(abs(tr.get_ydata())))
            ax.tick_params(axis='y', which='both', left=False, right=False,
                           labelleft=False)

        y_pos = event.depth
        xdata = tr.get_xdata()-onset+correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=onset+zoom_window[0]+correction,
                          tmax=onset+zoom_window[1]+correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4*float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata/ampl_scale * settings.gain*settings.gain_record)*relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin-zrange*0.2, zmax+zrange*0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0, 0.6, 'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure) #, fontsize=12.)

    if fill:
        ax.fill_between(xdata, y_pos, ydata, where=ydata<y_pos, color=settings.color, alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax+zrange*0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {'array-id': ''.join(station.nsl()),
                  'event_name': event.name,
                  'event_time': time_to_str(event.time)}
        ax.text(0.5, 1.05, settings.title % params,
                horizontalalignment='center', 
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        elif settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        elif settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '
        else:
            quantity_info = ''

        captions = {'filters': ''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % (
                f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1./store.config.deltat
        cax.text(0, 0, 'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s' % captions,
                 fontsize=12, transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
Example #20

def __snufflings__():
    return [MapMaker()]


if __name__ == '__main__':
    util.setup_logging('map.py', 'info')
    s = MapMaker()
    options, args, parser = s.setup_cli()
    s.markers = []

    if options.stations_filename:
        stations = model.load_stations(options.stations_filename)
        s.stations = stations
    else:
        s.stations = None

    if options.events_filename:
        events = model.load_events(filename=options.events_filename)
        markers = [gui_util.EventMarker(e) for e in events]
        s.markers.extend(markers)

    if options.markers_filename:
        markers = gui_util.load_markers(options.markers_filename)
        s.markers.extend(markers)
    s.open_external = True
    mapmap = {'google': 'Google Maps', 'osm': 'OpenStreetMap'}
    s.map_kind = mapmap[options.map_provider]
    s.call()
Example #21
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=settings_fn)
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(float(zstart)*km, float(zstop)*km, float(inkr)*km)
            traces = io.load(settings.trace_filename)
            events = model.load_events(settings.event_filename)
            assert len(events) == 1
            event = events[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = [s for s in stations
                       if match_nslc('%s.%s.%s.*' % s.nsl(),
                                     traces[0].nslc_id)]
            if len(station) != 1:
                logger.error('no matching station found for %s'
                             % '.'.join(traces[0].nslc_id))
            else:
                station = station[0]
            targets = [station_to_target(station, quantity=settings.quantity, store_id=settings.store_id)]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' % error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources, key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources, key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, min_dist_delta*cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(t.lat, t.lon, azi, max_dist_delta*cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets, sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces) == 1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            mesh_fwidth = num.zeros(len(center_freqs)*num_f_widths*len(candidates))
            misfits_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros((len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc-fc*2./5.
                fr_max = fc+fc*2./5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc+width*0.5), order=4, type='low'),
                            ButterworthResponse(corner=float(fc-width*0.5), order=4, type='high')]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(candidate=candidate, setup=settings.misfit_setup, debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(len(test_depths)+1, 1, i+1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m/n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits: \n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits*100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
Example #22
File: snuffler.py Project: emolch/pyrocko
def snuffler_from_commandline(args=None):
    if args is None:
        args = sys.argv[1:]

    usage = '''usage: %prog [options] waveforms ...'''
    parser = OptionParser(usage=usage)

    parser.add_option(
        '--format',
        dest='format',
        default='detect',
        choices=io.allowed_formats('load'),
        help='assume input files are of given FORMAT. Choices: %s'
             % io.allowed_formats('load', 'cli_help', 'detect'))

    parser.add_option(
        '--pattern',
        dest='regex',
        metavar='REGEX',
        help='only include files whose paths match REGEX')

    parser.add_option(
        '--stations',
        dest='station_fns',
        action='append',
        default=[],
        metavar='STATIONS',
        help='read station information from file STATIONS')

    parser.add_option(
        '--stationxml',
        dest='stationxml_fns',
        action='append',
        default=[],
        metavar='STATIONSXML',
        help='read station information from XML file STATIONSXML')

    parser.add_option(
        '--event', '--events',
        dest='event_fns',
        action='append',
        default=[],
        metavar='EVENT',
        help='read event information from file EVENT')

    parser.add_option(
        '--markers',
        dest='marker_fns',
        action='append',
        default=[],
        metavar='MARKERS',
        help='read marker information file MARKERS')

    parser.add_option(
        '--follow',
        type='float',
        dest='follow',
        metavar='N',
        help='follow real time with a window of N seconds')

    parser.add_option(
        '--cache',
        dest='cache_dir',
        default=config.config().cache_dir,
        metavar='DIR',
        help='use directory DIR to cache trace metadata '
             '(default=\'%default\')')

    parser.add_option(
        '--force-cache',
        dest='force_cache',
        action='store_true',
        default=False,
        help='use the cache even when trace attribute spoofing is active '
             '(may have silly consequences)')

    parser.add_option(
        '--store-path',
        dest='store_path',
        metavar='PATH_TEMPLATE',
        help='store data received through streams to PATH_TEMPLATE')

    parser.add_option(
        '--store-interval',
        type='float',
        dest='store_interval',
        default=600,
        metavar='N',
        help='dump stream data to file every N seconds [default: %default]')

    parser.add_option(
        '--ntracks',
        type='int',
        dest='ntracks',
        default=24,
        metavar='N',
        help='initially use N waveform tracks in viewer [default: %default]')

    parser.add_option(
        '--opengl',
        dest='opengl',
        action='store_true',
        default=False,
        help='use OpenGL for drawing')

    parser.add_option(
        '--qt5',
        dest='gui_toolkit_qt5',
        action='store_true',
        default=False,
        help='use Qt5 for the GUI')

    parser.add_option(
        '--qt4',
        dest='gui_toolkit_qt4',
        action='store_true',
        default=False,
        help='use Qt4 for the GUI')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    options, args = parser.parse_args(list(args))

    if options.debug:
        util.setup_logging('snuffler', 'debug')
    else:
        util.setup_logging('snuffler', 'warning')

    if options.gui_toolkit_qt4:
        config.override_gui_toolkit = 'qt4'

    if options.gui_toolkit_qt5:
        config.override_gui_toolkit = 'qt5'

    this_pile = pile.Pile()
    stations = []
    for stations_fn in extend_paths(options.station_fns):
        stations.extend(model.station.load_stations(stations_fn))

    for stationxml_fn in extend_paths(options.stationxml_fns):
        stations.extend(
            stationxml.load_xml(
                filename=stationxml_fn).get_pyrocko_stations())

    events = []
    for event_fn in extend_paths(options.event_fns):
        events.extend(model.load_events(event_fn))

    markers = []
    for marker_fn in extend_paths(options.marker_fns):
        markers.extend(marker.load_markers(marker_fn))

    return snuffle(
        this_pile,
        stations=stations,
        events=events,
        markers=markers,
        ntracks=options.ntracks,
        follow=options.follow,
        controls=True,
        opengl=options.opengl,
        paths=args,
        cache_dir=options.cache_dir,
        regex=options.regex,
        format=options.format,
        force_cache=options.force_cache,
        store_path=options.store_path,
        store_interval=options.store_interval)
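As a usage note, this function backs the snuffler command-line tool; a typical hedged invocation with the options parsed above (file names illustrative):

snuffler --stations=stations.txt --events=events.txt --markers=picks.txt data/*.mseed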
Example #24
    def invert(self, args):
        align_phase = 'P'
        ampl_scaler = '4*standard deviation'

        for array_id in self.provider.use:
            try:
                if args.array_id and array_id != args.array_id:
                    continue
            except AttributeError:
                pass
            subdir = pjoin('array_data', array_id)
            settings_fn = pjoin(subdir, 'plot_settings.yaml')
            if os.path.isfile(settings_fn):
                settings = PlotSettings.load(filename=pjoin(settings_fn))
                settings.update_from_args(self.args)
            else:
                logger.warning('no settings found: %s' % array_id)
                continue
            if settings.store_superdirs:
                engine = LocalEngine(store_superdirs=settings.store_superdirs)
            else:
                engine = LocalEngine(use_config=True)
            try:
                store = engine.get_store(settings.store_id)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return

            if not settings.trace_filename:
                settings.trace_filename = pjoin(subdir, 'beam.mseed')
            if not settings.station_filename:
                settings.station_filename = pjoin(subdir, 'array_center.pf')
            zoom_window = settings.zoom
            mod = store.config.earthmodel_1d

            zstart, zstop, inkr = settings.depths.split(':')
            test_depths = num.arange(
                float(zstart) * km,
                float(zstop) * km,
                float(inkr) * km)
            traces = io.load(settings.trace_filename)
            events = model.load_events(settings.event_filename)
            assert len(events) == 1
            event = events[0]
            event.depth = float(settings.depth) * 1000.
            base_source = MTSource.from_pyrocko_event(event)

            test_sources = []
            for d in test_depths:
                s = base_source.clone()
                s.depth = float(d)
                test_sources.append(s)

            stations = model.load_stations(settings.station_filename)
            station = [s for s in stations
                       if match_nslc('%s.%s.%s.*' % s.nsl(),
                                     traces[0].nslc_id)]
            if len(station) != 1:
                logger.error('no matching station found for %s'
                             % '.'.join(traces[0].nslc_id))
            else:
                station = station[0]
            targets = [
                station_to_target(station,
                                  quantity=settings.quantity,
                                  store_id=settings.store_id)
            ]
            try:
                request = engine.process(targets=targets, sources=test_sources)
            except seismosizer.NoSuchStore as e:
                logger.info('%s ... skipping.' % e)
                return
            except meta.OutOfBounds as error:
                if settings.force_nearest_neighbor:
                    logger.warning('%s  Using nearest neighbor instead.' %
                                   error)
                    mod_targets = []
                    for t in targets:
                        closest_source = min(test_sources,
                                             key=lambda s: s.distance_to(t))
                        farthest_source = max(test_sources,
                                              key=lambda s: s.distance_to(t))
                        min_dist_delta = store.config.distance_min - closest_source.distance_to(
                            t)
                        max_dist_delta = store.config.distance_max - farthest_source.distance_to(
                            t)
                        if min_dist_delta < 0:
                            azi, bazi = closest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                        elif max_dist_delta < 0:
                            azi, bazi = farthest_source.azibazi_to(t)
                            newlat, newlon = ortho.azidist_to_latlon(
                                t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                        t.lat, t.lon = newlat, newlon
                        mod_targets.append(t)
                    request = engine.process(targets=mod_targets,
                                             sources=test_sources)
                else:
                    raise error

            candidates = []
            for s, t, tr in request.iter_results():
                tr.deltat = regularize_float(tr.deltat)
                tr = integrate_differentiate(tr, 'differentiate')
                tr = settings.do_filter(tr)
                candidates.append((s, tr))
            assert len(traces) == 1
            ref = traces[0]
            ref = settings.do_filter(ref)
            dist = ortho.distance_accurate50m(event, station)
            tstart = self.provider.timings[array_id].timings[0].t(
                mod, (event.depth, dist)) + event.time
            tend = self.provider.timings[array_id].timings[1].t(
                mod, (event.depth, dist)) + event.time
            ref = ref.chop(tstart, tend)
            misfits = []

            center_freqs = num.arange(1., 9., 4.)
            num_f_widths = len(center_freqs)

            mesh_fc = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            mesh_fwidth = num.zeros(
                len(center_freqs) * num_f_widths * len(candidates))
            misfits_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            depths_array = num.zeros(
                (len(center_freqs), num_f_widths, len(candidates)))
            debug = False
            pb = ProgressBar(maxval=max(center_freqs)).start()
            i = 0
            for i_fc, fc in enumerate(center_freqs):
                if debug:
                    fig = plt.figure()

                fl_min = fc - fc * 2. / 5.
                fr_max = fc + fc * 2. / 5.
                widths = num.linspace(fl_min, fr_max, num_f_widths)

                for i_width, width in enumerate(widths):
                    i_candidate = 0
                    mesh_fc[i] = fc
                    mesh_fwidth[i] = width
                    i += 1
                    for source, candidate in candidates:
                        candidate = candidate.copy()
                        tstart = self.provider.timings[array_id].timings[0].t(
                            mod, (source.depth, dist)) + event.time
                        tend = self.provider.timings[array_id].timings[1].t(
                            mod, (source.depth, dist)) + event.time
                        filters = [
                            ButterworthResponse(corner=float(fc + width * 0.5),
                                                order=4,
                                                type='low'),
                            ButterworthResponse(corner=float(fc - width * 0.5),
                                                order=4,
                                                type='high')
                        ]
                        settings.filters = filters
                        candidate = settings.do_filter(candidate)
                        candidate.chop(tmin=tstart, tmax=tend)
                        candidate.shift(float(settings.correction))
                        m, n, aproc, bproc = ref.misfit(
                            candidate=candidate,
                            setup=settings.misfit_setup,
                            debug=True)
                        aproc.set_codes(station='aproc')
                        bproc.set_codes(station='bproc')
                        if debug:
                            ax = fig.add_subplot(
                                len(test_depths) + 1, 1, i + 1)
                            ax.plot(aproc.get_xdata(), aproc.get_ydata())
                            ax.plot(bproc.get_xdata(), bproc.get_ydata())
                        mf = m / n
                        #misfits.append((source.depth, mf))
                        misfits_array[i_fc][i_width][i_candidate] = mf
                        i_candidate += 1
                pb.update(fc)

            pb.finish()
            fig = plt.figure()
            ax = fig.add_subplot(111)
            i_best_fits = num.argmin(misfits_array, 2)
            print('best fits: \n', i_best_fits)
            best_fits = num.min(misfits_array, 2)
            #cmap = matplotlib.cm.get_cmap()
            xmesh, ymesh = num.meshgrid(mesh_fc, mesh_fwidth)
            #c = (best_fits-num.min(best_fits))/(num.max(best_fits)-num.min(best_fits))
            ax.scatter(xmesh, ymesh, best_fits * 100)
            #ax.scatter(mesh_fc, mesh_fwidth, c)
            #ax.scatter(mesh_fc, mesh_fwidth, s=best_fits)
            ax.set_xlabel('fc')
            ax.set_ylabel('f_width')
        plt.legend()
        plt.show()
Example #25
    )
    parser.add_argument(
        "--show",
        help="show figure at the end",
        default=False,
        required=False,
        action="store_true",
    )
    args = parser.parse_args()

    stations = model.load_stations(args.stations)

    if args.usestations:
        stations = [
            s for s in stations if util.match_nslc(args.usestations, s.nsl())
        ]

    events = []
    if args.events:
        events.extend(model.load_events(args.events))
    if args.markers:
        markers = gui_util.load_markers(args.markers)
        events.extend([m.get_event() for m in markers])
    get_bounds(
        stations,
        events=events,
        usestations=args.usestations,
        printall=args.printall,
        show_fig=args.show,
    )
Example #26
File: beachball.py Project: wsja/pyrocko
        alpha=alpha)


if __name__ == '__main__':
    import sys
    import os
    import matplotlib.pyplot as plt
    from pyrocko import model

    args = sys.argv[1:]

    data = []
    for iarg, arg in enumerate(args):

        if os.path.exists(arg):
            events = model.load_events(arg)
            for ev in events:
                if not ev.moment_tensor:
                    logger.warning('no moment tensor given for event')
                    continue

                data.append((ev.name, ev.moment_tensor))
        else:
            vals = list(map(float, arg.split(',')))
            mt = mtm.as_mt(vals)
            data.append(('%i' % (iarg+1), mt))

    n = len(data)

    ncols = 1
    while ncols**2 < n:
Example #27
def snuffler_from_commandline(args=None):
    if args is None:
        args = sys.argv[1:]

    usage = '''usage: %prog [options] waveforms ...'''
    parser = OptionParser(usage=usage)

    parser.add_option(
        '--format',
        dest='format',
        default='detect',
        choices=io.allowed_formats('load'),
        help='assume input files are of given FORMAT. Choices: %s' %
        io.allowed_formats('load', 'cli_help', 'detect'))

    parser.add_option('--pattern',
                      dest='regex',
                      metavar='REGEX',
                      help='only include files whose paths match REGEX')

    parser.add_option('--stations',
                      dest='station_fns',
                      action='append',
                      default=[],
                      metavar='STATIONS',
                      help='read station information from file STATIONS')

    parser.add_option(
        '--stationxml',
        dest='stationxml_fns',
        action='append',
        default=[],
        metavar='STATIONSXML',
        help='read station information from XML file STATIONSXML')

    parser.add_option('--event',
                      '--events',
                      dest='event_fns',
                      action='append',
                      default=[],
                      metavar='EVENT',
                      help='read event information from file EVENT')

    parser.add_option('--markers',
                      dest='marker_fns',
                      action='append',
                      default=[],
                      metavar='MARKERS',
                      help='read marker information from file MARKERS')

    parser.add_option('--follow',
                      type='float',
                      dest='follow',
                      metavar='N',
                      help='follow real time with a window of N seconds')

    parser.add_option('--cache',
                      dest='cache_dir',
                      default=config.config().cache_dir,
                      metavar='DIR',
                      help='use directory DIR to cache trace metadata '
                      '(default=\'%default\')')

    parser.add_option(
        '--force-cache',
        dest='force_cache',
        action='store_true',
        default=False,
        help='use the cache even when trace attribute spoofing is active '
        '(may have silly consequences)')

    parser.add_option(
        '--store-path',
        dest='store_path',
        metavar='PATH_TEMPLATE',
        help='store data received through streams to PATH_TEMPLATE')

    parser.add_option(
        '--store-interval',
        type='float',
        dest='store_interval',
        default=600,
        metavar='N',
        help='dump stream data to file every N seconds [default: %default]')

    parser.add_option(
        '--ntracks',
        type='int',
        dest='ntracks',
        default=24,
        metavar='N',
        help='initially use N waveform tracks in viewer [default: %default]')

    parser.add_option('--opengl',
                      dest='opengl',
                      action='store_true',
                      default=False,
                      help='use OpenGL for drawing')

    parser.add_option('--qt5',
                      dest='gui_toolkit_qt5',
                      action='store_true',
                      default=False,
                      help='use Qt5 for the GUI')

    parser.add_option('--qt4',
                      dest='gui_toolkit_qt4',
                      action='store_true',
                      default=False,
                      help='use Qt4 for the GUI')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    options, args = parser.parse_args(list(args))

    if options.debug:
        util.setup_logging('snuffler', 'debug')
    else:
        util.setup_logging('snuffler', 'warning')

    if options.gui_toolkit_qt4:
        config.override_gui_toolkit = 'qt4'

    if options.gui_toolkit_qt5:
        config.override_gui_toolkit = 'qt5'

    this_pile = pile.Pile()
    stations = []
    for stations_fn in extend_paths(options.station_fns):
        stations.extend(model.station.load_stations(stations_fn))

    for stationxml_fn in extend_paths(options.stationxml_fns):
        stations.extend(
            stationxml.load_xml(filename=stationxml_fn).get_pyrocko_stations())

    events = []
    for event_fn in extend_paths(options.event_fns):
        events.extend(model.load_events(event_fn))

    markers = []
    for marker_fn in extend_paths(options.marker_fns):
        markers.extend(marker.load_markers(marker_fn))

    return snuffle(this_pile,
                   stations=stations,
                   events=events,
                   markers=markers,
                   ntracks=options.ntracks,
                   follow=options.follow,
                   controls=True,
                   opengl=options.opengl,
                   paths=args,
                   cache_dir=options.cache_dir,
                   regex=options.regex,
                   format=options.format,
                   force_cache=options.force_cache,
                   store_path=options.store_path,
                   store_interval=options.store_interval)
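Since snuffler_from_commandline() takes the argument list explicitly, it can be driven from Python as well as from the shell. A hedged usage sketch; all file names below are placeholders, not files referenced by the original:

# hypothetical invocation with placeholder file names
snuffler_from_commandline([
    '--stations=stations.txt',
    '--events=events.pf',
    '--markers=picks.pf',
    'traces.mseed',
])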
Example #28
            figs = mopadize(data)

        else:
            for i in range(testlines):
                mt1 = moment_tensor.MomentTensor.random_dc()
                data.append(mt1)
            figs = overlay(data)
        plt.show()
    if args.events:
        sort_by = 'magnitude'
        step_width = 102
        fig_count = 0
        events = model.load_events(args.events)
        magnitudes = []
        if sort_by == 'magnitude':
            try:
                events.sort(key=lambda x: x.moment_tensor.magnitude)
            except AttributeError:
                events.sort(key=lambda x: x.magnitude)

        elif sort_by == 'time':
            events.sort(key=lambda x: x.time)
        for e in events:
            if e.moment_tensor is not None:
                mag = e.moment_tensor.magnitude
            else:
                mag = e.magnitude
            magnitudes.append(mag)
Example #29
def main(args=None):
    if args is None:
        args = sys.argv[1:]

    parser = OptionParser(
        usage=usage,
        description=description)

    parser.add_option(
        '--width',
        dest='width',
        type='float',
        default=20.0,
        metavar='FLOAT',
        help='set width of output image [cm] (%default)')

    parser.add_option(
        '--height',
        dest='height',
        type='float',
        default=15.0,
        metavar='FLOAT',
        help='set height of output image [cm] (%default)')

    parser.add_option(
        '--topo-resolution-min',
        dest='topo_resolution_min',
        type='float',
        default=40.0,
        metavar='FLOAT',
        help='minimum resolution of topography [dpi] (%default)')

    parser.add_option(
        '--topo-resolution-max',
        dest='topo_resolution_max',
        type='float',
        default=200.0,
        metavar='FLOAT',
        help='maximum resolution of topography [dpi] (%default)')

    parser.add_option(
        '--no-grid',
        dest='show_grid',
        default=True,
        action='store_false',
        help='don\'t show grid lines')

    parser.add_option(
        '--no-topo',
        dest='show_topo',
        default=True,
        action='store_false',
        help='don\'t show topography')

    parser.add_option(
        '--no-cities',
        dest='show_cities',
        default=True,
        action='store_false',
        help='don\'t show cities')

    parser.add_option(
        '--no-illuminate',
        dest='illuminate',
        default=True,
        action='store_false',
        help='deactivate artificial illumination of topography')

    parser.add_option(
        '--illuminate-factor-land',
        dest='illuminate_factor_land',
        type='float',
        metavar='FLOAT',
        help='set factor for artificial illumination of land (0.5)')

    parser.add_option(
        '--illuminate-factor-ocean',
        dest='illuminate_factor_ocean',
        type='float',
        metavar='FLOAT',
        help='set factor for artificial illumination of ocean (0.25)')

    parser.add_option(
        '--theme',
        choices=['topo', 'soft'],
        default='topo',
        help='select color theme, available: topo, soft (topo)')

    parser.add_option(
        '--download-etopo1',
        dest='download_etopo1',
        action='store_true',
        help='download full ETOPO1 topography dataset')

    parser.add_option(
        '--download-srtmgl3',
        dest='download_srtmgl3',
        action='store_true',
        help='download full SRTMGL3 topography dataset')

    parser.add_option(
        '--make-decimated-topo',
        dest='make_decimated',
        action='store_true',
        help='pre-make all decimated topography datasets')

    parser.add_option(
        '--stations',
        dest='stations_fn',
        metavar='FILENAME',
        help='load station coordinates from FILENAME')

    parser.add_option(
        '--events',
        dest='events_fn',
        metavar='FILENAME',
        help='load event coordinates from FILENAME')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    (options, args) = parser.parse_args(args)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.download_etopo1:
        import pyrocko.datasets.topo.etopo1
        pyrocko.datasets.topo.etopo1.download()

    if options.download_srtmgl3:
        import pyrocko.datasets.topo.srtmgl3
        pyrocko.datasets.topo.srtmgl3.download()

    if options.make_decimated:
        import pyrocko.datasets.topo
        pyrocko.datasets.topo.make_all_missing_decimated()

    if (options.download_etopo1 or options.download_srtmgl3 or
            options.make_decimated) and len(args) == 0:

        sys.exit(0)

    if options.theme == 'soft':
        color_kwargs = {
            'illuminate_factor_land': options.illuminate_factor_land or 0.2,
            'illuminate_factor_ocean': options.illuminate_factor_ocean or 0.15,
            'color_wet': (216, 242, 254),
            'color_dry': (238, 236, 230),
            'topo_cpt_wet': 'light_sea_uniform',
            'topo_cpt_dry': 'light_land_uniform'}
    elif options.theme == 'topo':
        color_kwargs = {
            'illuminate_factor_land': options.illuminate_factor_land or 0.5,
            'illuminate_factor_ocean': options.illuminate_factor_ocean or 0.25}

    events = []
    if options.events_fn:
        events = model.load_events(options.events_fn)

    stations = []

    if options.stations_fn:
        stations = model.load_stations(options.stations_fn)

    if not (len(args) == 4 or (
            len(args) == 1 and (events or stations))):

        parser.print_help()
        sys.exit(1)

    if len(args) == 4:
        try:
            lat = float(args[0])
            lon = float(args[1])
            radius = float(args[2])*km
        except Exception:
            parser.print_help()
            sys.exit(1)
    else:
        lats, lons = latlon_arrays(stations+events)
        lat, lon = map(float, od.geographic_midpoint(lats, lons))
        radius = float(
            num.max(od.distance_accurate50m_numpy(lat, lon, lats, lons)))
        radius *= 1.1

    m = automap.Map(
        width=options.width,
        height=options.height,
        lat=lat,
        lon=lon,
        radius=radius,
        topo_resolution_max=options.topo_resolution_max,
        topo_resolution_min=options.topo_resolution_min,
        show_topo=options.show_topo,
        show_grid=options.show_grid,
        illuminate=options.illuminate,
        **color_kwargs)

    logger.debug('map configuration:\n%s' % str(m))

    if options.show_cities:
        m.draw_cities()

    if stations:
        lats = [s.lat for s in stations]
        lons = [s.lon for s in stations]

        m.gmt.psxy(
            in_columns=(lons, lats),
            S='t8p',
            G='black',
            *m.jxyr)

        for s in stations:
            m.add_label(s.lat, s.lon, '%s' % '.'.join(
                x for x in s.nsl() if x))

    if events:
        beachball_symbol = 'mt'
        beachball_size = 20.0
        for ev in events:
            if ev.moment_tensor is None:
                m.gmt.psxy(
                    in_rows=[[ev.lon, ev.lat]],
                    S='c12p',
                    G=gmtpy.color('scarletred2'),
                    W='1p,black',
                    *m.jxyr)

            else:
                devi = ev.moment_tensor.deviatoric()
                mt = devi.m_up_south_east()
                mt = mt / ev.moment_tensor.scalar_moment() \
                    * pmt.magnitude_to_moment(5.0)
                m6 = pmt.to6(mt)
                data = (ev.lon, ev.lat, 10) + tuple(m6) + (1, 0, 0)

                if m.gmt.is_gmt5():
                    kwargs = dict(
                        M=True,
                        S='%s%g' % (
                            beachball_symbol[0], beachball_size / gmtpy.cm))
                else:
                    kwargs = dict(
                        S='%s%g' % (
                            beachball_symbol[0], beachball_size*2 / gmtpy.cm))

                m.gmt.psmeca(
                    in_rows=[data],
                    G=gmtpy.color('chocolate1'),
                    E='white',
                    W='1p,%s' % gmtpy.color('chocolate3'),
                    *m.jxyr,
                    **kwargs)

    m.save(args[-1])
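Judging from the argument handling above, the tool accepts either lat, lon, radius [km] plus an output file, or a single output file combined with --stations/--events. A sketch with placeholder values:

# hypothetical invocations (values and file names are placeholders):
#   automap 31.5 35.5 150 map.pdf
#   automap --stations=stations.txt --events=events.pf map.pdf
main(['31.5', '35.5', '150', 'map.pdf'])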
Example #30
from pyrocko import pile, io, model, util
import sys

"""
Extrahiere miniseed-Daten um Events, die aus Katalog ausgelesen werden
"""

# mit sys.argv kann man alles abfragen, was hinter dem Programmaufruf steht. 
dirs = sys.argv[1:]

# load the station file:
stats = model.load_stations('/scratch/local1/doctar/meta/stations.txt')

# read the events:
events = model.load_events('/home/zmaw/u254061/master/event_marker_IPMA.txt')

# build a pile from all miniseed files in the directories given on the
# command line:
outpile = pile.make_pile(dirs)

f = open('/home/zmaw/u254061/master/event_marker_IPMA.txt')
for line in f:
    if line.lstrip().startswith('#'):
        continue
    toks = line.split()
    timedate, timetime = toks[1], toks[2]
    gtime = util.str_to_time(str(timedate + ' ' + timetime))

    trange = [gtime-100, gtime+1000]
    new_pile = []
    for traces in outpile.chopper(trange[0], trange[1], load_data=True, degap=False):
        if traces:
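The script breaks off inside the chopper loop. A minimal sketch of how the chopped traces might be written out, assuming pyrocko's io.save filename-template mechanism; the output template is a placeholder, not taken from the original:

# sketch of the loop's likely intent (output template is a placeholder):
for traces in outpile.chopper(trange[0], trange[1], load_data=True,
                              degap=False):
    if traces:
        io.save(traces, 'extracted/%(station)s_%(tmin)s.mseed')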
Example #31
# Stations as black triangles. Genuine GMT commands can be passed via the
# map's gmt attribute. The last argument of the psxy function call pipes the
# map's projection system.
m.gmt.psxy(in_columns=(lons, lats), S='t20p', G='black', *m.jxyr)

# Station labels
for i in range(len(stations)):
    m.add_label(lats[i], lons[i], labels[i])

# Load events from catalog file (generated using catalog.GlobalCMT()
# download from www.globalcmt.org)
# If no moment tensor is provided in the catalogue, the event is plotted
# as a red circle. Symbol size relative to magnitude.

events = model.load_events('deadsea_events_1976-2017.txt')
beachball_symbol = 'd'
factor_symbl_size = 5.0
for ev in events:
    mag = ev.magnitude
    if ev.moment_tensor is None:
        ev_symb = 'c' + str(mag * factor_symbl_size) + 'p'
        m.gmt.psxy(in_rows=[[ev.lon, ev.lat]],
                   S=ev_symb,
                   G=gmtpy.color('scarletred2'),
                   W='1p,black',
                   *m.jxyr)
    else:
        devi = ev.moment_tensor.deviatoric()
        beachball_size = mag * factor_symbl_size
        mt = devi.m_up_south_east()
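The example is truncated here; the continuation presumably mirrors the psmeca call in the automap main() of Example #29. A sketch under that assumption (pmt and gmtpy imports assumed; not the verbatim original):

# convert the deviatoric tensor to the 6-component psmeca input
m6 = pmt.to6(mt)
data = (ev.lon, ev.lat, 10) + tuple(m6) + (1, 0, 0)
m.gmt.psmeca(
    in_rows=[data],
    S='%s%g' % (beachball_symbol[0], beachball_size / gmtpy.cm),
    G=gmtpy.color('scarletred2'),
    W='1p,black',
    *m.jxyr)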
Example #32
from pyrocko import model, orthodrome as ortho
stations = model.load_stations('GERES/array_center.pf')
events = model.load_events('castor_event_IGN.dat')
for s in stations:
    print(s)
    print('distance in deg ',
          ortho.distance_accurate50m(s, events[0]) / 110.54 / 1000.)
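The hard-coded 110.54 km-per-degree factor is only an approximation. pyrocko's cake.m2d constant (used by the depth-search examples below) performs essentially the same meters-to-degrees conversion without the magic number:

from pyrocko import cake

# equivalent conversion: meters * m2d -> degrees
print('distance in deg ',
      ortho.distance_accurate50m(s, events[0]) * cake.m2d)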
Example #33
# Stations as black triangles. Genuine GMT commands can be passed via the
# map's gmt attribute. The last argument of the psxy function call pipes the
# map's projection system.
m.gmt.psxy(in_columns=(lons, lats), S='t20p', G='black', *m.jxyr)

# Station labels
for i in range(len(stations)):
    m.add_label(lats[i], lons[i], labels[i])


# Load events from catalog file (generated using catalog.GlobalCMT()
# download from www.globalcmt.org)
# If no moment tensor is provided in the catalogue, the event is plotted
# as a red circle. Symbol size relative to magnitude.

events = model.load_events('deadsea_events_1976-2017.txt')
beachball_symbol = 'd'
factor_symbl_size = 5.0
for ev in events:
    mag = ev.magnitude
    if ev.moment_tensor is None:
        ev_symb = 'c' + str(mag * factor_symbl_size) + 'p'
        m.gmt.psxy(
            in_rows=[[ev.lon, ev.lat]],
            S=ev_symb,
            G=gmtpy.color('scarletred2'),
            W='1p,black',
            *m.jxyr)
    else:
        devi = ev.moment_tensor.deviatoric()
        beachball_size = mag*factor_symbl_size
Example #34
File: automap.py Project: wuxyair/pyrocko (identical to the automap main() shown in Example #29)
Example #35
def plot(settings, show=False):

    #align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = 'P'
    zoom_window = settings.zoom
    ampl_scaler = '4*standard deviation'

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(':')
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return

    stations = model.load_stations(settings.station_filename)
    # filter() returns an iterator under Python 3; materialize it so that
    # len() below works:
    station = list(filter(
        lambda s: match_nslc('%s.%s.%s.*' % s.nsl(), traces[0].nslc_id),
        stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info('%s ... skipping.' % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning('%s  Using nearest neighbor instead.' % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(
                    t)
                max_dist_delta = store.config.distance_max - farthest_source.distance_to(
                    t)
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)
    depth_count = dict(zip(sorted(alldepths), range(len(alldepths))))

    target_count = dict(
        zip([t.codes[:3] for t in targets], range(len(targets))))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == 'velocity':
            tr = integrate_differentiate(tr, 'differentiate')

        onset = engine.get_store(t.store_id).t('begin',
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left='off',
                           right='off',
                           labelleft='off')

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=event.time + onset + zoom_window[0],
                          tmax=event.time + onset + zoom_window[1])
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=1., alpha=1.)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color='black',
                            alpha=0.5)
        ax.text(zoom_window[0] * 1.09,
                y_pos,
                '%1.1f' % (s.depth / 1000.),
                horizontalalignment='right')  #, fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = 'pP'
            arrivals = mod.arrivals(phases=[cake.PhaseDef(label)],
                                    distances=[s.distance_to(t) * cake.m2d],
                                    zstart=s.depth)

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c='blue')

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    #fontsize=12,
                    color='black',
                    verticalalignment='top',
                    horizontalalignment='right')

            except IndexError:
                logger.warning(
                    'no pP phase at d=%s z=%s stat=%s' %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception('No trace found!')
    if len(traces) > 1:
        raise Exception('More than one trace provided!')
    else:
        onset = 0
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == 'displacement':
            tr = integrate_differentiate(tr, 'integrate')
        tr = settings.do_filter(tr)
        onset = engine.get_store(targets[0].store_id).t(
            'begin', (event.depth, s.distance_to(targets[0]))) + event.time
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis='y',
                           which='both',
                           left='off',
                           right='off',
                           labelleft='off')

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(tmin=onset + zoom_window[0] + correction,
                          tmax=onset + zoom_window[1] + correction)
        if ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == '4*standard deviation':
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel('Time [s]')
        ax.text(0.0,
                0.6,
                'Source depth [km]',
                rotation=90,
                horizontalalignment='left',
                transform=fig.transFigure)  #, fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0., c='black')
        vline.set_linestyle('--')
    if settings.title:
        params = {
            'array-id': ''.join(station.nsl()),
            'event_name': event.name,
            'event_time': time_to_str(event.time)
        }
        ax.text(0.5,
                1.05,
                settings.title % params,
                horizontalalignment='center',
                transform=ax.transAxes)
    if settings.auto_caption:
        cax = fig.add_axes([0., 0., 1, 0.05], label='caption')
        cax.axis('off')
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == 'displacement':
            quantity_info = 'integrated velocity trace. '
        elif settings.quantity == 'velocity':
            quantity_info = 'differentiated synthetic traces. '
        elif settings.quantity == 'restituted':
            quantity_info = 'restituted traces. '
        else:
            quantity_info = ''

        captions = {'filters': ''}
        for f in settings.filters:
            captions['filters'] += '%s-pass, order %s, f$_c$=%s Hz. ' % (
                f.type, f.order, f.corner)
        captions['quantity_info'] = quantity_info
        captions['store_sampling'] = 1. / store.config.deltat
        cax.text(
            0,
            0,
            'Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s'
            % captions,
            fontsize=12,
            transform=cax.transAxes)
        plt.subplots_adjust(hspace=.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info('save as: %s ' % settings.save_as)
        options = settings.__dict__
        options.update({'array-id': ''.join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches='tight')
    if show:
        plt.show()
Example #36
def command_matrix(args):
    '''
    Execution of command matrix
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

        parser.add_option('--view',
                          dest='view',
                          action='store_true',
                          help='view similarity matrix')

        parser.add_option('--savefig',
                          dest='savefig',
                          action='store_true',
                          help='save figure of similarity matrix')

    parser, options, args = cl_parse('matrix', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if not os.path.isdir(conf.project_dir):
        die('project directory missing: %s' % conf.project_dir)

    simmat_temporal_fn = os.path.join(conf.project_dir, 'simmat_temporal.npy')

    if not options.force and os.path.isfile(simmat_temporal_fn):
        die('similarity matrix exists: %s; use force option' %
            simmat_temporal_fn)

    catalog_ref_fn = os.path.join(conf.project_dir, 'catalog.pf')
    if os.path.isfile(catalog_ref_fn):
        allevents = model.load_events(catalog_ref_fn)
    else:
        die('catalog missing: %s' % catalog_ref_fn)

    if conf.sw_simmat:
        if not os.path.isfile(conf.sim_mat_fn):
            die('similarity matrix missing: %s' % conf.sim_mat_fn)
        if conf.sim_mat_type == 'binary':
            simmat = sccluster.load_similarity_matrix(conf.sim_mat_fn)
        else:
            die('ascii format for similarity matrix not yet implemented')

        if len(allevents) != len(simmat):
            print(len(allevents), len(simmat))
            die('clustering stopped, number of events ' +
                'differs from matrix size')

        new_catalog_fn = os.path.join(conf.project_dir,
                                      'events_to_be_clustered.pf')
        model.dump_events(allevents, new_catalog_fn)

    else:
        if conf.metric in config.acceptable_mt_based_metrics:
            events = [ev for ev in allevents if ev.moment_tensor is not None]
        else:
            events = list(allevents)
        new_catalog_fn = os.path.join(conf.project_dir,
                                      'events_to_be_clustered.pf')
        model.dump_events(events, new_catalog_fn)

        simmat = sccluster.compute_similarity_matrix(events, conf.metric)

    sccluster.save_similarity_matrix(simmat, simmat_temporal_fn)

    simmat_fig_fn = os.path.join(conf.project_dir,
                                 'simmat_temporal.' + conf.figure_format)
    if options.view and options.savefig:
        scplot.view_and_savefig_similarity_matrix(simmat, simmat_fig_fn,
                                                  'Sorted chronologically')
    else:
        if options.view:
            scplot.view_similarity_matrix(simmat, 'Sorted chronologically')
        if options.savefig:
            scplot.savefig_similarity_matrix(simmat, simmat_fig_fn,
                                             'Sorted chronologically')

    print('Similarity matrix computed and stored as "%s"' % simmat_temporal_fn)
    if options.savefig:
        print('Similarity matrix figure saved as "%s"' % simmat_fig_fn)
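The cl_parse('matrix', ...) setup and the die() hints ('use seiscloud matrix first') suggest a subcommand-style CLI, with the cluster and plot subcommands shown in Examples #39 and #41 below. A hedged sketch of the apparent usage; the config file name is a placeholder:

# hypothetical invocations:
#   seiscloud matrix config.yaml --savefig
#   seiscloud cluster config.yaml --view
#   seiscloud plot config.yaml --force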
Example #37
def plot(settings, show=False):

    # align_phase = 'P(cmb)P<(icb)(cmb)p'
    with_onset_line = False
    fill = True
    align_phase = "P"
    zoom_window = list(settings.zoom)
    ampl_scaler = "4*standard deviation"

    quantity = settings.quantity
    zstart, zstop, inkr = settings.depths.split(":")
    test_depths = num.arange(
        float(zstart) * km,
        float(zstop) * km,
        float(inkr) * km)

    try:
        traces = io.load(settings.trace_filename)
    except FileLoadError as e:
        logger.info(e)
        return

    event = model.load_events(settings.event_filename)
    assert len(event) == 1
    event = event[0]
    event.depth = float(settings.depth) * 1000.0
    base_source = MTSource.from_pyrocko_event(event)

    test_sources = []
    for d in test_depths:
        s = base_source.clone()
        s.depth = float(d)
        test_sources.append(s)
    if settings.store_superdirs:
        engine = LocalEngine(store_superdirs=settings.store_superdirs)
    else:
        engine = LocalEngine(use_config=True)
    try:
        store = engine.get_store(settings.store_id)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return

    stations = model.load_stations(settings.station_filename)
    station = list(
        filter(lambda s: match_nslc("%s.%s.%s.*" % s.nsl(), traces[0].nslc_id),
               stations))
    assert len(station) == 1
    station = station[0]
    targets = [
        station_to_target(station,
                          quantity=quantity,
                          store_id=settings.store_id)
    ]
    try:
        request = engine.process(targets=targets, sources=test_sources)
    except seismosizer.NoSuchStore as e:
        logger.info("%s ... skipping." % e)
        return
    except meta.OutOfBounds as error:
        if settings.force_nearest_neighbor:
            logger.warning("%s  Using nearest neighbor instead." % error)
            mod_targets = []
            for t in targets:
                closest_source = min(test_sources,
                                     key=lambda s: s.distance_to(t))
                farthest_source = max(test_sources,
                                      key=lambda s: s.distance_to(t))
                min_dist_delta = store.config.distance_min - closest_source.distance_to(
                    t)
                max_dist_delta = (store.config.distance_max -
                                  farthest_source.distance_to(t))
                if min_dist_delta < 0:
                    azi, bazi = closest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, min_dist_delta * cake.m2d)
                elif max_dist_delta < 0:
                    azi, bazi = farthest_source.azibazi_to(t)
                    newlat, newlon = ortho.azidist_to_latlon(
                        t.lat, t.lon, azi, max_dist_delta * cake.m2d)
                t.lat, t.lon = newlat, newlon
                mod_targets.append(t)
            request = engine.process(targets=mod_targets, sources=test_sources)
        else:
            logger.error("%s: %s" % (error, ".".join(station.nsl())))
            return

    alldepths = list(test_depths)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    maxz = max(test_depths)
    minz = min(test_depths)
    relative_scale = (maxz - minz) * 0.02
    for s, t, tr in request.iter_results():
        if quantity == "velocity":
            tr = integrate_differentiate(tr, "differentiate")

        onset = engine.get_store(t.store_id).t("begin",
                                               (s.depth, s.distance_to(t)))

        tr = settings.do_filter(tr)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / num.max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = s.depth
        xdata = tr.get_xdata() - onset - s.time
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=event.time + onset + zoom_window[0],
            tmax=event.time + onset + zoom_window[1],
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ampl_scale /= settings.gain
        ydata = (tr_ydata / ampl_scale) * relative_scale + y_pos
        ax.plot(xdata, ydata, c="black", linewidth=1.0, alpha=1.0)
        if False:
            ax.fill_between(xdata,
                            y_pos,
                            ydata,
                            where=ydata < y_pos,
                            color="black",
                            alpha=0.5)
        ax.text(
            zoom_window[0] * 1.09,
            y_pos,
            "%1.1f" % (s.depth / 1000.0),
            horizontalalignment="right",
        )  # , fontsize=12.)
        if False:
            mod = store.config.earthmodel_1d
            label = "pP"
            arrivals = mod.arrivals(
                phases=[cake.PhaseDef(label)],
                distances=[s.distance_to(t) * cake.m2d],
                zstart=s.depth,
            )

            try:
                t = arrivals[0].t
                ydata_absmax = num.max(num.abs(tr.get_ydata()))
                marker_length = 0.5
                x_marker = [t - onset] * 2
                y = [
                    y_pos - (maxz - minz) * 0.025,
                    y_pos + (maxz - minz) * 0.025
                ]
                ax.plot(x_marker, y, linewidth=1, c="blue")

                ax.text(
                    x_marker[1] - x_marker[1] * 0.005,
                    y[1],
                    label,
                    # fontsize=12,
                    color="black",
                    verticalalignment="top",
                    horizontalalignment="right",
                )

            except IndexError:
                logger.warning(
                    "no pP phase at d=%s z=%s stat=%s" %
                    (s.distance_to(t) * cake.m2d, s.depth, station.station))
                pass

    if len(traces) == 0:
        raise Exception("No trace found!")
    if len(traces) > 1:
        raise Exception("More than one trace provided!")
    else:
        tr = traces[0]
        correction = float(settings.correction)
        if quantity == "displacement":
            tr = integrate_differentiate(tr, "integrate")
        tr = settings.do_filter(tr)
        onset = (engine.get_store(targets[0].store_id).t(
            "begin", (event.depth, s.distance_to(targets[0]))) + event.time)
        if settings.normalize:
            tr.set_ydata(tr.get_ydata() / max(abs(tr.get_ydata())))
            ax.tick_params(axis="y",
                           which="both",
                           left="off",
                           right="off",
                           labelleft="off")

        y_pos = event.depth
        xdata = tr.get_xdata() - onset + correction
        tr_ydata = tr.get_ydata() * -1
        visible = tr.chop(
            tmin=onset + zoom_window[0] + correction,
            tmax=onset + zoom_window[1] + correction,
        )
        if ampl_scaler == "trace min/max":
            ampl_scale = float(max(abs(visible.get_ydata())))
        elif ampl_scaler == "4*standard deviation":
            ampl_scale = 4 * float(num.std(visible.get_ydata()))
        else:
            ampl_scale = 1.0
        ydata = (tr_ydata / ampl_scale * settings.gain *
                 settings.gain_record) * relative_scale + y_pos
        ax.plot(xdata, ydata, c=settings.color, linewidth=1.0)
        ax.set_xlim(zoom_window)
        zmax = max(test_depths)
        zmin = min(test_depths)
        zrange = zmax - zmin
        ax.set_ylim((zmin - zrange * 0.2, zmax + zrange * 0.2))
        ax.set_xlabel("Time [s]")
        ax.text(
            0.0,
            0.6,
            "Source depth [km]",
            rotation=90,
            horizontalalignment="left",
            transform=fig.transFigure,
        )  # , fontsize=12.)

    if fill:
        ax.fill_between(xdata,
                        y_pos,
                        ydata,
                        where=ydata < y_pos,
                        color=settings.color,
                        alpha=0.5)
    if with_onset_line:
        ax.text(0.08, zmax + zrange * 0.1, align_phase, fontsize=14)
        vline = ax.axvline(0.0, c="black")
        vline.set_linestyle("--")
    if settings.title:
        params = {
            "array-id": "".join(station.nsl()),
            "event_name": event.name,
            "event_time": time_to_str(event.time),
        }
        ax.text(
            0.5,
            1.05,
            settings.title % params,
            horizontalalignment="center",
            transform=ax.transAxes,
        )
    if settings.auto_caption:
        cax = fig.add_axes([0.0, 0.0, 1, 0.05], label="caption")
        cax.axis("off")
        cax.xaxis.set_visible(False)
        cax.yaxis.set_visible(False)
        if settings.quantity == "displacement":
            quantity_info = "integrated velocity trace. "
        if settings.quantity == "velocity":
            quantity_info = "differentiated synthetic traces. "
        if settings.quantity == "restituted":
            quantity_info = "restituted traces. "

        captions = {"filters": ""}
        for f in settings.filters:
            captions["filters"] += "%s-pass, order %s, f$_c$=%s Hz. " % (
                f.type,
                f.order,
                f.corner,
            )
        captions["quantity_info"] = quantity_info
        captions["store_sampling"] = 1.0 / store.config.deltat
        cax.text(
            0,
            0,
            "Filters: %(filters)s f$_{GF}$=%(store_sampling)s Hz.\n%(quantity_info)s"
            % captions,
            fontsize=12,
            transform=cax.transAxes,
        )
        plt.subplots_adjust(hspace=0.4, bottom=0.15)
    else:
        plt.subplots_adjust(bottom=0.1)

    ax.invert_yaxis()
    if settings.save_as:
        logger.info("save as: %s " % settings.save_as)
        options = settings.__dict__
        options.update({"array-id": "".join(station.nsl())})
        fig.savefig(settings.save_as % options, dpi=160, bbox_inches="tight")
    if show:
        plt.show()
Example #38
def report(env,
           report_config=None,
           update_without_plotting=True,
           make_index=True,
           make_archive=True,
           nthreads=0):

    if report_config is None:
        report_config = ReportConfig()
        report_config.set_basepath('.')

    event_name = env.get_current_event_name()
    logger.info('Creating report entry for run "%s"...' % event_name)

    fp = report_config.expand_path
    entry_path = expand_template(
        op.join(fp(report_config.report_base_path),
                report_config.entries_sub_path),
        dict(event_name=event_name, problem_name=event_name))

    if op.exists(entry_path) and not update_without_plotting:
        shutil.rmtree(entry_path)

    util.ensuredir(entry_path)
    plots_dir_out = op.join(entry_path, 'plots')
    util.ensuredir(plots_dir_out)
    configs_dir = op.join(op.split(__file__)[0], 'app/configs/')
    rundir_path = env.get_rundir_path()
    try:
        os.system("cp -r %s/grun/plots/* %s" % (rundir_path, plots_dir_out))
    except Exception:
        pass
    os.system("cp %s/plot_collection.yaml %s" % (configs_dir, plots_dir_out))

    util.ensuredir("%s/shakemap/default/" % (plots_dir_out))
    os.system(
        "cp %s/*shakemap.png %s/shakemap/default/shakemap.default.gf_shakemap.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/shakemap.default.plot_group.yaml %s/shakemap/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/location/default/" % (plots_dir_out))
    os.system(
        "cp %s/*location.png %s/location/default/location.default.location.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/location.default.plot_group.yaml %s/location/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/production_data/default/" % (plots_dir_out))
    os.system(
        "cp %s/*production_data.png %s/production_data/default/production_data.default.production_data.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/production_data.default.plot_group.yaml %s/production_data/default/"
        % (configs_dir, plots_dir_out))

    util.ensuredir("%s/waveforms/default/" % (plots_dir_out))
    os.system(
        "cp %s/waveforms_1.png %s/waveforms/default/waveforms.default.waveforms_1.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_2.png %s/waveforms/default/waveforms.default.waveforms_2.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_3.png %s/waveforms/default/waveforms.default.waveforms_3.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/waveforms.default.plot_group.yaml %s/waveforms/default/" %
              (configs_dir, plots_dir_out))

    os.system("cp %s/grun/config.yaml %s/config.yaml" %
              (rundir_path, entry_path))

    try:

        event = model.load_events(op.join(rundir_path, "event.txt"))
        model.dump_events(event,
                          filename=op.join(entry_path, 'event_reference.yaml'),
                          format="yaml")
        event = event[0]
        from silvertine import plot
        pcc = report_config.plot_config_collection.get_weeded(env)
        plot.make_plots(env,
                        plots_path=op.join(entry_path, 'plots'),
                        plot_config_collection=pcc)

        try:
            run_info = env.get_run_info()
        except environment.NoRundirAvailable:
            run_info = None

        rie = ReportIndexEntry(path='.',
                               problem_name=event_name,
                               silvertine_version="0.01",
                               run_info=run_info)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_best = guts.load(filename=fn)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_reference = guts.load(filename=fn)

        fn = op.join(entry_path, 'index.yaml')
        guts.dump(rie, filename=fn)

        logger.info('Done creating report entry for run "%s".' % event_name)

    #    report_index(report_config)

    #    if make_archive:
    #        report_archive(report_config)
    except FileNotFoundError:
        pass
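The chain of os.system("cp ...") calls above fails silently and is shell-dependent. A sketch of one of these copies done with shutil instead, reusing rundir_path and plots_dir_out from the function body (a design alternative, not the author's code):

import glob
import shutil

# copy the shakemap figure without shelling out
for fn in glob.glob('%s/*shakemap.png' % rundir_path):
    shutil.copy(
        fn,
        '%s/shakemap/default/shakemap.default.gf_shakemap.d100.png'
        % plots_dir_out)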
Example #39
def command_cluster(args):
    '''
    Execution of command cluster
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

        parser.add_option('--view',
                          dest='view',
                          action='store_true',
                          help='view similarity matrix after clustering')

        parser.add_option(
            '--savefig',
            dest='savefig',
            action='store_true',
            help='save figure of similarity matrix after clustering')

    parser, options, args = cl_parse('cluster', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if not os.path.isdir(conf.project_dir):
        die('project directory missing: %s' % conf.project_dir)

    resdir = os.path.join(conf.project_dir, 'clustering_results')
    if os.path.isdir(resdir):
        if not options.force:
            die('clustering result directory exists; use force option')
        shutil.rmtree(resdir)
    os.mkdir(resdir)

    simmat_temporal_fn = os.path.join(conf.project_dir, 'simmat_temporal.npy')
    if not os.path.isfile(simmat_temporal_fn):
        die('similarity matrix does not exist: %s; '
            'use seiscloud matrix first' % simmat_temporal_fn)

    new_catalog_fn = os.path.join(conf.project_dir,
                                  'events_to_be_clustered.pf')
    if not os.path.isfile(new_catalog_fn):
        die('catalog of selected events does not exist: %s; '
            'use seiscloud matrix first' % new_catalog_fn)

    simmat_temp = sccluster.load_similarity_matrix(simmat_temporal_fn)
    events = model.load_events(new_catalog_fn)
    eventsclusters = sccluster.dbscan(simmat_temp, conf.dbscan_nmin,
                                      conf.dbscan_eps,
                                      conf.sw_force_cluster_all)
    clusters = sccluster.get_clusters(events, eventsclusters)
    if min(eventsclusters) == -1:
        noise_cluster_empty = False
    else:
        noise_cluster_empty = True

    sccluster.save_all(events, eventsclusters, clusters, conf, resdir)
    simmat_clus = sccluster.get_simmat_clustered(events, eventsclusters,
                                                 clusters, conf, resdir,
                                                 simmat_temp)

    simmat_clustered_fn = os.path.join(conf.project_dir,
                                       'simmat_clustered.npy')
    sccluster.save_similarity_matrix(simmat_clus, simmat_clustered_fn)

    print('I run seiscloud for the project in "%s"' % conf.project_dir)
    n_clusters = len(clusters)
    if not noise_cluster_empty:
        n_clusters = n_clusters - 1
    print(str(n_clusters) + ' cluster(s) found')

    simmat_fig_fn = os.path.join(conf.project_dir,
                                 'simmat_clustered.' + conf.figure_format)
    if options.view and options.savefig:
        scplot.view_and_savefig_similarity_matrix(simmat_clus, simmat_fig_fn,
                                                  'Sorted after clustering')
    else:
        if options.view:
            scplot.view_similarity_matrix(simmat_clus,
                                          'Sorted after clustering')
        if options.savefig:
            scplot.savefig_similarity_matrix(simmat_clus, simmat_fig_fn,
                                             'Sorted after clustering')

    for cluster in clusters:
        selevents = clusters[cluster]
        median = sccluster.get_median(selevents, conf)
        fn_median = os.path.join(resdir,
                                 'median_cluster' + str(cluster) + '.pf')
        model.dump_events([median], fn_median)

    print('Similarity matrix after clustering computed and stored as "%s"' %
          simmat_clustered_fn)
    if options.savefig:
        print('Similarity matrix figure saved as "%s"' % simmat_fig_fn)
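
The cluster count above subtracts one when a noise cluster is present, because DBSCAN marks events that fall into no cluster with the label -1. A minimal, self-contained sketch of that bookkeeping (the helper name is hypothetical, not part of seiscloud):

def count_real_clusters(labels):
    # DBSCAN-style labels: non-negative integers are cluster ids,
    # -1 marks noise (events assigned to no cluster)
    unique = set(labels)
    return len(unique) - (1 if -1 in unique else 0)

print(count_real_clusters([0, 0, 1, -1, 1, 2]))  # prints 3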
Example #40
0
    # (snippet truncated above: --usestations and --markers are
    # defined earlier in the original script)
    parser.add_argument('--stations',
                        help='name of file containing station information',
                        required=True)
    parser.add_argument('--events',
                        help='name of file containing event catalog',
                        default=False,
                        required=False)
    # with action='store_true', the default must be False for the
    # flag to have any effect
    parser.add_argument('--printall',
                        help='print all results to terminal',
                        default=False,
                        required=False,
                        action='store_true')
    parser.add_argument('--show',
                        help='show figure at the end',
                        default=False,
                        required=False,
                        action='store_true')
    args = parser.parse_args()

    stations = model.load_stations(args.stations)

    if args.usestations:
        stations = [s for s in stations
                    if util.match_nslc(args.usestations, s.nsl())]

    events = []
    if args.events:
        events.extend(model.load_events(args.events))
    if args.markers:
        markers = gui_util.load_markers(args.markers)
        events.extend([m.get_event() for m in markers])
    get_bounds(stations, events=events, usestations=args.usestations,
               printall=args.printall, show_fig=args.show)
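
get_bounds itself is not shown in this snippet; assuming it simply spans the latitudes and longitudes of the given stations and events (pyrocko Station and Event objects both carry lat and lon attributes), a minimal sketch could look like this (bounds_sketch is a hypothetical stand-in, not the real function):

def bounds_sketch(stations, events=()):
    # hypothetical stand-in for get_bounds(): compute the lat/lon
    # bounding box over all stations plus optional events
    lats = [s.lat for s in stations] + [e.lat for e in events]
    lons = [s.lon for s in stations] + [e.lon for e in events]
    return (min(lats), max(lats)), (min(lons), max(lons))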
Example #41
0
def command_plot(args):
    '''
    Execution of command plot
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

    parser, options, args = cl_parse('plot', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if not os.path.isdir(conf.project_dir):
        die('project directory missing: %s' % conf.project_dir)

    resdir = os.path.join(conf.project_dir, 'clustering_results')
    if not os.path.isdir(resdir):
        die('clustering results missing: %s' % resdir)

    plotdir = os.path.join(conf.project_dir, 'clustering_plots')
    if os.path.isdir(plotdir):
        if not options.force:
            die('clustering plot directory exists; use force option')
        shutil.rmtree(plotdir)
    os.mkdir(plotdir)

    simmat_temporal_fn = os.path.join(conf.project_dir, 'simmat_temporal.npy')
    simmat_clustered_fn = os.path.join(conf.project_dir,
                                       'simmat_clustered.npy')
    if not os.path.isfile(simmat_temporal_fn):
        die('similarity matrix does not exist: %s; '
            'use seiscloud matrix first' % simmat_temporal_fn)
    if not os.path.isfile(simmat_clustered_fn):
        die('similarity matrix does not exist: %s; '
            'use seiscloud matrix first' % simmat_clustered_fn)

    new_catalog_fn = os.path.join(conf.project_dir,
                                  'events_to_be_clustered.pf')
    if not os.path.isfile(new_catalog_fn):
        die('catalog of selected events does not exist: %s; '
            'use seiscloud matrix and seiscloud cluster first' %
            new_catalog_fn)

    events = model.load_events(new_catalog_fn)
    eventsclusters = sccluster.load_obj(
        os.path.join(resdir, 'processed.eventsclusters'))
    clusters = sccluster.load_obj(os.path.join(resdir, 'processed.clusters'))

    scplot.plot_all(events, eventsclusters, clusters, conf, resdir, plotdir)

    print('Seiscloud plots prepared in "%s"' % plotdir)
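
The eventsclusters and clusters objects are restored above with sccluster.load_obj. A plausible pickle-based implementation of that save/load pair (an assumption for illustration; the actual seiscloud code may differ):

import pickle

def save_obj(obj, filename):
    # serialize any picklable object to disk
    with open(filename, 'wb') as f:
        pickle.dump(obj, f)

def load_obj(filename):
    # restore an object previously written with save_obj
    with open(filename, 'rb') as f:
        return pickle.load(f)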