Example #1
    def __init__(self, ntracks=6, use_opengl=False, panel_parent=None, follow=20):
        source_pile = pile.make_pile(['Demodataset_new.mseed'])
        p = hamster_pile.HamsterPile()
        p.set_fixation_length(20.)
        pile_viewer.PileViewer.__init__(self, p, ntracks_shown_max=ntracks,
                use_opengl=use_opengl, panel_parent=panel_parent)

        self._tlast = time.time()
        self._tlast_stalta = None
        self._tmin = source_pile.tmin + self._tlast % (source_pile.tmax - source_pile.tmin)
        
        v = self.get_view()
        v.follow(float(follow))

        self._source_pile = source_pile

        self._timer = QTimer(self)
        self.connect(self._timer, SIGNAL("timeout()"), self.periodical)
        self._timer.setInterval(1000)
        self._timer.start()

        self._detectiontimer = QTimer(self)
        self.connect(self._detectiontimer, SIGNAL("timeout()"), self.stalta)
        self._detectiontimer.setInterval(3000)
        self._detectiontimer.start()
Example #2
def read_file(f, l):
    s = False
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        try:
            s = read(f, format="MSEED")
        except Exception as e:
            try:
                l.write(
                    str(e) +
                    ": obspy error messages\n===> NOW trying to switch to pyrocko decoding\n"
                )
            except Exception:
                print("Error writing log file during obspy alternative")
            try:
                s = pile.make_pile([f]).to_obspy_stream()
            except Exception as p:
                if len(w):
                    for wm in w:
                        try:
                            l.write(
                                str(wm.message) +
                                ": general read warning messages\n")
                        except Exception:
                            pass
                try:
                    l.write(str(p) + ": pyrocko error messages\n")
                except Exception:
                    print("Error writing log file during pyrocko alternative")
                l.close()
                sys.exit(1)
    return s
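
A minimal usage sketch for read_file above (the data file and log file names are placeholders, not from the original source); the helper returns an obspy Stream, falling back to pyrocko decoding when obspy fails:

# Hedged usage sketch; 'data.mseed' and 'read.log' are placeholder names.
with open('read.log', 'a') as log:
    stream = read_file('data.mseed', log)
    print(stream)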
Example #3
    def __init__(self, files, scale=1., station_mapping={}, network_mapping={},
                 channel_mapping={}, location_mapping={}):
        self.scale = scale
        self.nslc_mapping = {}
        self.data_pile = pile.make_pile(files)
        self.noise = self.dictify_noise(self.data_pile, network_mapping,
                                        channel_mapping, station_mapping,
                                        location_mapping)
        self.merge_fader = trace.CosFader(xfrac=0.1)
Example #4
def snuffle(config):
    global _lassie_config
    _lassie_config = copy.deepcopy(config)
    for _ifc in _lassie_config.image_function_contributions:
        _ifc.setup(config)

    def load_snuffling(win):
        s = LassieSnuffling()
        s.config = _lassie_config
        s.setup()
        win.pile_viewer.viewer.add_snuffling(s, reloaded=True)
        win.pile_viewer.viewer.add_blacklist_pattern('*.SMAX.i.*')
        for bl in _lassie_config.blacklist:
            win.pile_viewer.viewer.add_blacklist_pattern('%s.*' % bl)

        detections_path = _lassie_config.get_detections_path()

        if os.path.exists(detections_path):
            s.detections = detections_to_event_markers(detections_path)
            s.add_markers(s.detections)

        for _ifc in s.config.image_function_contributions:
            if isinstance(_ifc, ifc.ManualPickIFC):
                markers_path_extra = _ifc.picks_path
            elif isinstance(_ifc, ifc.TemplateMatchingIFC):
                markers_path_extra = _ifc.template_markers_path
            else:
                continue

            if os.path.exists(markers_path_extra):
                s.add_markers(pmarker.load_markers(markers_path_extra))
            else:
                logger.warn(
                    'No such file: %s (referenced in %s, named %s)' %
                    (markers_path_extra, _ifc.__class__.__name__, _ifc.name))

    receivers = config.get_receivers()
    stations = set()
    lats, lons = geo.points_coords(receivers, system='latlon')
    for ir, (lat, lon) in enumerate(zip(lats, lons)):
        n, s, l = receivers[ir].codes[:3]
        stations.add(
            model.Station(lat=lat, lon=lon, network=n, station=s, location=l))

    paths = config.expand_path(config.data_paths)
    paths.append(config.get_ifm_dir_path())

    p = pile.make_pile(paths=paths, fileformat='detect')

    meta = {'tabu': True}
    for tr in p.iter_traces(trace_selector=lambda x: x.station == 'SMAX'):
        if tr.meta:
            tr.meta.update(meta)
        else:
            tr.meta = meta

    snuffler.snuffle(p, stations=stations, launch_hook=load_snuffling)
Example #5
    def setup(self):
        self.data_pile = pile.make_pile(
            self.data_paths, fileformat=self.data_format)

        if self.data_pile.is_empty():
            sys.exit('Data pile is empty!')

        self.deltat_want = self.config.deltat_want or \
                min(self.data_pile.deltats.keys())

        self.n_samples = int(
                (self.config.sample_length + self.config.tpad) / self.deltat_want)

        logger.debug('loading marker file %s' % self.fn_markers)

        # loads just plain markers:
        markers = marker.load_markers(self.fn_markers)

        if self.fn_events:
            markers.extend(
                [marker.EventMarker(e) for e in load_events(self.fn_events)])

        if self.sort_markers:
            logger.info('sorting markers!')
            markers.sort(key=lambda x: x.tmin)
        marker.associate_phases_to_events(markers)

        markers_by_nsl = {}
        for m in markers:
            if not m.match_nsl(self.config.reference_target.codes[:3]):
                continue

            if m.get_phasename().upper() != self.align_phase:
                continue

            markers_by_nsl.setdefault(m.one_nslc()[:3], []).append(m)

        assert len(markers_by_nsl) == 1

        # filter markers that do not have an event assigned:
        self.markers = list(markers_by_nsl.values())[0]

        if not self.labeled:
            dummy_event = Event(lat=0., lon=0., depth=0.)
            for m in self.markers:
                if not m.get_event():
                    m.set_event(dummy_event)

        self.markers = [m for m in self.markers if m.get_event() is not None]

        if not len(self.markers):
            raise Exception('No markers left in dataset')

        self.config.channels = list(self.data_pile.nslc_ids.keys())
        self.config.channels.sort()
Example #6
    def make_pile(self):
        '''Create a pile.

        To be overloaded in subclass. The default implementation just calls
        :py:func:`pyrocko.pile.make_pile` to create a pile from command line
        arguments.
        '''

        cachedirname = '/tmp/snuffle_cache_%s' % os.environ['USER']
        sources = self._cli_params.get('sources', sys.argv[1:])
        return pile.make_pile(
            sources,
            cachedirname=cachedirname,
            regex=self._cli_params['regex'],
            fileformat=self._cli_params['format'])
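
As the docstring notes, make_pile is meant to be overloaded in a subclass; a minimal sketch of such an override (the class names and the file list are hypothetical, only pile.make_pile comes from the source):

# Hedged sketch: a subclass bypassing the command line with a fixed file
# list; 'MySnuffler' and its base class 'SnufflerBase' are placeholders.
class MySnuffler(SnufflerBase):
    def make_pile(self):
        return pile.make_pile(['data/day1.mseed', 'data/day2.mseed'])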
Example #7
def load_eqt_folder(data_paths,
                    tinc,
                    path,
                    tmin="2021-05-26 06:20:03.800",
                    tmax="2016-02-12 06:20:03.800",
                    minlat=49.1379,
                    maxlat=49.1879,
                    minlon=8.1223,
                    maxlon=8.1723,
                    channels=["EH" + "[ZNE]"],
                    client_list=["BGR"],
                    download=True,
                    seiger=True,
                    selection=None,
                    path_waveforms=None,
                    sds=None,
                    stream=False,
                    data_format='mseed',
                    deltat_want=100,
                    tstart=None,
                    tstop=None,
                    hf=8,
                    lf=2,
                    models=[]):
    data_pile = pile.make_pile(data_paths,
                               fileformat=data_format,
                               show_progress=False)

    iter_chunked(tinc,
                 path,
                 data_pile,
                 tmin=tmin,
                 tmax=tmax,
                 minlat=minlat,
                 maxlat=maxlat,
                 minlon=minlon,
                 maxlon=maxlon,
                 channels=channels,
                 client_list=client_list,
                 download=download,
                 seiger=seiger,
                 selection=selection,
                 path_waveforms=path_waveforms,
                 stream=stream,
                 sds=sds,
                 reject_blacklisted=None,
                 tpad=0,
                 tstart=None,
                 tstop=None,
                 hf=hf,
                 lf=lf,
                 models=models)
Example #8
    def setup(self):
        self.data_pile = pile.make_pile(
            self.data_paths, fileformat=self.data_format)

        if self.data_pile.is_empty():
            sys.exit('Data pile is empty!')

        self.deltat_want = self.config.deltat_want or \
                min(self.data_pile.deltats.keys())

        self.n_samples = int(
                (self.config.sample_length + self.config.tpad) / self.deltat_want)

        logger.debug('loading marker file %s' % self.fn_markers)

        # loads just plain markers:
        markers = marker.load_markers(self.fn_markers)

        if self.fn_events:
            markers.extend(
                [marker.EventMarker(e) for e in load_events(self.fn_events)])

        marker.associate_phases_to_events(markers)
        markers = [m for m in markers if isinstance(m, marker.PhaseMarker)]

        markers_dict = defaultdict(list)
        for m in markers:
            if m.get_phasename().upper() != self.align_phase:
                continue

            markers_dict[m.get_event()].append(m)

        self.markers = []
        for e, _markers in markers_dict.items():
            first = min(_markers, key=lambda x: x.tmin)
            self.markers.append(first)

        if not self.labeled:
            dummy_event = Event(lat=0., lon=0., depth=0.)
            for m in self.markers:
                if not m.get_event():
                    m.set_event(dummy_event)

        self.markers = [m for m in self.markers if m.get_event() is not None]

        if not len(self.markers):
            raise Exception('No markers left in dataset')

        self.config.channels = list(self.data_pile.nslc_ids.keys())
        self.config.channels.sort()
Example #9
    def testMisfitOfSameTracesDtDifferentShifted(self):
        """
        Tests:
            Different length
            Different delta t
            Shifted
            L2-Norm
            L1-Norm
            time- and frequency-domain 
        """
        test_file = os.path.join(os.path.dirname(__file__),
                                 '../examples/1989.072.evt.mseed')
        p = pile.make_pile(test_file, show_progress=False)
        rt = p.all()[0]
        tt = rt.copy()

        # make downsampled, chopped copies:
        deltats = [0.5, 1.0, 2.0]
        tt.chop(tmin=rt.tmin + 10, tmax=rt.tmax - 15)
        tts = [tt.copy() for _ in range(len(deltats))]
        for i, t in enumerate(tts):
            t.downsample_to(deltats[i])

        # shift traces:
        t_shifts = [1.0, 0.49999, 0.5]
        for ts in t_shifts:
            tts_shifted = [t.copy() for t in tts]
            for t in tts_shifted:
                # map() is lazy in Python 3; a loop actually applies the shift
                t.shift(ts)
            tts.extend(tts_shifted)

        a = rt.tmin
        d = rt.tmax
        b = a + (d - a) / 10
        c = d - (d - a) / 10

        taper = trace.CosTaper(a, b, c, d)
        fresponse = trace.FrequencyResponse()
        norms = [1, 2]
        domains = ['time_domain', 'frequency_domain', 'envelope', 'absolute']
        setups = [
            trace.MisfitSetup(norm=n,
                              taper=taper,
                              domain=domain,
                              filter=fresponse) for domain in domains
            for n in norms
        ]

        for cand in tts:
            for setup in setups:
                m, n = rt.misfit(candidate=cand, setup=setup)
                self.assertNotEqual(m, None, 'misfit\'s m is None')
Example #10
    def testMisfitOfSameTracesDtDifferentShifted(self):
        """
        Tests:
            Different length
            Different delta t
            Shifted
            L2-Norm
            L1-Norm
            time- and frequency-domain
        """
        test_file = os.path.join(
            os.path.dirname(__file__),
            '../examples/1989.072.evt.mseed')
        p = pile.make_pile(test_file, show_progress=False)
        rt = p.all()[0]
        tt = rt.copy()

        # make downsampled, chopped copies:
        deltats = [0.5, 1.0, 2.0]
        tt.chop(tmin=rt.tmin+10, tmax=rt.tmax-15)
        tts = [tt.copy() for _ in range(len(deltats))]
        for i, t in enumerate(tts):
            t.downsample_to(deltats[i])

        # shift traces:
        t_shifts = [1.0, 0.49999, 0.5]
        for ts in t_shifts:
            tts_shifted = [t.copy() for t in tts]
            for t in tts_shifted:
                # map() is lazy in Python 3; a loop actually applies the shift
                t.shift(ts)
            tts.extend(tts_shifted)

        a = rt.tmin
        d = rt.tmax
        b = a+(d-a)/10
        c = d-(d-a)/10

        taper = trace.CosTaper(a, b, c, d)
        fresponse = trace.FrequencyResponse()
        norms = [1, 2]
        domains = ['time_domain', 'frequency_domain', 'envelope', 'absolute']
        setups = [trace.MisfitSetup(
            norm=n,
            taper=taper,
            domain=domain,
            filter=fresponse) for domain in domains for n in norms]

        for cand in tts:
            for setup in setups:
                m, n = rt.misfit(candidate=cand, setup=setup)
                self.assertNotEqual(m, None, 'misfit\'s m is None')
Example #11
    def setUpClass(cls):
        '''
        Create a reusable snuffler instance for all test cases.
        '''
        super(GUITest, cls).setUpClass()
        cls.snuffler = Snuffler()  # noqa
        fpath = common.test_data_file('test2.mseed')
        p = make_pile(fpath, show_progress=False)
        cls.win = SnufflerWindow(pile=p)
        cls.pile_viewer = cls.win.pile_viewer
        pv = cls.pile_viewer
        cls.main_control_defaults = dict(
            highpass_control=pv.highpass_control.get_value(),
            lowpass_control=pv.lowpass_control.get_value(),
            gain_control=pv.gain_control.get_value(),
            rot_control=pv.rot_control.get_value())
Example #12
    def setUpClass(cls):
        '''
        Create a reusable snuffler instance for all test cases.
        '''
        super(GUITest, cls).setUpClass()
        if no_gui:  # nosetests runs this even when the class has @skip
            return

        cls.snuffler = Snuffler()  # noqa
        fpath = common.test_data_file('test2.mseed')
        p = make_pile(fpath, show_progress=False)
        cls.win = SnufflerWindow(pile=p)
        cls.pile_viewer = cls.win.pile_viewer
        cls.viewer = cls.win.pile_viewer.viewer
        pv = cls.pile_viewer
        cls.main_control_defaults = dict(
            highpass_control=pv.highpass_control.get_value(),
            lowpass_control=pv.lowpass_control.get_value(),
            gain_control=pv.gain_control.get_value(),
            rot_control=pv.rot_control.get_value())
Example #13
    def setUpClass(cls):
        '''
        Create a reusable snuffler instance for all test cases.
        '''
        super(GUITest, cls).setUpClass()
        if no_gui:  # nosetests runs this even when the class has @skip
            return

        from pyrocko.gui import snuffler as sm

        cls.snuffler = sm.get_snuffler_instance()

        fpath = common.test_data_file('test2.mseed')
        p = make_pile(fpath, show_progress=False)
        cls.win = SnufflerWindow(pile=p)
        cls.pile_viewer = cls.win.pile_viewer
        cls.viewer = cls.win.pile_viewer.viewer
        pv = cls.pile_viewer
        cls.main_control_defaults = dict(
            highpass_control=pv.highpass_control.get_value(),
            lowpass_control=pv.lowpass_control.get_value(),
            gain_control=pv.gain_control.get_value(),
            rot_control=pv.rot_control.get_value())
Example #14
def search(config,
           override_tmin=None,
           override_tmax=None,
           show_detections=False,
           show_movie=False,
           show_window_traces=False,
           force=False,
           stop_after_first=False,
           nparallel=6,
           save_imax=False,
           bark=False):

    fp = config.expand_path

    run_path = fp(config.run_path)

    # if op.exists(run_path):
    #     if force:
    #         shutil.rmtree(run_path)
    #     else:
    #         raise common.LassieError(
    #             'run directory already exists: %s' %
    #             run_path)

    util.ensuredir(run_path)

    write_config(config, op.join(run_path, 'config.yaml'))

    ifm_path_template = config.get_ifm_path_template()
    detections_path = config.get_detections_path()
    events_path = config.get_events_path()
    figures_path_template = config.get_figures_path_template()

    config.setup_image_function_contributions()
    ifcs = config.image_function_contributions

    grid = config.get_grid()
    receivers = config.get_receivers()

    norm_map = gridmod.geometrical_normalization(grid, receivers)

    data_paths = fp(config.data_paths)
    for data_path in data_paths:
        if not op.exists(data_path):
            # warn instead of silently ignoring missing paths
            logger.warn('data path does not exist: %s' % data_path)

    p = pile.make_pile(data_paths, fileformat='detect')
    if p.is_empty():
        raise common.LassieError('no usable waveforms found')

    for ifc in ifcs:
        ifc.prescan(p)

    shift_tables = []
    tshift_minmaxs = []
    for ifc in ifcs:
        shift_tables.append(ifc.get_table(grid, receivers))
        tshift_minmaxs.append(num.nanmin(shift_tables[-1]))
        tshift_minmaxs.append(num.nanmax(shift_tables[-1]))

    fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs)

    tshift_min = min(tshift_minmaxs)
    tshift_max = max(tshift_minmaxs)

    if config.detector_tpeaksearch is not None:
        tpeaksearch = config.detector_tpeaksearch
    else:
        tpeaksearch = (tshift_max - tshift_min) + 1.0 / fsmooth_min

    tpad = max(ifc.get_tpad() for ifc in ifcs) + \
        (tshift_max - tshift_min) + tpeaksearch

    tinc = (tshift_max - tshift_min) * 10. + 3.0 * tpad
    tavail = p.tmax - p.tmin
    tinc = min(tinc, tavail - 2.0 * tpad)

    if tinc <= 0:
        raise common.LassieError('available waveforms too short \n'
                                 'required: %g s\n'
                                 'available: %g s\n' % (2. * tpad, tavail))

    blacklist = set(tuple(s.split('.')) for s in config.blacklist)
    whitelist = set(tuple(s.split('.')) for s in config.whitelist)

    distances = grid.distances(receivers)
    distances_to_grid = num.min(distances, axis=0)

    distance_min = num.min(distances)
    distance_max = num.max(distances)

    station_index = dict(
        (rec.codes, i) for (i, rec) in enumerate(receivers)
        if rec.codes not in blacklist and (
            not whitelist or rec.codes in whitelist) and (
                config.distance_max is None
                or distances_to_grid[i] <= config.distance_max))

    check_data_consistency(p, config)

    deltat_cf = max(p.deltats.keys())
    assert deltat_cf > 0.0

    while True:
        if not all(ifc.deltat_cf_is_available(deltat_cf * 2) for ifc in ifcs):
            break

        deltat_cf *= 2
    logger.info('CF lassie sampling interval (rate): %g s (%g Hz)' %
                (deltat_cf, 1.0 / deltat_cf))

    ngridpoints = grid.size()

    logger.info('number of grid points: %i' % ngridpoints)
    logger.info('minimum source-receiver distance: %g m' % distance_min)
    logger.info('maximum source-receiver distance: %g m' % distance_max)
    logger.info('minimum travel-time: %g s' % tshift_min)
    logger.info('maximum travel-time: %g s' % tshift_max)

    idetection = 0

    tmin = override_tmin or config.tmin or p.tmin + tpad
    tmax = override_tmax or config.tmax or p.tmax - tpad

    events = config.get_events()
    twindows = []
    if events is not None:
        for ev in events:
            if tmin <= ev.time <= tmax:
                twindows.append(
                    (ev.time + tshift_min - (tshift_max - tshift_min) *
                     config.event_time_window_factor,
                     ev.time + tshift_min + (tshift_max - tshift_min) *
                     config.event_time_window_factor))

    else:
        twindows.append((tmin, tmax))

    for iwindow_group, (tmin_win, tmax_win) in enumerate(twindows):

        nwin = int(math.ceil((tmax_win - tmin_win) / tinc))

        logger.info('start processing time window group %i/%i: %s - %s' %
                    (iwindow_group + 1, len(twindows),
                     util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

        logger.info('number of time windows: %i' % nwin)
        logger.info('time window length: %g s' % (tinc + 2.0 * tpad))
        logger.info('time window payload: %g s' % tinc)
        logger.info('time window padding: 2 x %g s' % tpad)
        logger.info('time window overlap: %g%%' % (100.0 * 2.0 * tpad /
                                                   (tinc + 2.0 * tpad)))

        iwin = -1

        for trs in p.chopper(
                tmin=tmin_win,
                tmax=tmax_win,
                tinc=tinc,
                tpad=tpad,
                want_incomplete=config.fill_incomplete_with_zeros,
                trace_selector=lambda tr: tr.nslc_id[:3] in station_index):
            iwin += 1
            trs_ok = []
            for tr in trs:
                if tr.ydata.size == 0:
                    logger.warn('skipping empty trace: %s.%s.%s.%s' %
                                tr.nslc_id)

                    continue

                if not num.all(num.isfinite(tr.ydata)):
                    logger.warn('skipping trace because of invalid values: '
                                '%s.%s.%s.%s' % tr.nslc_id)

                    continue

                trs_ok.append(tr)

            trs = trs_ok

            if not trs:
                continue

            logger.info('processing time window %i/%i: %s - %s' %
                        (iwin + 1, nwin, util.time_to_str(
                            trs[0].wmin), util.time_to_str(trs[0].wmax)))

            wmin = trs[0].wmin
            wmax = trs[0].wmax

            if config.fill_incomplete_with_zeros:
                trs = zero_fill(trs, wmin - tpad, wmax + tpad)

            t0 = math.floor(wmin / deltat_cf) * deltat_cf
            iwmin = int(round((wmin - tpeaksearch - t0) / deltat_cf))
            iwmax = int(round((wmax + tpeaksearch - t0) / deltat_cf))
            lengthout = iwmax - iwmin + 1

            pdata = []
            trs_debug = []
            parstack_params = []
            for iifc, ifc in enumerate(ifcs):
                dataset = ifc.preprocess(trs, wmin - tpeaksearch,
                                         wmax + tpeaksearch,
                                         tshift_max - tshift_min, deltat_cf)
                if not dataset:
                    continue

                nstations_selected = len(dataset)

                nsls_selected, trs_selected = zip(*dataset)

                for tr in trs_selected:
                    tr.meta = {'tabu': True}

                trs_debug.extend(trs + list(trs_selected))

                istations_selected = num.array(
                    [station_index[nsl] for nsl in nsls_selected],
                    dtype=int)  # num.int was removed in recent NumPy
                arrays = [tr.ydata.astype(float) for tr in trs_selected]

                offsets = num.array(
                    [int(round((tr.tmin - t0) / deltat_cf))
                     for tr in trs_selected], dtype=num.int32)

                w = ifc.get_weights(nsls_selected)

                weights = num.ones((ngridpoints, nstations_selected))
                weights *= w[num.newaxis, :]
                weights *= ifc.weight

                shift_table = shift_tables[iifc]

                ok = num.isfinite(shift_table[:, istations_selected])
                bad = num.logical_not(ok)

                shifts = -num.round(shift_table[:, istations_selected] /
                                    deltat_cf).astype(num.int32)

                weights[bad] = 0.0
                shifts[bad] = num.max(shifts[ok])

                pdata.append((list(trs_selected), shift_table, ifc))
                parstack_params.append((arrays, offsets, shifts, weights))

            if config.stacking_blocksize is not None:
                ipstep = config.stacking_blocksize
                frames = None
            else:
                ipstep = lengthout
                frames = num.zeros((ngridpoints, lengthout))

            twall_start = time.time()
            frame_maxs = num.zeros(lengthout)
            frame_argmaxs = num.zeros(lengthout, dtype=int)
            ipmin = iwmin
            while ipmin < iwmin + lengthout:
                ipsize = min(ipstep, iwmin + lengthout - ipmin)
                if ipstep == lengthout:
                    frames_p = frames
                else:
                    frames_p = num.zeros((ngridpoints, ipsize))

                for (arrays, offsets, shifts, weights) in parstack_params:
                    frames_p, _ = parstack(arrays,
                                           offsets,
                                           shifts,
                                           weights,
                                           0,
                                           offsetout=ipmin,
                                           lengthout=ipsize,
                                           result=frames_p,
                                           nparallel=nparallel,
                                           impl='openmp')

                if config.sharpness_normalization:
                    frame_p_maxs = frames_p.max(axis=0)
                    frame_p_means = num.abs(frames_p).mean(axis=0)
                    frames_p *= (frame_p_maxs / frame_p_means)[num.newaxis, :]
                    frames_p *= norm_map[:, num.newaxis]

                if config.ifc_count_normalization:
                    frames_p *= 1.0 / len(ifcs)

                frame_maxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    frames_p.max(axis=0)
                frame_argmaxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    pargmax(frames_p)

                ipmin += ipstep
                del frames_p

            twall_end = time.time()

            logger.info('wallclock time for stacking: %g s' %
                        (twall_end - twall_start))

            tmin_frames = t0 + iwmin * deltat_cf

            tr_stackmax = trace.Trace('',
                                      'SMAX',
                                      '',
                                      '',
                                      tmin=tmin_frames,
                                      deltat=deltat_cf,
                                      ydata=frame_maxs)

            tr_stackmax.meta = {'tabu': True}

            trs_debug.append(tr_stackmax)

            if show_window_traces:
                trace.snuffle(trs_debug)

            ydata_window = tr_stackmax.chop(wmin, wmax,
                                            inplace=False).get_ydata()

            logger.info('CF stats: min %g, max %g, median %g' %
                        (num.min(ydata_window), num.max(ydata_window),
                         num.median(ydata_window)))
            # 17 is the maximum number of seiger stations; 4 is a mean
            # baseline for noise
            detector_threshold_seiger = config.detector_threshold - (
                (17 - nstations_selected) * 4)
            if nstations_selected != 17:
                logger.info(
                    'Warning, station outage detected! '
                    'Nr of stations operable: %s, threshold now: %s'
                    % (nstations_selected, detector_threshold_seiger))

            tpeaks, apeaks = list(
                zip(*[(tpeak, apeak) for (tpeak, apeak) in zip(
                    *tr_stackmax.peaks(detector_threshold_seiger, tpeaksearch))
                      if wmin <= tpeak < wmax])) or ([], [])

            tr_stackmax_indx = tr_stackmax.copy(data=False)
            tr_stackmax_indx.set_ydata(frame_argmaxs.astype(num.int32))
            tr_stackmax_indx.set_location('i')

            for (tpeak, apeak) in zip(tpeaks, apeaks):

                iframe = int(round((tpeak - tmin_frames) / deltat_cf))
                imax = frame_argmaxs[iframe]

                latpeak, lonpeak, xpeak, ypeak, zpeak = \
                    grid.index_to_location(imax)

                idetection += 1

                detection = Detection(id='%06i' % idetection,
                                      time=tpeak,
                                      location=geo.Point(lat=float(latpeak),
                                                         lon=float(lonpeak),
                                                         x=float(xpeak),
                                                         y=float(ypeak),
                                                         z=float(zpeak)),
                                      ifm=float(apeak))

                if bark:
                    common.bark()

                logger.info('detection found: %s' % str(detection))

                with open(detections_path, 'a') as f:
                    f.write(
                        '%06i %s %g %g %g %g %g %g\n' %
                        (idetection,
                         util.time_to_str(
                             tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
                         apeak, latpeak, lonpeak, xpeak, ypeak, zpeak))

                ev = detection.get_event()
                with open(events_path, 'a') as f:
                    model.dump_events([ev], stream=f)

                if show_detections or config.save_figures:
                    fmin = min(ifc.fmin for ifc in ifcs)
                    fmax = min(ifc.fmax for ifc in ifcs)

                    fn = figures_path_template % {
                        'id': util.tts(t0).replace(" ", "T"),
                        'format': 'png'
                    }

                    util.ensuredirs(fn)

                    if frames is not None:
                        frames_p = frames
                        tmin_frames_p = tmin_frames
                        iframe_p = iframe

                    else:
                        iframe_min = max(
                            0, int(round(iframe - tpeaksearch / deltat_cf)))
                        iframe_max = min(
                            lengthout - 1,
                            int(round(iframe + tpeaksearch / deltat_cf)))

                        ipsize = iframe_max - iframe_min + 1
                        frames_p = num.zeros((ngridpoints, ipsize))
                        tmin_frames_p = tmin_frames + iframe_min * deltat_cf
                        iframe_p = iframe - iframe_min

                        for (arrays, offsets, shifts, weights) \
                                in parstack_params:

                            frames_p, _ = parstack(arrays,
                                                   offsets,
                                                   shifts,
                                                   weights,
                                                   0,
                                                   offsetout=iwmin +
                                                   iframe_min,
                                                   lengthout=ipsize,
                                                   result=frames_p,
                                                   nparallel=nparallel,
                                                   impl='openmp')

                        if config.sharpness_normalization:
                            frame_p_maxs = frames_p.max(axis=0)
                            frame_p_means = num.abs(frames_p).mean(axis=0)
                            frames_p *= (frame_p_maxs /
                                         frame_p_means)[num.newaxis, :]
                            frames_p *= norm_map[:, num.newaxis]

                        if config.ifc_count_normalization:
                            frames_p *= 1.0 / len(ifcs)
                    try:
                        plot.plot_detection(grid,
                                            receivers,
                                            frames_p,
                                            tmin_frames_p,
                                            deltat_cf,
                                            imax,
                                            iframe_p,
                                            xpeak,
                                            ypeak,
                                            zpeak,
                                            tr_stackmax,
                                            tpeaks,
                                            apeaks,
                                            detector_threshold_seiger,
                                            wmin,
                                            wmax,
                                            pdata,
                                            trs,
                                            fmin,
                                            fmax,
                                            idetection,
                                            tpeaksearch,
                                            movie=show_movie,
                                            show=show_detections,
                                            save_filename=fn,
                                            event=ev)
                    except Exception:
                        # plotting failures must not abort the detection loop
                        pass

                    del frames_p

                if stop_after_first:
                    return

            tr_stackmax.chop(wmin, wmax)
            tr_stackmax_indx.chop(wmin, wmax)
            if save_imax is True:
                io.save([tr_stackmax, tr_stackmax_indx], ifm_path_template)

            del frames
        logger.info('end processing time window group: %s - %s' %
                    (util.time_to_str(tmin_win), util.time_to_str(tmax_win)))
    cat = Catalog()
    files = glob("%s/../figures/*qml*" % run_path)
    files.sort(key=os.path.getmtime)
    for file in files:
        cat_read = read_events(file)
        for event in cat_read:
            cat.append(event)
    cat.write("%s/../all_events_stacking.qml" % run_path, format="QUAKEML")
Example #15
from pyrocko import io, trace, pile
from pyrocko.example import get_example_data

get_example_data('test.mseed')

traces = io.load('test.mseed')
traces[0].snuffle()  # look at a single trace
trace.snuffle(traces)  # look at a bunch of traces

# do something with the traces:
new_traces = []
for tr in traces:
    new = tr.copy()
    new.whiten()
    # to allow the viewer to distinguish the traces
    new.set_location('whitened')
    new_traces.append(new)

trace.snuffle(traces + new_traces)

# it is also possible to 'snuffle' a pile:
p = pile.make_pile(['test.mseed'])
p.snuffle()
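
Beyond snuffling, a pile built with make_pile can be processed window by window; a minimal sketch using the pile's chopper iterator on the same pile p (the 60 s window length and the RMS metric are illustrative assumptions):

import numpy as num

# Hedged sketch: iterate the pile in 60 s windows and print a per-trace RMS.
for trs in p.chopper(tinc=60.):
    for tr in trs:
        rms = num.sqrt(num.mean(tr.ydata.astype(float)**2))
        print(tr.nslc_id, rms)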
Example #16
    #fbands.append([4.0, 10.])

    phases = LocalEngine(store_superdirs=['/data/stores'],
                         default_store_id='globalttt').get_store()

    #filenames = glob.glob('data/*.mseed')
    #filenames = glob.glob('/data/webnet/waveform_R/2008/*.mseed')
    #datapath = '/data/webnet/mseed/2008'
    #datapath = '/data/webnet/waveform_R/2008'
    #datapath = '/data/share/Res_all_NKC'
    datapath = '/media/usb0/Res_all_NKC_taper'
    #datapath = '/media/usb0/restituted_pyrocko'
    stations = model.load_stations('../data/stations.pf')
    reference_id = 'NKC'
    references = {}
    data_pile = pile.make_pile(datapath, selector='rest_*')

    fband = {'order': 4, 'corner_hp': 1.0, 'corner_lp': 4.}
    window = StaticLengthWindow(static_length=30., phase_position=0.5)

    taper = trace.CosFader(xfrac=0.25)

    #event_selector = EventSelector(distmin=1000*km,
    #                               distmax=20000*km,
    #                               depthmin=2*km,
    #                               depthmax=600*km,
    #                               magmin=4.9)

    candidate_fn = '../candidates2013.pf'
    candidates = [
        m.get_event() for m in gui_util.Marker.load_markers(candidate_fn)]
Example #17
def call(events,
         data_paths,
         fmin=1,
         fmax=5,
         show_restituded_traces=False,
         stations=None,
         needs_restitution=True,
         make_markers=True,
         show_plot=True,
         modify_inplace=True):

    events.sort(key=lambda ev: ev.time)

    stations_dict = dict((s.nsl(), s) for s in stations)

    markers = []
    local_magnitudes = []
    p = pile.make_pile(data_paths, fileformat="mseed", show_progress=False)

    for event in events:
        mags = defaultdict(list)
        tpad = 2. / fmin

        def trace_selector(tr):
            c = tr.channel.upper()
            return c.endswith('E') or c.endswith('N') or \
                tr.location.endswith('_rest')

        distances = {}
        rest_traces = []

        event2 = copy.deepcopy(event)

        for tr in get_traces(p, event, stations_dict.values(), trace_selector,
                             tpad):

            nslc = tr.nslc_id

            try:
                tr.highpass(4, fmin, nyquist_exception=True)
                tr.lowpass(4, fmax, nyquist_exception=True)
            except Exception:
                pass

            try:
                station = stations_dict[nslc[:3]]
            except KeyError as e:
                print(e)
                continue

            if needs_restitution is True:
                resp = get_response(nslc)
                try:
                    tr_vel = tr.transfer(tfade=tpad,
                                         freqlimits=(fmin * 0.5, fmin, fmax,
                                                     fmax * 2.0),
                                         transfer_function=resp,
                                         invert=True)
                except trace.TraceTooShort as e:
                    continue

            else:
                try:
                    tr_vel = tr.transfer(
                        tfade=tpad,
                        freqlimits=(fmin * 0.5, fmin, fmax, fmax * 2.0),
                        transfer_function=wood_anderson_response,
                        invert=False)
                except trace.TraceTooShort as e:
                    continue

            distance = orthodrome.distance_accurate50m(event, station)

            tr_vel.set_codes(location=tr_vel.location + '_rest')
            tr_vel.meta = dict(tabu=True)
            t_of_max, amplitude = tr_vel.absmax()

            if show_restituded_traces:
                rest_traces.append(tr_vel)
                m_nslc = tr_vel.nslc_id
            else:
                m_nslc = tr.nslc_id

            mag = local_magnitude(distance, amplitude)
            if make_markers is True:
                markers.append(
                    PhaseMarker([m_nslc],
                                t_of_max,
                                t_of_max,
                                1,
                                phasename='%3.1f' % mag,
                                event=event2))

            mags[nslc[:2]].append(mag)
            distances[nslc[:2]] = distance

        if not mags:
            continue

        for k in mags:
            mags[k] = max(mags[k])

        # use a distinct name so the local_magnitude() helper is not shadowed
        local_mag = round(num.median(list(mags.values())), 1)

        if show_plot is True:
            data = []
            for k in mags:
                data.append((distances[k], mags[k]))

            dists, mags_arr = num.array(data).T

            dists /= km
            fig = plt.figure()
            axes = fig.add_subplot(1, 1, 1)
            axes.plot(dists, mags_arr, 'o', color=to01(graph_colors[0]))
            for x, y, label in zip(dists, mags_arr, mags.keys()):
                axes.text(x, y, '.'.join(label))

            axes.axhline(local_mag, color=to01(graph_colors[0]))
            mag_std = num.std(list(mags.values()))

            msg = 'local magnitude: %s, std: %s' % \
                (round(local_mag, 1),
                    round(mag_std, 1))
            axes.text(max(dists),
                      local_mag,
                      msg,
                      verticalalignment='bottom',
                      horizontalalignment='right')

            axes.axhspan(local_mag - mag_std,
                         local_mag + mag_std,
                         alpha=0.1)

            axes.set_xlabel('Distance [km]')
            axes.set_ylabel('Local Magnitude')
            plt.savefig("ml_%s.png" % event.name)

        local_magnitudes.append(local_mag)

    if modify_inplace is True:
        event.magnitude = local_mag
Example #18
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an accident')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add selection file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='Extend traces by zero-padding if clean restitution requires '
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # using first site with this station
                    nsl_to_station[nsl] = s

        logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

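    # in --continue mode, scan previously downloaded files so channels
    # already covered in a time window are skipped below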
    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / float(channel.sample_rate.value)
                        except AttributeError:
                            deltat = 1.0 / float(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        # apparently the WS are only sensitive to full seconds
                        # round to avoid gaps, increase safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                for (net, sta, loc, cha, tmin_sel, tmax_sel) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warning('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            'an error occurred while downloading data '
                            'for channels\n  %s' % '\n  '.join(
                                '.'.join(x[:4]) for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

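        # group channels and stations by the exact combination of sites
        # offering them, to summarize overlap between data centers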
        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
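    # restitute in overlapping windows: traces from the previous window
    # (rest_traces_b) are degapped and crossfaded with the current ones
    # before being cut and dumped, avoiding artefacts at window boundaries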
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % str(tr))
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

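                    # deconvolve the instrument response (invert=True),
                    # applying time fade tfade and frequency taper ftap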
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warning('failed to restitute trace %s.%s.%s.%s (%s)' %
                               (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
Example No. 19
import logging

from pyrocko import trace
from pyrocko import model
from pyrocko import pile
from pyrocko import gui_util
from pyrocko.gf import LocalEngine
from autogain import autogain, util_optic

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
km = 1000.

if __name__ == '__main__':

    # Where is your data stored?:
    datapath = '/media/usb/webnet/pole_zero/restituted_displacement/2013Mar'
    data_pile = pile.make_pile(datapath)

    # And the stations:
    stations = model.load_stations('/media/usb/webnet/meta/stations.pf')

    # Station code of the trace you want to scale against:
    reference_id = 'NKC'

    # Frequency band to use:
    fband = {'order':4, 'corner_hp':1.0, 'corner_lp':4.}

    # And a taper to avoid filtering artefacts.
    taper = trace.CosFader(xfrac=0.25)

    # Define a window to chop traces. In this case a static length of 20
    # seconds will be used and the synthetic phase arrival will be in the 
Example No. 20
def load_data_archieve(validation_data,
                       gf_freq,
                       duration=4,
                       wanted_start=None,
                       wanted_end=None):
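    # note: day-folder names are assumed to end with two epoch timestamps,
    # d1 = float(path[-25:-13]) (start) and d2 = float(path[-12:]) (end)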
    folder = validation_data
    pathlist = Path(folder).glob('day*')
    waveforms = []
    stations = []
    if wanted_start is not None:
        try:
            wanted_start = util.stt(wanted_start)
            wanted_end = util.stt(wanted_end)
        except Exception:
            pass

    from pyrocko import pile
    paths = []
    safecon = 0
    for path in sorted(pathlist):
        path = str(path)
        d2 = float(str(path)[-12:])
        d1 = float(str(path)[-25:-13])
        if wanted_start is not None:
            do_safety_files = False
            if (d1 >= wanted_start
                    and d2 <= wanted_end) or (d2 - wanted_end < 86400.
                                              and d2 - wanted_end > 0.
                                              and safecon == 0):
                st = model.load_stations(path + "/waveforms/stations.raw.txt")

                d_diff = d2 - d1
                tr_packages = int(d_diff / duration)
                #for tr in traces:
                #    tr.downsample_to(gf_freq)
                #        if safecon == 0:

                pathlist_waveform_files = Path(path + "/waveforms/rest/").glob(
                    '*.mseed')
                wanted_start_str = util.tts(wanted_start)[14:16]
                diff_to_full = float(wanted_start_str)
                max_diff = 55.
                min_diff = 5.
                if diff_to_full > max_diff or diff_to_full < min_diff:
                    do_safety_files = True
                for path_wave in sorted(pathlist_waveform_files):
                    path_wave = str(path_wave)
                    p1 = path_wave[-25:-15]
                    p2 = path_wave[-14:-12]
                    p3 = path_wave[-11:-9]
                    p4 = path_wave[-8:-6]
                    try:
                        file_time = util.stt(p1 + " " + p2 + ":" + p3 + ":" +
                                             p4)
                        tdiff = file_time - wanted_start
                        if do_safety_files is True:
                            if float(p2) - float(
                                    util.tts(wanted_start)[11:13]) == 0:
                                paths.append(str(path_wave))
                            if diff_to_full > max_diff and float(p2) - float(
                                    util.tts(wanted_start)[11:13]) == 1.:
                                paths.append(str(path_wave))
                            if diff_to_full < min_diff and float(p2) - float(
                                    util.tts(wanted_start)[11:13]) == -1.:
                                paths.append(str(path_wave))

                        else:
                            if float(p2) - float(
                                    util.tts(wanted_start)[11:13]) == 0:
                                paths.append(str(path_wave))
                    except Exception:
                        pass

                safecon += 1

    p = pile.make_pile(paths)
    for traces in p.chopper(tmin=wanted_start, tinc=duration):
        if traces:
            if traces[0].tmax < wanted_end:
                #    for i in range(0, tr_packages):
                #        traces = traces
                #for tr in traces:
                #    tr.chop(tr.tmin+i*duration,
                #            tr.tmin+i*duration+duration)
                #tr.downsample_to(gf_freq)
                waveforms.append(traces)
                stations.append(st)
    #    else:
    #        traces = io.load(path+"/waveforms/rest/traces.mseed")
    #        st = model.load_stations(path+"/waveforms/stations.raw.txt")
    #        for tr in traces:
    #            tr.downsample_to(gf_freq)
    #        waveforms.append(traces)
    #        stations.append(st)
    return waveforms, stations
Example No. 21
def iris_pull(options, conf, event_names):
    conf = conf['iris_pull_config']

    if not event_names:
        sys.exit('need event name')

    preparator = InvResponsePreparator(conf.begin_phase, conf.end_phase,
                                       conf.inv_response_frequencyband)

    for event_name in event_names:
        conf.event_name = event_name
        event = _get_event_infos(conf)
        tevent = event.time

        station_query_save_path = conf.path('station_query_save_path')
        if os.path.exists(station_query_save_path):
            logger.info('Using stored station query.')
            all_stations = pload(station_query_save_path)
        else:
            logger.info('Querying for stations...')
            all_stations = combi_get_stations(
                lat=event.lat,
                lon=event.lon,
                rmin=conf.query_rmin,
                rmax=conf.query_rmax,
                tmin=tevent,
                tmax=tevent + 3600.,
                channel_pattern=conf.query_channel_pattern)

            util.ensuredirs(station_query_save_path)
            pdump(all_stations, station_query_save_path)

        nstations = len(set(
            (sta.network, sta.station) for sta in all_stations))
        logger.info('Station query returned %i station%s' %
                    (nstations, plural_s(nstations)))
        preferred_ns = set(
            iris_get_vnets(conf.preferred_virtual_networks,
                           tmin=tevent,
                           tmax=tevent + 3600.))
        preferred_n = set(conf.preferred_networks)

        for station in all_stations:
            station.set_event_relative_data(event)

        raw_trace_path = conf.path('raw_trace_path')

        nsl_all = set(get_nsl(s) for s in all_stations)

        state_save_path = conf.path('state_save_path')
        if os.path.exists(state_save_path):
            nsl_ok, nsl_blacklist, nsl_use = pload(state_save_path)
        else:
            nsl_ok = set()
            nsl_blacklist = set()
            nsl_use = set()

        manual_blacklist_path = conf.path('manual_blacklist_path')
        nsl_blacklist.update(read_manual_blacklist(manual_blacklist_path))

        nsl_selected = set()
        nwanted = conf.get_or_none('nwanted')
        assert nwanted is None or len(nwanted) == 2

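        # iteratively select candidate stations, try to download their
        # data, blacklist the ones that fail, and repeat until enough
        # stations with usable data are collected (or all were tried)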
        while True:
            if nwanted:
                selected = select_stations(all_stations,
                                           nwanted[1],
                                           preferred_n=preferred_n,
                                           preferred_ns=preferred_ns,
                                           preferred_nsl=nsl_ok,
                                           blacklist_nsl=nsl_blacklist)
            else:
                selected = all_stations

            nsl_selected = set(get_nsl(s) for s in selected)

            download = [s for s in selected if get_nsl(s) not in nsl_ok]
            nsl_download = set(get_nsl(s) for s in download)

            combi_get_responses(download, tevent, conf.path('resp_path'))

            tmin = preparator.get_tmin_limit(event, download)
            tmax = preparator.get_tmax_limit(event, download)

            logger.info('Loading data for event %s:\n%s' %
                        (event_name, str_nsl_selection(nsl_download)))
            try:
                fns = combi_get_data(
                    download,
                    tmin,
                    tmax,
                    raw_trace_path,
                    neach=conf.query_nstations_per_datarequest)
                p = pile.make_pile(fns, show_progress=False)
                prepared_trace_path = conf.path('prepared_trace_path')
                for traces in preparator.iter_prepare(p, event, download):
                    for tr in traces:
                        nsl_ok.add(get_nsl(tr))
                    io.save(traces, prepared_trace_path)

            except iris_ws.NotFound:
                pass

            logger.info('Blacklisting:\n%s' %
                        str_nsl_selection(nsl_download - nsl_ok))

            nsl_blacklist.update(nsl_download - nsl_ok)
            preferred_ns.update(set(nsl[:2] for nsl in nsl_ok))

            nsl_use = nsl_ok & nsl_selected

            logger.info('Have %i stations with data.' % len(nsl_use))

            if not nwanted:
                break

            else:
                if len(nsl_selected) == len(nsl_all):
                    break

                if len(nsl_use) > nwanted[0]:
                    break

        pdump((nsl_ok, nsl_blacklist, nsl_use), state_save_path)

        if nwanted and len(nsl_use) >= nwanted[1]:
            selected = select_stations(selected,
                                       nwanted[1],
                                       preferred_n=preferred_n,
                                       preferred_ns=preferred_ns,
                                       blacklist_nsl=nsl_blacklist)
            nsl_selected = set(get_nsl(s) for s in selected)
            nsl_use = nsl_ok & nsl_selected

        stations = [s for s in all_stations if get_nsl(s) in nsl_use]

        model.dump_stations(stations, conf.path('stations_ok_path'))
        model.dump_stations(all_stations, conf.path('stations_all_path'))

        if nsl_use:
            logger.info('Data available for event %s:\n%s' %
                        (event_name, str_nsl_selection(nsl_use)))
        else:
            logger.info('No data available for event %s' % event_name)
Example No. 22
    def dump_pile(self, fn='test_dumped_seismograms.mseed'):
        pile.make_pile(seismograms.values(), fn=fn)
"""
Extrahiere miniseed-Daten um Events, die aus Katalog ausgelesen werden
"""

# sys.argv provides everything given on the command line after the program name.
dirs = sys.argv[1:]

# load the station file:
stats = model.load_stations('/scratch/local1/doctar/meta/stations.txt')

# read the events:
events = model.load_events('/home/zmaw/u254061/master/event_marker_IPMA.txt')

# build a pile from all miniseed files in the directories given on the command line:
outpile = pile.make_pile(dirs)

f=open('/home/zmaw/u254061/master/event_marker_IPMA.txt')
for line in f:
    if line.lstrip().startswith('#'):
        continue
    toks = line.split()
    timedate, timetime = toks[1], toks[2]
    gtime = util.str_to_time(str(timedate+' '+timetime))

    trange = [gtime-100, gtime+1000]
    new_pile = []
    for traces in outpile.chopper(trange[0], trange[1], load_data=True, degap=False):
        if traces:
            window_start = traces[0].wmin
            timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H') 
Example No. 24
    # fbands.append([4.0, 10.])

    phases = LocalEngine(store_superdirs=['/data/stores'],
                         default_store_id='globalttt').get_store()

    #filenames = glob.glob('data/*.mseed')
    #filenames = glob.glob('/data/webnet/waveform_R/2008/*.mseed')
    #datapath = '/data/webnet/mseed/2008'
    #datapath = '/data/webnet/waveform_R/2008'
    #datapath = '/data/share/Res_all_NKC'
    datapath = '/media/usb0/Res_all_NKC_taper'
    #datapath = '/media/usb0/restituted_pyrocko'
    stations = model.load_stations('../data/stations.pf')
    reference_id = 'NKC'
    references = {}
    data_pile = pile.make_pile(datapath, selector='rest_*')


    fband = {'order':4, 'corner_hp':1.0, 'corner_lp':4.}
    window = StaticLengthWindow(static_length=30., 
                                phase_position=0.5)

    taper = trace.CosFader(xfrac=0.25)

    #event_selector = EventSelector(distmin=1000*km,
    #                               distmax=20000*km,
    #                               depthmin=2*km,
    #                               depthmax=600*km,
    #                               magmin=4.9)

    candidate_fn = '../candidates2013.pf'
Example No. 25
    def load(self, inv):
        # load the data as a pyrocko pile and reform them into an array of traces
        data = pile.make_pile([self.wdir + self.reduction])
        self.traces = data.all()

        # load station file
        fname = self.wdir + self.network
        stations_list = model.load_stations(fname)

        for s in stations_list:
            s.set_channels_by_name(*self.component.split())

        self.targets = []
        self.tmin, self.tmax = [], []
        self.arrivals = []
        self.names = []

        for station, tr in zip(stations_list,
                               self.traces):  # iterate over all stations
            # print station.lat, station.lon
            target = Target(
                lat=float(station.lat),  # station lat.
                lon=float(station.lon),  # station lon.
                store_id=inv.store,  # The gf-store to be used for this target,
                # we can also employ different gf-stores for different targets.
                interpolation='multilinear',  # interp. method between gf cells
                quantity='displacement',  # wanted retrieved quantity
                codes=station.nsl() +
                ('BH' + self.component, ))  # Station and network code

            # Next we extract the expected arrival time for this station from
            # the store, so we can use this later to define a cut-out window
            # for the optimization:
            self.targets.append(target)
            self.names.append(station.nsl()[1])

        # print len(self.traces), len(self.targets)

        for station, tr, target in zip(stations_list, self.traces,
                                       self.targets):

            engine = LocalEngine(store_superdirs=inv.store_path)
            store = engine.get_store(inv.store)
            # trace.snuffle(tr, events=self.events)
            arrival = store.t(self.phase, self.base_source,
                              target)  # expected P-wave arrival
            # print arrival
            tmin = self.base_source.time + arrival - 15  # start 15s before theor. arrival
            tmax = self.base_source.time + arrival + 15  # end 15s after theor. arrival
            # # print self.tmin,self.tmax
            tr.chop(tmin=tmin, tmax=tmax)
            self.tmin.append(tmin)
            self.tmax.append(tmax)
            self.arrivals.append(self.base_source.time + arrival)

        self.Npoints = len(self.targets)
        # data vector
        self.d = []
        self.d.append([tr.ydata for tr in self.traces])
        self.d = flatten(self.d)
        # time vector
        t = []
        for i in range(self.Npoints):
            t.append(self.traces[i].get_xdata())
        # self.t.append(map((lambda x: getattr(x,'get_xdata()')),self.traces))
        # convert time
        self.t = time2dec(list(map(util.time_to_str, flatten(t))))
        # print self.t
        self.N = len(self.d)
Example No. 26
def prep_psd_fct(i_st,
                 st,
                 l,
                 subset_catalog,
                 dir_f,
                 arrT_array,
                 arrT_R_array,
                 datapath,
                 syndatapath,
                 tinc,
                 tpad,
                 dt_s,
                 dt_e,
                 n,
                 fac_norm,
                 f_ign,
                 plot_psds=False,
                 plot_ratio_extra=False,
                 plot_m_rat=False,
                 plot_flat_ranges=False,
                 plot_neighb_ranges=False):
    """
    Preparing the PSD calculations and plotting, e.g. making data-piles
    and calling next function if both, synthetic and recorded data is
    in piles.

    :param i_st: number of station - must fit arrival time table!
    :param st: pyrocko station object (current station)
    :param subset_catalog: Catalog to be used (list of pyrocko events)
    :param dir_f: directory to store PSD results at
    :param arrT_array: Arrival times
    :param datapath: Path to restituted (,rotated and downsampled) data
    :param syndatapath: Path to sythetic data
    :param plot_ratio_extra: Should ratios of PSDs be plotted to extra figure?
                             They are displayed within the PSD plot anyway,
                             default is False.

    :returns freq_rat_list: List containing freuqncy ranges at which the
                            ratio between synthetic and observed psd is
                            constant
    :returns nslc_list: nslc list for current station
    """

    st_name = st.station
    st_data_pile = pile.make_pile(datapath,
                                  regex='%s_%s_' % (st.network, st_name),
                                  show_progress=False)
    st_syn_data_pile = pile.make_pile(syndatapath,
                                      regex='%s_%s_' % (st.network, st_name),
                                      show_progress=False)
    freq_rat_list = []
    freq_rat_list_y = []
    nslc_list = []
    # flat_by_next = []
    # flat_by_next_y = []

    if st_data_pile.tmin is not None and st_data_pile.tmax is not None and\
        st_syn_data_pile.tmin is not None\
        and st_syn_data_pile.tmax is not None:

        nsl = st.nsl()

        for cha in ['Z', 'R', 'T']:

            outs = calc_plot_psds(subset_catalog,
                                  st_data_pile,
                                  st_syn_data_pile,
                                  cha,
                                  l,
                                  dir_f,
                                  arrT_array,
                                  arrT_R_array,
                                  nsl,
                                  i_st,
                                  tinc,
                                  tpad,
                                  dt_s,
                                  dt_e,
                                  n,
                                  fac_norm,
                                  f_ign,
                                  plot_psds=plot_psds,
                                  plot_ratio_extra=plot_ratio_extra,
                                  plot_m_rat=plot_m_rat,
                                  plot_flat_ranges=plot_flat_ranges,
                                  plot_neighb_ranges=plot_neighb_ranges)

            ratio_npar = outs[-1]
            f_syn_keep = outs[-2]

            if plot_psds is True and outs[0] and outs[1]:
                plot_psd_from_dict(outs[0], outs[1], st, l, cha,
                                   subset_catalog, dir_f)

                if plot_ratio_extra is True and outs[2]:
                    plot_psdratio_from_dict(outs[2], st, l, cha,
                                            subset_catalog, dir_f)

            elif plot_psds is not True and plot_ratio_extra is True and outs[2]:
                plot_psdratio_from_dict(outs[2], st, l, cha, subset_catalog,
                                        dir_f)

            if ratio_npar.shape != (1, ):
                m_rat = calc_median_ratio(ratio_npar)

                if plot_m_rat is True:
                    plot_m_ratio(m_rat, f_syn_keep, nsl, l, dir_f, cha)

                f, y = const_psd_rat(m_rat,
                                     cha,
                                     st,
                                     l,
                                     f_syn_keep,
                                     n,
                                     fac_norm,
                                     f_ign,
                                     plot_flat_range=plot_flat_ranges,
                                     dir_f=dir_f)

                freq_rat_list.append(f)
                freq_rat_list_y.append(y)
                nslc_list.append('%s.%s.%s.%s' % (nsl[0], nsl[1], l, cha))
            '''
            f2, y2 = flat_by_neighbor_comp(
                m_rat, cha, st, f_syn_keep, dir_f,
                n, fac_norm, f_ign,
                plot_psd_neighbcomp=plot_neighb_ranges)
            flat_by_next.append(f2)
            flat_by_next_y.append(y2)
            '''

    return freq_rat_list, freq_rat_list_y, nslc_list  # ,\
Example No. 27
from pyrocko import pile, io, util
import time, calendar

# when pile.make_pile() is called without any arguments, the command line
# parameters given to the script are searched for waveform files and directories
p = pile.make_pile()

# get timestamp for full hour before first data sample in all selected traces
tmin = calendar.timegm(time.gmtime(p.tmin)[:4] + (0, 0))

tinc = 3600.0
tpad = 10.0
target_deltat = 0.1

# iterate over the data, with a window length of one hour and 2x10 seconds of
# overlap
for traces in p.chopper(tmin=tmin, tinc=tinc, tpad=tpad):

    if traces:  # the list could be empty due to gaps
        for tr in traces:
            tr.downsample_to(target_deltat, snap=True, demean=False)

            # remove overlapping
            tr.chop(tr.wmin, tr.wmax)

        window_start = traces[0].wmin
        timestring = util.time_to_str(window_start, format="%Y-%m-%d_%H")
        filepath = "downsampled/%(station)s_%(channel)s_%(mytimestring)s.mseed"
        io.save(traces, filepath, additional={"mytimestring": timestring})

Example No. 28
def snuffle(pile=None, **kwargs):
    '''View pile in a snuffler window.

    :param pile: :py:class:`pile.Pile` object to be visualized
    :param stations: list of `pyrocko.model.Station` objects or ``None``
    :param events: list of `pyrocko.model.Event` objects or ``None``
    :param markers: list of `pyrocko.gui.util.Marker` objects or ``None``
    :param ntracks: int, number of tracks to be shown initially (default: 12)
    :param follow: time interval (in seconds) for real time follow mode or
        ``None``
    :param controls: bool, whether to show the main controls (default:
        ``True``)
    :param opengl: bool, whether to use opengl (default: ``False``)
    :param paths: list of files and directories to search for trace files
    :param regex: regular expression which filenames must match
    :param format: format of input files
    :param cache_dir: cache directory with trace meta information
    :param force_cache: bool, whether to use the cache when attribute spoofing
        is active
    :param store_path: filename template, where to store trace data from input
        streams
    :param store_interval: float, time interval (in seconds) between stream
        buffer dumps
    :param want_markers: bool, whether markers should be returned
    :param launch_hook: callback function called before snuffler window is
        shown
    '''
    from .snuffler_app import Snuffler, SnufflerWindow, \
        setup_acquisition_sources, PollInjector

    if pile is None:
        # note: the 'pile' argument shadows the pyrocko.pile module
        from pyrocko import pile as pile_mod
        pile = pile_mod.make_pile()

    global app
    if app is None:
        import locale
        locale.setlocale(locale.LC_ALL, 'C')
        app = Snuffler()

    kwargs_load = {}
    for k in ('paths', 'regex', 'format', 'cache_dir', 'force_cache'):
        try:
            kwargs_load[k] = kwargs.pop(k)
        except KeyError:
            pass

    store_path = kwargs.pop('store_path', None)
    store_interval = kwargs.pop('store_interval', 600)
    want_markers = kwargs.pop('want_markers', False)
    launch_hook = kwargs.pop('launch_hook', None)

    win = SnufflerWindow(pile, **kwargs)
    if launch_hook:
        if not isinstance(launch_hook, list):
            launch_hook = [launch_hook]
        for hook in launch_hook:
            hook(win)

    sources = []
    pollinjector = None
    tempdir = None
    if 'paths' in kwargs_load:
        sources.extend(setup_acquisition_sources(kwargs_load['paths']))
        if sources:
            if store_path is None:
                tempdir = tempfile.mkdtemp('', 'snuffler-tmp-')
                store_path = pjoin(
                    tempdir,
                    'trace-%(network)s.%(station)s.%(location)s.%(channel)s.'
                    '%(tmin)s.mseed')
            elif os.path.isdir(store_path):
                store_path = pjoin(
                    store_path,
                    'trace-%(network)s.%(station)s.%(location)s.%(channel)s.'
                    '%(tmin)s.mseed')

            pollinjector = PollInjector(
                pile,
                fixation_length=store_interval,
                path=store_path)

            for source in sources:
                source.start()
                pollinjector.add_source(source)

        win.get_view().load(**kwargs_load)

    if not win.is_closing():
        app.install_sigint_handler()
        app.exec_()
        app.uninstall_sigint_handler()

    for source in sources:
        source.stop()

    if pollinjector:
        pollinjector.fixate_all()

    ret = win.return_tag()

    if want_markers:
        markers = win.get_view().get_markers()

    del win
    gc.collect()

    if tempdir:
        shutil.rmtree(tempdir)

    if want_markers:
        return ret, markers
    else:
        return ret
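
A minimal usage sketch for the snuffle() entry point above; the import
location of snuffle() and the 'data/' directory are assumptions and may
need adjusting to the installed pyrocko version:

from pyrocko import pile
from pyrocko.gui.snuffler import snuffle  # import path is an assumption

p = pile.make_pile(['data/'])  # 'data/' is a placeholder data directory
ret, markers = snuffle(p, ntracks=12, want_markers=True)
print(ret, len(markers))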
Example No. 29
class TestFSU(unittest.TestCase):
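    # fixtures: the test piles are loaded once, at class-definition time,
    # and flattened to lists of traces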

    tpile_1 = list(
        pile.make_pile(pjoin(mseeds, 'testFiles/'),
                       show_progress=False).iter_all())
    tpile_2 = list(
        pile.make_pile(pjoin(mseeds, 'testFiles_2/'),
                       show_progress=False).iter_all())

    random_pile_test = list(
        pile.make_pile(pjoin(mseeds, 'testRandomTestFiles/'),
                       show_progress=False).iter_all())
    random_pile_reference = list(
        pile.make_pile(pjoin(mseeds, 'referenceRandomTestFiles/'),
                       show_progress=False).iter_all())

    t_min = util.str_to_time('2010-01-01 22:00:00')
    data1 = np.array([0, 0, 0, 0, 1, 0, 0, 0])
    data2 = np.array([0, 0, 0, 0, -1, 0, 0, 0])
    ttrace_1 = trace.Trace(network='1',
                           station='TEST',
                           channel='z',
                           deltat=0.5,
                           ydata=data1)
    ttrace_2 = trace.Trace(network='2',
                           station='TEST',
                           channel='z',
                           deltat=0.5,
                           ydata=data2)

    def test_misfit_by_samples(self):
        data = np.random.random(100)
        ttrace = trace.Trace(network='1',
                             station='T1',
                             channel='z',
                             deltat=0.5,
                             ydata=data)

        equal_traces_list = [ttrace.get_ydata(), ttrace.get_ydata()]
        self.assertEqual(misfit_by_samples(equal_traces_list), 0,
                         'misfit of same traces is zero')

    def test_misfit_by_samples_squared(self):
        equal_neg_traces_list = [
            self.ttrace_1.get_ydata(),
            self.ttrace_2.get_ydata()
        ]
        self.assertEqual(
            misfit_by_samples(equal_neg_traces_list, square=True), 0,
            'misfit of squared traces is zero if one trace is negative of the other'
        )

    def test_find_matching_traces(self):
        self.assertEqual(
            len(
                find_matching_traces([list(self.tpile_1)],
                                     test_list=self.tpile_2)), 12,
            'did not find all 12 of 12 pairs of traces')

    def test_time_domain_misfit_equal_piles(self):
        self.assertEqual(
            time_domain_misfit(reference_pile=[self.tpile_1],
                               test_list=self.tpile_2), 0,
            'Time Domain Misfit of equal piles is not 0!')

    def test_frequency_domain_misfit_equal_piles(self):
        self.assertEqual(
            frequency_domain_misfit(reference_pile=[self.tpile_1],
                                    test_list=self.tpile_2), 0,
            'Frequency Domain Misfit of equal test_piles is NOT 0')

    def test_time_domain_misfit_UNequal_piles(self):
        self.assertNotEqual(
            time_domain_misfit(reference_pile=[self.random_pile_reference],
                               test_list=self.random_pile_test), 0,
            'Time Domain Misfit of UNequal traces is not 0')

    def test_frequency_domain_misfit_UNequal_piles(self):
        self.assertNotEqual(
            frequency_domain_misfit(
                reference_pile=[self.random_pile_reference],
                test_list=self.random_pile_test), 0,
            'Freq Dom Misfit of unequal piles is 0 but should not be 0')

    def test_equalize_sampling_rate(self):
        '''

        :return:
        '''
        data1 = np.random.random(100)
        data2 = np.random.random(100)
        ttrace_1 = trace.Trace(network='1',
                               station='T1',
                               channel='z',
                               deltat=0.5,
                               ydata=data1)
        ttrace_2 = trace.Trace(network='1',
                               station='T2',
                               channel='z',
                               deltat=0.01,
                               ydata=data2)
        downsample_if_needed([[ttrace_1, ttrace_2]])
        self.assertEqual(ttrace_1.deltat, ttrace_2.deltat,
                         'Equalization of sampling rates unsuccessful!')
        self.assertEqual(ttrace_1.deltat, 0.6,
                         'new sampling rate of ttrace_1 wrong')
        self.assertEqual(ttrace_2.deltat, 0.6,
                         'new sampling rate of ttrace_2 wrong')

    def test_chop_to_same_sample_length(self):
        data1 = np.random.random(100)
        data2 = np.random.random(80)
        [data1, data2] = chop_longer_samples([data1, data2])
        self.assertEqual(np.shape(data1), np.shape(data2),
                         'shape after chopping not equal')
Example No. 30
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="allow recreation of output <directory>",
    )

    parser.add_option(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="print debugging information to stderr",
    )

    parser.add_option(
        "--dry-run",
        dest="dry_run",
        action="store_true",
        default=False,
        help="show available stations/channels and exit "
        "(do not download waveforms)",
    )

    parser.add_option(
        "--continue",
        dest="continue_",
        action="store_true",
        default=False,
        help="continue download after a accident",
    )

    parser.add_option(
        "--local-data",
        dest="local_data",
        action="append",
        help="add file/directory with local data",
    )

    parser.add_option(
        "--local-stations",
        dest="local_stations",
        action="append",
        help="add local stations file",
    )

    parser.add_option(
        "--local-responses-resp",
        dest="local_responses_resp",
        action="append",
        help="add file/directory with local responses in RESP format",
    )

    parser.add_option(
        "--local-responses-pz",
        dest="local_responses_pz",
        action="append",
        help="add file/directory with local pole-zero responses",
    )

    parser.add_option(
        "--local-responses-stationxml",
        dest="local_responses_stationxml",
        help="add file with local response information in StationXML format",
    )

    parser.add_option(
        "--window",
        dest="window",
        default="full",
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        "] (time format is YYYY-MM-DD HH:MM:SS)",
    )

    parser.add_option(
        "--out-components",
        choices=["enu", "rtu"],
        dest="out_components",
        default="rtu",
        help="set output component orientations to radial-transverse-up [rtu] "
        "(default) or east-north-up [enu]",
    )

    parser.add_option(
        "--padding-factor",
        type=float,
        default=3.0,
        dest="padding_factor",
        help="extend time window on either side, in multiples of 1/<fmin_hz> "
        "(default: 5)",
    )

    parser.add_option(
        "--credentials",
        dest="user_credentials",
        action="append",
        default=[],
        metavar="SITE,USER,PASSWD",
        help="user credentials for specific site to access restricted data "
        "(this option can be repeated)",
    )

    parser.add_option(
        "--token",
        dest="auth_tokens",
        metavar="SITE,FILENAME",
        action="append",
        default=[],
        help="user authentication token for specific site to access "
        "restricted data (this option can be repeated)",
    )

    parser.add_option(
        "--sites",
        dest="sites",
        metavar="SITE1,SITE2,...",
        #    default='bgr',
        default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080",
        help='sites to query (available: %s, default: "%%default")' %
        ", ".join(g_sites_available),
    )

    parser.add_option(
        "--band-codes",
        dest="priority_band_code",
        metavar="V,L,M,B,H,S,E,...",
        default="V,L,M,B,H,E",
        help="select and prioritize band codes (default: %default)",
    )

    parser.add_option(
        "--instrument-codes",
        dest="priority_instrument_code",
        metavar="H,L,G,...",
        default="H,L,O,",
        help="select and prioritize instrument codes (default: %default)",
    )

    parser.add_option(
        "--radius-min",
        dest="radius_min",
        metavar="VALUE",
        default=0.0,
        type=float,
        help="minimum radius [km]",
    )

    parser.add_option(
        "--tinc",
        dest="tinc",
        metavar="VALUE",
        default=3600.0 * 12.0,
        type=float,
        help="length of seperate saved files in s",
    )

    parser.add_option(
        "--nstations-wanted",
        dest="nstations_wanted",
        metavar="N",
        type=int,
        help="number of stations to select initially",
    )

    (options, args) = parser.parse_args(sys.argv[1:])
    if len(args) not in (9, 6, 5):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, "debug")
    else:
        util.setup_logging(program_name, "info")

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical("cannot use local responses in PZ and RESP "
                        "format at the same time")
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml,
    ):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical("can only handle local responses from either PZ or "
                        "RESP or StationXML. Cannot yet merge different "
                        "response formats.")
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical("--local-responses-resp can only be used "
                        "when --stations is also given.")
        sys.exit(1)

    try:
        ename = ""
        magnitude = None
        mt = None
        if len(args) == 9:
            time = util.str_to_time(args[0] + " " + args[1])
            lat = float(args[2])
            lon = float(args[3])
            depth = float(args[4]) * km
            iarg = 5

        elif len(args) == 6:
            if args[1].find(":") == -1:
                sname_or_date = None
                lat = float(args[0])
                lon = float(args[1])
                event = None
                time = None
            else:
                sname_or_date = args[0] + " " + args[1]

            iarg = 2

        elif len(args) == 5:
            sname_or_date = args[0]
            iarg = 1

        if len(args) in (6, 5) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical("no event found")
                sys.exit(1)
            elif len(events) > 1:
                logger.critical("more than one event found")
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        event_dir = op.join("data", "events", eventname)
        output_dir = op.join(event_dir, "waveforms")
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time,
            lat=lat,
            lon=lon,
            depth=depth,
            name=ename,
            magnitude=magnitude,
            moment_tensor=mt,
        )

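    # derive the absolute time window: 'full' uses a low-velocity envelope
    # around the event, 'p' brackets the expected P arrival, otherwise a
    # fixed '<start>,<end>' range is parsed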
    if options.window == "full":
        if event is None:
            logger.critical("need event for --window=full")
            sys.exit(1)

        low_velocity = 1500.0
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == "p":
        if event is None:
            logger.critical("need event for --window=p")
            sys.exit(1)

        phases = list(map(cake.PhaseDef, "P p".split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error("required phase arrival not found")
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(",")
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    priority_band_code = options.priority_band_code.split(",")
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical("invalid band code: %s" % s)
            sys.exit(1)

    priority_instrument_code = options.priority_instrument_code.split(",")
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical("invalid instrument code: %s" % s)
            sys.exit(1)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=",".join("?%s?" % s for s in priority_band_code),
    )

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ["M/S", "M", "M/S**2"]

    output_units = "M"

    sites = [x.strip() for x in options.sites.split(",") if x.strip()]
    tinc = options.tinc
    # for site in sites:
    #     if site not in g_sites_available:
    #         logger.critical('unknown FDSN site: %s' % site)
    #         sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(",")
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(",")
            with open(token_filename, "r") as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical("cannot get token from file: %s" % token_filename)
            sys.exit(1)

    fn_template0 = (
        "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed")

    fn_template_raw = op.join(output_dir, "raw", fn_template0)
    fn_template_raw_folder = op.join(output_dir, "raw", "traces.mseed")
    fn_stations_raw = op.join(output_dir, "stations.raw.txt")
    fn_template_rest = op.join(output_dir, "rest", fn_template0)
    fn_commandline = op.join(output_dir, "seigerdown.command")

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)
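    # ftap holds the frequency taper corners later passed to trace.transfer():
    # cosine flanks from ffade_factors[0]*fmin up to fmin and from fmax up to
    # ffade_factors[1]*fmax, with flat response in between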

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                "iris": dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == "geonet":
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(site in g_user_credentials
                                       or site in g_auth_tokens),
                )

            logger.info("downloading channel information (%s)" % site)
            sx = fdsn.station(site=site,
                              format="text",
                              level="channel",
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error("No stations matching given criteria. (%s)" % site)
            sx = None

        # keep one entry per site (None on failure) so that the zips with
        # `sites` below stay aligned
        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        logger.critical("no station information available")
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}
    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s  # using first site with this station
    logger.info("number of stations found: %i" % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info("number of stations selected: %i" % len(nsls_selected))

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
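    # step through the full [tmin, tmax] span in nt windows of length tinc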
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info("time window %i/%i (%s - %s)" %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win),
            )

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        deltat = 1.0 / channel.sample_rate.value
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        selection.append(nslc + (tmin_req - deltat * 10.0,
                                                 tmax_req + deltat * 10.0))

            if options.dry_run:
                # underscore names avoid shadowing the outer tmin/tmax
                for (net, sta, loc, cha, tmin_, tmax_) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ""
                        if nbatches > 1:
                            sbatch = " (batch %i/%i)" % (
                                (i // neach) + 1, nbatches)

                        logger.info("downloading data (%s)%s" % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            if tr.station == "7869":
                                tr.station = "MOER"
                                tr.network = "LE"
                                tr.location = ""
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        io.save(trs, fn_template_raw_folder, append=True)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn("overwriting file %s", fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn("an error occurred while downloading data "
                                    "for channels \n  %s" %
                                    "\n  ".join(".".join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return "" if x == 1 else "s"

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info("selected: %s.%s.%s.%s from site%s %s" %
                        (nslc + (plural_s(len(sites)), "+".join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info("selected (%s): %i channel%s (%i station%s)" % (
                "+".join(sites),
                nchannels,
                plural_s(nchannels),
                nstations,
                plural_s(nstations),
            ))

        logger.info("selected total: %i channel%s (%i station%s)" % (
            nchannels_all,
            plural_s(nchannels_all),
            nstations_all,
            plural_s(nstations_all),
        ))

        logger.info("dry run done.")
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info("downloading response information (%s)" % site)
            sxs[site] = fdsn.station(site=site,
                                     level="response",
                                     selection=selection)
            sited = site

            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            elif site == "http://ws.gpi.kit.edu":
                sited = "kit"
            if site == "http://188.246.25.142:8080":
                sited = "moer"

            sxs[site].dump_xml(filename=op.join(output_dir, "stations.%s.xml" %
                                                sited))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site["local"] = set()
        plocal = pile.make_pile(options.local_data, fileformat="detect")
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.station == "7869":
                    tr.station = "MOER"
                    tr.network = "LE"
                    tr.location = ""
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site["local"].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append("local")

    if options.local_responses_pz:
        sxs["local"] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs["local"] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs["local"] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error("no data available")
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    # otinc = nice_seconds_floor(p.get_deltatmin() * 500000.0)
    otinc = 3600.0
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            if tr.station == "7869":
                tr.station = "MOER"
                tr.network = "LE"
                tr.location = ""
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units,
                    )

                    break

                except stationxml.NoResponseInformation:
                    failure.append("%s: no response information" % site)

                except stationxml.MultipleResponseInformation:
                    failure.append("%s: multiple response information" % site)

            if response is None:
                failure = ", ".join(failure)

            else:
                failure = ""
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = "trace too short"

            if failure:
                logger.warn("failed to restitute trace %s.%s.%s.%s (%s)" %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap="crossfade_cos")

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    if tr.station == "7869":
                        tr.station = "MOER"
                        tr.network = "LE"
                        tr.location = ""
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s"

    fn_waveforms = op.join(output_dir, "prepared", fn_template1)
    fn_stations = op.join(output_dir, "stations.prepared.txt")
    fn_event = op.join(event_dir, "event.txt")

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warning("Cannot downsample %s.%s.%s.%s: %s" %
                                   (tr.nslc_id + (e, )))
                    continue
            else:
                keep.append(tr)

        traces = keep

        if options.out_components == "rtu":
            pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z"))
        elif options.out_components == "enu":
            pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z"))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    logger.info("prepared waveforms from %i stations" % len(stations))
Ejemplo n.º 31
0
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
Ejemplo n.º 32
0
def run_cont_inversion(cfg_file, tmin_data):

    # read in config file, including setup of path structure
    cfg = AT.read_in_config_file(cfg_file)
    cfg['datetime'] = util.time_to_str(tmin_data, format='%Y-%m-%dT%H:%M:%S')

    event_count = 0
    cfg['event_count'] = event_count

    filter_flag = int(float(cfg.get('filter_flag', 0)))
    bp_hp_freq = float(cfg.get('bp_lower_corner'))

    pad_length = 1. / bp_hp_freq * 2
    if filter_flag == 0:
        pad_length = 10


    #------------------------------------------
    # set files
    #
    cont_temp_dir = op.realpath(op.abspath(op.join(cfg['temporary_directory'], 'cont_run')))
    if not op.isdir(cont_temp_dir):
        os.makedirs(cont_temp_dir)
    cfg['temporary_directory'] = cont_temp_dir

    # set file for 'time - VR' history
    VR_history_file = op.realpath(op.abspath(op.join(cfg['temporary_directory'], 'VR_over_time.txt')))
    open(VR_history_file, 'w').close()
    cfg['vr_history_file'] = VR_history_file

    # set file for logging
    log_file = op.realpath(op.abspath(op.join(cfg['temporary_directory'], 'arctic_log_file.log')))
    open(log_file, 'w').close()
    cfg['log_file'] = log_file

    # set file for storing the last config dictionary for debugging
    cfg_pickle_file = op.realpath(op.abspath(op.join(cfg['temporary_directory'], 'cfg_pickle.pp')))
    open(cfg_pickle_file, 'w').close()
    cfg['cfg_pickle_file'] = cfg_pickle_file

    # set file for storing the last solution dictionary for debugging
    solution_pickle_file = op.realpath(op.abspath(op.join(cfg['temporary_directory'], 'solution_pickle.pp')))
    open(solution_pickle_file, 'w').close()
    cfg['solution_pickle_file'] = solution_pickle_file
    #------------------------------------------
    # set time parameters
    #
    # safety time delay in seconds: data are analysed at most up to the
    # current time minus the latency, which allows a small buffer for
    # incoming data in the real-data test case
    latency = 5.

    # length of analysis window
    wlen = float(cfg['time_window_length'])

    # minimum window step size, avoiding too high cpu load
    minstep = float(cfg['window_steps_in_s'])

    # initialise window end time
    tlast = None


    #------------------------------------------
    # set other parameters
    #
    print('setting parameters (time, files, coordinates, etc.)...')
    if not AT.set_general_parameters(cfg):
        print('ERROR! could not set up parameters')
        raise RuntimeError('could not set up parameters')
    print('...done \n')

    print('setting GF... \n')
    if not AT.read_in_GF(cfg):
        print('ERROR! could not set up GF array')
        exit()
    print('...GF ready \n')

    if not AT.setup_A(cfg):
        print('ERROR! could not set up correlation matrix A')
        raise RuntimeError('could not set up correlation matrix A')
    if not AT.calc_inv_A(cfg):
        print('ERROR! could not set up inverted matrix inv_A')
        raise RuntimeError('could not set up inverted matrix inv_A')
    print('...done \n')

    #------------------------------------------
    #
    # define data pile from given file location
    p = pile.make_pile(cfg['data_dir'])
    
    # read in GFs
    if not AT.read_in_GF(cfg):
        exit('ERROR in reading GFs')

    #------------------------------------------
        
    #d_break(locals(), 'last exit before while loop')

    #------------------------------------------

    # set current real time as basis time
    tmin_real = time.time()

    #------------------------------------------
    # start infinite run
    while True:
        # check if enough time has passed since the last loop run
        while True:
            # define internal time
            tfake = time.time() - (tmin_real - tmin_data)
            # either the analysis is just starting or the minimal time for
            # window movement has passed
            if tlast is None or tfake - tlast > minstep:
                break

            # wait for the time needed to fill the minimum window step length
            time.sleep(minstep - (tfake - tlast))

        print('real_time: %s -- data_time: %s' % (
            util.time_to_str(time.time()), util.time_to_str(tfake)))

        # set temporary analysis window
        tmin = tfake - latency - wlen
        tmax = tfake - latency + p.get_deltats()[0]
        cfg['current_window_starttime'] = tmin

        print('analysing from %s  --  %s' % (
            util.time_to_str(tmin), util.time_to_str(tmax)))

        # read in all data traces available for this window
        try:
            traces = p.all(tmin=tmin, tmax=tmax, want_incomplete=False,
                           tpad=pad_length)

            for t in traces:
                restitute_t(t, cfg, bp_hp_freq)
        except Exception as e:
            sys.stderr.write('%s' % e)

        # d_break(locals(), '%s' % (util.time_to_str(tfake)))
        # analyse the data
        # try:
        analysis_4_given_window(traces, cfg)
        # except Exception as e:
        #     sys.stderr.write('%s' % e)

        # save current time stamp for next loop run
        tlast = tfake
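
The core pattern above (mapping wall-clock time onto the data timeline, then repeatedly pulling a padded window of traces from a pile) reduces to a few lines. A minimal sketch with made-up window parameters and a placeholder data directory:

import time
from pyrocko import pile, util

p = pile.make_pile(['data/'])           # placeholder data location
wlen, latency, tpad = 60., 5., 10.      # window length, safety delay, pad [s]

t0_real, t0_data = time.time(), p.tmin
for _ in range(3):                      # a real-time system would loop forever
    tfake = t0_data + (time.time() - t0_real)   # wall clock mapped to data time
    tmax = tfake - latency
    tmin = tmax - wlen
    traces = p.all(tmin=tmin, tmax=tmax, tpad=tpad, want_incomplete=False)
    print('%s -- %s: %i traces' % (
        util.time_to_str(tmin), util.time_to_str(tmax), len(traces)))
    time.sleep(1.)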
Ejemplo n.º 33
0
def _get_pile(conf, pathconf):
    fn_pattern = conf.path(pathconf)
    fns = glob.glob(fn_pattern % Stars())
    return pile.make_pile(fns, show_progress=False)
Ejemplo n.º 34
0
import logging

from pyrocko import trace
from pyrocko import model
from pyrocko import pile
from pyrocko import gui_util
from pyrocko.gf import LocalEngine
from autogain import autogain, util_optic

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
km = 1000.

if __name__ == '__main__':

    # Where is your data stored?:
    datapath = '/media/usb/webnet/pole_zero/restituted_displacement/2013Mar'
    data_pile = pile.make_pile(datapath)

    # And the stations:
    stations = model.load_stations('/media/usb/webnet/meta/stations.pf')

    # Station code of the trace you want to scale against:
    reference_id = 'NKC'

    # Frequency band to use:
    fband = {'order': 4, 'corner_hp': 1.0, 'corner_lp': 4.}

    # And a taper to avoid filtering artefacts.
    taper = trace.CosFader(xfrac=0.25)

    # Define a window to chop traces. In this case a static length of 20
    # seconds will be used and the synthetic phase arrival will be in the
Ejemplo n.º 35
0
def snuffle(pile=None, **kwargs):
    '''View pile in a snuffler window.

    :param pile: :py:class:`pile.Pile` object to be visualized
    :param stations: list of `pyrocko.model.Station` objects or ``None``
    :param events: list of `pyrocko.model.Event` objects or ``None``
    :param markers: list of `pyrocko.gui.util.Marker` objects or ``None``
    :param ntracks: int, number of tracks to be shown initially (default: 12)
    :param follow: time interval (in seconds) for real time follow mode or
        ``None``
    :param controls: bool, whether to show the main controls (default:
        ``True``)
    :param opengl: bool, whether to use opengl (default: ``False``)
    :param paths: list of files and directories to search for trace files
    :param regex: regular expression which filenames must match
    :param format: format of input files
    :param cache_dir: cache directory with trace meta information
    :param force_cache: bool, whether to use the cache when attribute spoofing
        is active
    :param store_path: filename template, where to store trace data from input
        streams
    :param store_interval: float, time interval (in seconds) between stream
        buffer dumps
    :param want_markers: bool, whether markers should be returned
    :param launch_hook: callback function called before snuffler window is
        shown
    '''
    from .snuffler_app import Snuffler, SnufflerWindow, \
        setup_acquisition_sources, PollInjector

    if pile is None:
        # the `pile` argument shadows the pyrocko.pile module, so import it
        # under a different name before calling make_pile()
        from pyrocko import pile as pile_mod
        pile = pile_mod.make_pile()

    global app
    if app is None:
        import locale
        locale.setlocale(locale.LC_ALL, 'C')
        app = Snuffler()

    kwargs_load = {}
    for k in ('paths', 'regex', 'format', 'cache_dir', 'force_cache'):
        try:
            kwargs_load[k] = kwargs.pop(k)
        except KeyError:
            pass

    store_path = kwargs.pop('store_path', None)
    store_interval = kwargs.pop('store_interval', 600)
    want_markers = kwargs.pop('want_markers', False)
    launch_hook = kwargs.pop('launch_hook', None)

    win = SnufflerWindow(pile, **kwargs)
    if launch_hook:
        launch_hook(win)

    sources = []
    pollinjector = None
    tempdir = None
    if 'paths' in kwargs_load:
        sources.extend(setup_acquisition_sources(kwargs_load['paths']))
        if sources:
            if store_path is None:
                tempdir = tempfile.mkdtemp('', 'snuffler-tmp-')
                store_path = pjoin(
                    tempdir,
                    'trace-%(network)s.%(station)s.%(location)s.%(channel)s.'
                    '%(tmin)s.mseed')
            elif os.path.isdir(store_path):
                store_path = pjoin(
                    store_path,
                    'trace-%(network)s.%(station)s.%(location)s.%(channel)s.'
                    '%(tmin)s.mseed')

            pollinjector = PollInjector(
                pile,
                fixation_length=store_interval,
                path=store_path)

            for source in sources:
                source.start()
                pollinjector.add_source(source)

        win.get_view().load(**kwargs_load)

    if not win.is_closing():
        app.install_sigint_handler()
        app.exec_()
        app.uninstall_sigint_handler()

    for source in sources:
        source.stop()

    if pollinjector:
        pollinjector.fixate_all()

    ret = win.return_tag()

    if want_markers:
        markers = win.get_view().get_markers()

    del win
    gc.collect()

    if tempdir:
        shutil.rmtree(tempdir)

    if want_markers:
        return ret, markers
    else:
        return ret
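
A typical call of this function, assuming waveform files under a placeholder directory data/:

from pyrocko import pile

p = pile.make_pile(['data/'])
ret, markers = snuffle(p, ntracks=12, want_markers=True)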
Ejemplo n.º 36
0
def prep_orient(datapath,
                st,
                loc,
                catalog,
                dir_ro,
                v_rayleigh,
                bp,
                dt_start,
                dt_stop,
                ccmin=0.80,
                plot_heatmap=False,
                plot_distr=False,
                debug=False):
    """
    Perform orientation analysis using Rayleigh waves, main function.

    Time window: 20 s before the 4.0 km/s arrival and 600 s afterwards
    (Stachnik et al. 2012).
    - compute the radial component for trial rotation angles covering
      0 to 360 deg
    - for each, cross-correlate hilbert(R) with the Z component
    - call plotting functions and/or write results to file

    :param datapath: path to rrd data
    :param st: current station (pyrocko station object)
    :param catalog: list of pyrocko events used for analysis
    :param dir_ro: output directory
    :param plot_heatmap: bool, optional
    :param plot_distr: bool, optional
    """
    logs = logging.getLogger('prep_orient')
    st_data_pile = pile.make_pile(datapath,
                                  regex='%s_%s_' % (st.network, st.station),
                                  show_progress=False)
    n_ev = len(catalog)

    if st_data_pile.tmin is not None and st_data_pile.tmax is not None:

        # calculate dist between all events and current station
        r_arr_by_ev = num.empty(n_ev)
        ev_lats = num.asarray([ev.lat for ev in catalog])
        ev_lons = num.asarray([ev.lon for ev in catalog])
        dists = distance_accurate50m_numpy(a_lats=ev_lats,
                                           a_lons=ev_lons,
                                           b_lats=st.lat,
                                           b_lons=st.lon,
                                           implementation='c')
        r_arr_by_ev = (dists / 1000.) / v_rayleigh
        cc_i_ev_vs_rota = num.empty((n_ev, 360))
        rot_angles = range(-180, 180, 1)
        for i_ev, ev in enumerate(catalog):
            arrT = ev.time + r_arr_by_ev[i_ev]

            start_twd1 = ev.time
            end_twd1 = arrT + 1800

            trZ = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'Z')
            trR = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'R')
            trT = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'T')

            start_twd2 = ev.time + r_arr_by_ev[i_ev] - dt_start
            end_twd2 = arrT + dt_stop

            if len(trZ) == 1 and len(trR) == 1 and len(trT) == 1:
                trZ = trZ[0]
                trR = trR[0]
                trT = trT[0]
                # debugging - window selection:
                if debug is True:
                    trace.snuffle([trZ, trR, trT],
                                  markers=[
                                      pm.Marker(nslc_ids=[
                                          trZ.nslc_id, trR.nslc_id, trT.nslc_id
                                      ],
                                                tmin=start_twd2,
                                                tmax=end_twd2),
                                      pm.Marker(nslc_ids=[
                                          trZ.nslc_id, trR.nslc_id, trT.nslc_id
                                      ],
                                                tmin=arrT,
                                                tmax=arrT + 3)
                                  ])

            else:
                cc_i_ev_vs_rota[i_ev, :] = num.nan
                continue

            try:
                trZ.bandpass(bp[0], bp[1], bp[2])
                trZ.chop(tmin=start_twd2, tmax=end_twd2)
            except trace.NoData:
                logs.warning('no data %s %s %s' % (trZ, trR, trT))
                continue

            for i_r, r in enumerate(rot_angles):
                print('rotation angle [deg]: %5d' % r, end='\r')
                rot_2, rot_3 = trace.rotate(traces=[trR, trT],
                                            azimuth=r,
                                            in_channels=['R', 'T'],
                                            out_channels=['2', '3'])
                rot_2_y = rot_2.ydata
                rot_2_hilb = num.imag(trace.hilbert(rot_2_y, len(rot_2_y)))
                rot_2_hilb_tr = trace.Trace(deltat=rot_2.deltat,
                                            ydata=rot_2_hilb,
                                            tmin=rot_2.tmin)
                # problem: rot_2 and rot_2_hilb look exactly the same!
                # --> no phase shift. why? should be num.imag!!!
                # trace.snuffle([rot_2, rot_2_hilb_tr])
                rot_2_hilb_tr.bandpass(bp[0], bp[1], bp[2])
                rot_2_hilb_tr.chop(tmin=start_twd2, tmax=end_twd2)

                # if st.station == 'RORO' and r == 0:
                #     trace.snuffle([rot_2_hilb_tr, trZ])
                # normalize traces
                trZ.ydata /= abs(max(trZ.ydata))
                rot_2_hilb_tr.ydata /= abs(max(rot_2_hilb_tr.ydata))

                c = trace.correlate(trZ,
                                    rot_2_hilb_tr,
                                    mode='valid',
                                    normalization='normal')

                t, coef = c.max()
                t2, coef2 = max_or_min(c)
                '''
                if st.station == 'MATE' and r == 0:
                    print(i_ev, ev.name, ev.depth)
                    print(r, t, coef, t2, coef2)
                    trace.snuffle([trZ, trR, rot_2_hilb_tr])
                '''
                cc_i_ev_vs_rota[i_ev, i_r] = coef
        '''
        if st.station == 'MATE':
            for i_ev in range(n_ev):
                print(num.argmax(cc_i_ev_vs_rota[i_ev,:]),
                      num.max(cc_i_ev_vs_rota[i_ev,:]))
        '''

        if plot_heatmap is True:
            fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 2))

            cax = ax.imshow(cc_i_ev_vs_rota,
                            interpolation='nearest',
                            vmin=-1.0,
                            vmax=1.0,
                            aspect='auto',
                            extent=[-180, 180, n_ev, 0],
                            cmap='binary')
            ax.set_ylabel('i_ev')
            ax.set_xlabel('Correction angle (deg)')
            ax.set_title('%s %s' % (st.network, st.station))
            cbar = fig.colorbar(cax,
                                ticks=[0, 0.5, 1.0],
                                orientation='horizontal',
                                fraction=0.05,
                                pad=0.5)
            cbar.ax.set_xticklabels(['0', '0.5', '1.0'])
            plt.tight_layout()
            # plt.show(fig)
            fig.savefig(
                os.path.join(
                    dir_ro, '%s_%s_%s_rot_cc_heatmap.png' %
                    (st.network, st.station, loc)))
            plt.close()

        if plot_distr is True:
            plot_ccdistr_each_event(cc_i_ev_vs_rota, catalog, rot_angles, st,
                                    loc, dir_ro)

        median_a, mean_a, std_a, switched, n_ev =\
            get_m_angle_switched(cc_i_ev_vs_rota, catalog, st, ccmin)

        dict_ev_angle = get_m_angle_all(cc_i_ev_vs_rota, catalog, st, ccmin)

        return median_a, mean_a, std_a, switched, dict_ev_angle, n_ev
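
The measurement inside the rotation loop above (rotate the horizontals by a trial angle, phase-shift the rotated radial by 90 degrees with a Hilbert transform, cross-correlate with the vertical) can be isolated into a small helper. A sketch with a hypothetical function name, assuming trZ, trR and trT are already filtered and chopped pyrocko traces with equal sampling:

import numpy as num
from pyrocko import trace

def orientation_cc(trZ, trR, trT, angle):
    # rotate R/T by the trial correction angle
    rot_2, _rot_3 = trace.rotate([trR, trT], azimuth=angle,
                                 in_channels=['R', 'T'],
                                 out_channels=['2', '3'])

    # 90 deg phase shift of the rotated radial via the Hilbert transform
    hilb = num.imag(trace.hilbert(rot_2.ydata, len(rot_2.ydata)))
    tr_hilb = trace.Trace(deltat=rot_2.deltat, ydata=hilb, tmin=rot_2.tmin)

    # normalized cross-correlation with the vertical component
    c = trace.correlate(trZ, tr_hilb, mode='valid', normalization='normal')
    _tshift, coef = c.max()
    return coef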
Ejemplo n.º 37
0
from pyrocko import io, model, pile, util
from pyrocko.example import get_example_data

# Download example data
get_example_data('data_conversion', recursive=True)

input_path = 'data_conversion/mseed'
output_path = 'data_conversion/sac/' \
        '%(dirhz)s/%(station)s_%(channel)s_%(tmin)s.sac'

fn_stations = 'data_conversion/stations.txt'

stations_list = model.load_stations(fn_stations)

stations = {}
for s in stations_list:
    stations[s.network, s.station, s.location] = s
    s.set_channels_by_name(*'BHN BHE BHZ BLN BLE BLZ'.split())

p = pile.make_pile(input_path)
h = 3600.
tinc = 1 * h
tmin = util.day_start(p.tmin)
for traces in p.chopper_grouped(tmin=tmin,
                                tinc=tinc,
                                gather=lambda tr: tr.nslc_id):
    for tr in traces:
        dirhz = '%ihz' % int(round(1. / tr.deltat))
        io.save([tr],
                output_path,
                format='sac',
                additional={'dirhz': dirhz},
                stations=stations)
Ejemplo n.º 38
0
from pyrocko import pile, io, util
import time, calendar

# when pile.make_pile() is called without any arguments, the command line 
# parameters given to the script are searched for waveform files and directories
p = pile.make_pile()

# get timestamp for full hour before first data sample in all selected traces
tmin = calendar.timegm( time.gmtime(p.tmin)[:4] + ( 0, 0 ) )

tinc = 3600.
tpad = 10.
target_deltat = 0.1

# iterate over the data, with a window length of one hour and 2x10 seconds of
# overlap
for traces in p.chopper(tmin=tmin, tinc=tinc, tpad=tpad):
    
    if traces: # the list could be empty due to gaps
        for tr in traces:
            tr.downsample_to(target_deltat, snap=True, demean=False)
            
            # remove overlapping
            tr.chop(tr.wmin, tr.wmax)
        
        window_start = traces[0].wmin
        timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H')
        filepath = 'downsampled/%(station)s_%(channel)s_%(mytimestring)s.mseed'
        io.save(traces, filepath, additional={'mytimestring': timestring})

Ejemplo n.º 39
0
from pyrocko import pile, io, util
import time
import calendar
''' Chop a pile of waveform traces into segments '''

p = pile.make_pile(['test.mseed'])

# get timestamp for full hour before first data sample in all selected traces
tmin = calendar.timegm(time.gmtime(p.tmin)[:4] + (0, 0))

# iterate over the data, with a window length of one hour
for traces in p.chopper(tmin=tmin, tinc=3600):
    if traces:    # the list could be empty due to gaps
        window_start = traces[0].wmin
        timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H')
        filepath = 'test_hourfiles/hourfile-%s.mseed' % timestring
        io.save(traces, filepath)