Example no. 1
def save_all(events, eventsclusters, clusters, conf, resdir):
    '''
    Save all results of the clustering analysis
    '''

    # save clustered events
    fn = os.path.join(resdir, 'clustered_events.pf')
    if os.path.isfile(fn):
        os.remove(fn)
    for iev, ev in enumerate(events):
        tag = 'cluster:' + str(eventsclusters[iev])
        if ev.tags is not None:
            ev.tags.append(tag)
        else:
            ev.tags = [tag]
    model.dump_events(events, fn)

    # save events of each cluster
    for cluster_id in clusters:
        wished_events = []
        fn = os.path.join(resdir, 'cluster.' + str(cluster_id) + '.events.pf')
        for iev, evcl in enumerate(eventsclusters):
            if evcl == cluster_id:
                wished_events.append(events[iev])
        model.dump_events(wished_events, fn)

    # save clusters
    fn = os.path.join(resdir, 'processed.clusters')
    save_obj(clusters, fn)

    # save eventsclusters
    fn = os.path.join(resdir, 'processed.eventsclusters')
    save_obj(eventsclusters, fn)
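The files written above can be read back with pyrocko's model.load_events; cluster membership survives the round trip through the 'cluster:<id>' tag attached before dumping. A minimal sketch, assuming it is run inside the result directory and that the tags are preserved by the event format (which the code above relies on):

from pyrocko import model

events = model.load_events('clustered_events.pf')
# regroup the events by the tag attached in save_all(); cluster id 3 is an arbitrary example
cluster_3 = [ev for ev in events if 'cluster:3' in ev.tags]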
Example no. 2
    def dump(x, gm, indices):
        if type == 'vector':
            print(' ',
                  ' '.join('%16.7g' % problem.extract(x, i) for i in indices),
                  '%16.7g' % gm,
                  file=out)

        elif type == 'source':
            source = problem.get_source(x)
            if effective_lat_lon:
                source.set_origin(*source.effective_latlon)
            guts.dump(source, stream=out)

        elif type == 'event':
            ev = problem.get_source(x).pyrocko_event()
            if effective_lat_lon:
                ev.set_origin(*ev.effective_latlon)

            model.dump_events([ev], stream=out)

        elif type == 'event-yaml':
            ev = problem.get_source(x).pyrocko_event()
            if effective_lat_lon:
                ev.set_origin(*ev.effective_latlon)
            guts.dump_all([ev], stream=out)

        else:
            raise GrondError('Invalid argument: type=%s' % repr(type))
Example no. 3
    def testEventExtras(self):
        tempdir = self.make_tempdir()

        eextra = model.Event(lat=12., lon=12.)
        data = [
            (dict(i=1, f=1.0, n=None, b=True, s='abc', e=eextra), None),
            ({1: 'abc'}, guts.ValidationError),
            ({'e': model.Event(lat=1, lon=1)}, guts.ValidationError)]

        for d, exc in data:
            ev1 = model.Event(
                lat=10.,
                lon=11.,
                depth=4000.,
                magnitude=5.,
                extras=d)

            fn = pjoin(tempdir, 'test.events')
            with self.assertRaises(model.EventExtrasDumpError):
                model.dump_events([ev1], fn)

            if exc is None:
                ev1.validate()
                ev2 = guts.load(string=ev1.dump())
                for k in d:
                    assert isinstance(ev2.extras[k], type(d[k]))

            else:
                with self.assertRaises(exc):
                    ev1.validate()
Example no. 4
    def call(self):
        markers = self.get_selected_event_markers()
        events = [m.get_event() for m in markers]
        if len(events) == 0:
            self.fail('no events found')

        out_filename = self.output_filename('Template for output files')
        model.dump_events(events, filename=out_filename)
Example no. 6
def save(synthetic_traces, event, stations, savedir, noise_events=False):
    model.dump_stations(stations, savedir + 'model.txt')
    io.save(synthetic_traces, savedir + 'traces.mseed')
    model.dump_events(event, savedir + 'event.txt')
    model.dump_stations(stations, savedir + 'stations.pf')
    st_xml = stationxml.FDSNStationXML.from_pyrocko_stations(
        stations, add_flat_responses_from='M')
    st_xml.dump_xml(filename=savedir + 'stations.xml')
    if noise_events is not False:
        model.dump_events(noise_events, savedir + 'events_noise.txt')
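Note that save() builds its output paths by plain string concatenation, so savedir is expected to end with a path separator, and the event argument is handed straight to model.dump_events, which takes a list of events. A hypothetical call, with all names as placeholders, might look like:

# hypothetical usage: note the trailing separator, since paths are concatenated
save(synthetic_traces, [event], stations, 'synthetics/run1/')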
Example no. 7
def command_init(args):
    '''
    Execution of command init
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

    parser, options, args = cl_parse('init', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if ((not options.force) and (os.path.isdir(conf.project_dir))):
        die('project dir exists: %s; use force option' % conf.project_dir)
    else:
        if os.path.isdir(conf.project_dir):
            shutil.rmtree(conf.project_dir)
        os.mkdir(conf.project_dir)
        conf.dump(filename=os.path.join(conf.project_dir, 'seiscloud.config'))

        dst = os.path.join(conf.project_dir, 'catalog.pf')

        if conf.catalog_origin == 'file':
            src = conf.catalog_fn
            shutil.copyfile(src, dst)
        else:
            if conf.catalog_origin == 'globalcmt':
                orig_catalog = catalog.GlobalCMT()
            else:  # geofon
                orig_catalog = catalog.Geofon()
            events = orig_catalog.get_events(time_range=(util.str_to_time(
                conf.tmin), util.str_to_time(conf.tmax)),
                                             magmin=conf.magmin,
                                             latmin=conf.latmin,
                                             latmax=conf.latmax,
                                             lonmin=conf.lonmin,
                                             lonmax=conf.lonmax)

            selevents = [ev for ev in events if ev.magnitude <= conf.magmax]
            model.dump_events(selevents, dst)

        print('Project directory prepared "%s"' % conf.project_dir)
Example no. 8
def save_all(events, eventsclusters, clusters, conf, resdir):
    '''
    Save all results of the clustering analysis
    '''

    # save events of each cluster
    for cluster_id in clusters:
        wished_events = []
        fn = os.path.join(resdir, 'cluster.' + str(cluster_id) + '.events.pf')
        for iev, evcl in enumerate(eventsclusters):
            if evcl == cluster_id:
                wished_events.append(events[iev])
        model.dump_events(wished_events, fn)

    # save clusters
    fn = os.path.join(resdir, 'processed.clusters')
    save_obj(clusters, fn)

    # save eventsclusters
    fn = os.path.join(resdir, 'processed.eventsclusters')
    save_obj(eventsclusters, fn)
Example no. 9
from pyrocko.client import fdsn
from pyrocko import util, io, trace, model
from pyrocko.io import quakeml

tmin = util.stt('2014-01-01 16:10:00.000')
tmax = util.stt('2014-01-01 16:39:59.000')

# request events at IRIS for the given time span
request_event = fdsn.event(site='iris', starttime=tmin, endtime=tmax)

# parse QuakeML and extract pyrocko events
events = quakeml.QuakeML.load_xml(request_event).get_pyrocko_events()
model.dump_events(events, 'iris-events.pf')

# select stations by their NSLC id and wildcards (asterisk) for waveform
# download
selection = [
    ('*', 'HMDT', '*', '*', tmin, tmax),  # all available components
    ('GE', 'EIL', '*', '*Z', tmin, tmax),  # all vertical components
]

# Restricted access token
# token = open('token.asc', 'rb').read()
# request_waveform = fdsn.dataselect(site='geofon', selection=selection,
#                                    token=token)

# setup a waveform data request
request_waveform = fdsn.dataselect(site='geofon', selection=selection)

# write the incoming data stream to 'traces.mseed'
with open('traces.mseed', 'wb') as file:
    file.write(request_waveform.read())
Example no. 10
from pyrocko import util, model
from pyrocko.client import catalog

tmin = util.str_to_time('2017-01-02 13:00:00')
tmax = util.str_to_time('2017-01-02 14:00:00')

# create an instance of the global CMT catalog
global_cmt_catalog = catalog.GlobalCMT()

# query the catalog
events = global_cmt_catalog.get_events(
    time_range=(tmin, tmax),
    magmin=4.0,
    latmin=-5.0,
    latmax=-4.0,
    lonmin=-77,
    lonmax=-76)

# dump events to catalog
model.dump_events(events, 'GCMT.EQ.txt')
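model.dump_events writes pyrocko's plain-text event format by default; Example no. 16 further down passes format="yaml" to the same call, and either file can be read back with model.load_events. A minimal sketch of the round trip for the catalog dumped above:

from pyrocko import model

model.dump_events(events, filename='GCMT.EQ.yaml', format='yaml')  # YAML variant, as in Example no. 16
events_reloaded = model.load_events('GCMT.EQ.txt')
assert len(events_reloaded) == len(events)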
Example no. 11
def search(config,
           override_tmin=None,
           override_tmax=None,
           show_detections=False,
           show_movie=False,
           show_window_traces=False,
           force=False,
           stop_after_first=False,
           nparallel=6,
           save_imax=False,
           bark=False):

    fp = config.expand_path

    run_path = fp(config.run_path)

    # if op.exists(run_path):
    #     if force:
    #         shutil.rmtree(run_path)
    #     else:
    #         raise common.LassieError(
    #             'run directory already exists: %s' %
    #             run_path)

    util.ensuredir(run_path)

    write_config(config, op.join(run_path, 'config.yaml'))

    ifm_path_template = config.get_ifm_path_template()
    detections_path = config.get_detections_path()
    events_path = config.get_events_path()
    figures_path_template = config.get_figures_path_template()

    config.setup_image_function_contributions()
    ifcs = config.image_function_contributions

    grid = config.get_grid()
    receivers = config.get_receivers()

    norm_map = gridmod.geometrical_normalization(grid, receivers)

    data_paths = fp(config.data_paths)
    for data_path in fp(data_paths):
        if not op.exists(data_path):
            pass

    p = pile.make_pile(data_paths, fileformat='detect')
    if p.is_empty():
        raise common.LassieError('no usable waveforms found')

    for ifc in ifcs:
        ifc.prescan(p)

    shift_tables = []
    tshift_minmaxs = []
    for ifc in ifcs:
        shift_tables.append(ifc.get_table(grid, receivers))
        tshift_minmaxs.append(num.nanmin(shift_tables[-1]))
        tshift_minmaxs.append(num.nanmax(shift_tables[-1]))

    fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs)

    tshift_min = min(tshift_minmaxs)
    tshift_max = max(tshift_minmaxs)

    if config.detector_tpeaksearch is not None:
        tpeaksearch = config.detector_tpeaksearch
    else:
        tpeaksearch = (tshift_max - tshift_min) + 1.0 / fsmooth_min

    tpad = max(ifc.get_tpad() for ifc in ifcs) + \
        (tshift_max - tshift_min) + tpeaksearch

    tinc = (tshift_max - tshift_min) * 10. + 3.0 * tpad
    tavail = p.tmax - p.tmin
    tinc = min(tinc, tavail - 2.0 * tpad)

    if tinc <= 0:
        raise common.LassieError('available waveforms too short \n'
                                 'required: %g s\n'
                                 'available: %g s\n' % (2. * tpad, tavail))

    blacklist = set(tuple(s.split('.')) for s in config.blacklist)
    whitelist = set(tuple(s.split('.')) for s in config.whitelist)

    distances = grid.distances(receivers)
    distances_to_grid = num.min(distances, axis=0)

    distance_min = num.min(distances)
    distance_max = num.max(distances)

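    # index only the receivers that pass the blacklist/whitelist and lie within config.distance_max of the grid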
    station_index = dict(
        (rec.codes, i) for (i, rec) in enumerate(receivers)
        if rec.codes not in blacklist and (
            not whitelist or rec.codes in whitelist) and (
                config.distance_max is None
                or distances_to_grid[i] <= config.distance_max))

    check_data_consistency(p, config)

    deltat_cf = max(p.deltats.keys())
    assert deltat_cf > 0.0

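    # coarsen the CF sampling interval while all image function contributions still support the coarser rate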
    while True:
        if not all(ifc.deltat_cf_is_available(deltat_cf * 2) for ifc in ifcs):
            break

        deltat_cf *= 2
    logger.info('CF lassie sampling interval (rate): %g s (%g Hz)' %
                (deltat_cf, 1.0 / deltat_cf))

    ngridpoints = grid.size()

    logger.info('number of grid points: %i' % ngridpoints)
    logger.info('minimum source-receiver distance: %g m' % distance_min)
    logger.info('maximum source-receiver distance: %g m' % distance_max)
    logger.info('minimum travel-time: %g s' % tshift_min)
    logger.info('maximum travel-time: %g s' % tshift_max)

    idetection = 0

    tmin = override_tmin or config.tmin or p.tmin + tpad
    tmax = override_tmax or config.tmax or p.tmax - tpad

    events = config.get_events()
    twindows = []
    if events is not None:
        for ev in events:
            if tmin <= ev.time <= tmax:
                twindows.append(
                    (ev.time + tshift_min - (tshift_max - tshift_min) *
                     config.event_time_window_factor,
                     ev.time + tshift_min + (tshift_max - tshift_min) *
                     config.event_time_window_factor))

    else:
        twindows.append((tmin, tmax))

    for iwindow_group, (tmin_win, tmax_win) in enumerate(twindows):

        nwin = int(math.ceil((tmax_win - tmin_win) / tinc))

        logger.info('start processing time window group %i/%i: %s - %s' %
                    (iwindow_group + 1, len(twindows),
                     util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

        logger.info('number of time windows: %i' % nwin)
        logger.info('time window length: %g s' % (tinc + 2.0 * tpad))
        logger.info('time window payload: %g s' % tinc)
        logger.info('time window padding: 2 x %g s' % tpad)
        logger.info('time window overlap: %g%%' % (100.0 * 2.0 * tpad /
                                                   (tinc + 2.0 * tpad)))

        iwin = -1

        for trs in p.chopper(
                tmin=tmin_win,
                tmax=tmax_win,
                tinc=tinc,
                tpad=tpad,
                want_incomplete=config.fill_incomplete_with_zeros,
                trace_selector=lambda tr: tr.nslc_id[:3] in station_index):
            iwin += 1
            trs_ok = []
            for tr in trs:
                if tr.ydata.size == 0:
                    logger.warn('skipping empty trace: %s.%s.%s.%s' %
                                tr.nslc_id)

                    continue

                if not num.all(num.isfinite(tr.ydata)):
                    logger.warn('skipping trace because of invalid values: '
                                '%s.%s.%s.%s' % tr.nslc_id)

                    continue

                trs_ok.append(tr)

            trs = trs_ok

            if not trs:
                continue

            logger.info('processing time window %i/%i: %s - %s' %
                        (iwin + 1, nwin, util.time_to_str(
                            trs[0].wmin), util.time_to_str(trs[0].wmax)))

            wmin = trs[0].wmin
            wmax = trs[0].wmax

            if config.fill_incomplete_with_zeros:
                trs = zero_fill(trs, wmin - tpad, wmax + tpad)

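            # align the window (padded by tpeaksearch) to the CF sampling grid so peaks near the window edges remain usable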
            t0 = math.floor(wmin / deltat_cf) * deltat_cf
            iwmin = int(round((wmin - tpeaksearch - t0) / deltat_cf))
            iwmax = int(round((wmax + tpeaksearch - t0) / deltat_cf))
            lengthout = iwmax - iwmin + 1

            pdata = []
            trs_debug = []
            parstack_params = []
            for iifc, ifc in enumerate(ifcs):
                dataset = ifc.preprocess(trs, wmin - tpeaksearch,
                                         wmax + tpeaksearch,
                                         tshift_max - tshift_min, deltat_cf)
                if not dataset:
                    continue

                nstations_selected = len(dataset)

                nsls_selected, trs_selected = zip(*dataset)

                for tr in trs_selected:
                    tr.meta = {'tabu': True}

                trs_debug.extend(trs + list(trs_selected))

                istations_selected = num.array(
                    [station_index[nsl] for nsl in nsls_selected],
                    dtype=num.int)
                arrays = [tr.ydata.astype(num.float) for tr in trs_selected]

                offsets = num.array([
                    int(round((tr.tmin - t0) / deltat_cf))
                    for tr in trs_selected
                ],
                                    dtype=num.int32)

                w = ifc.get_weights(nsls_selected)

                weights = num.ones((ngridpoints, nstations_selected))
                weights *= w[num.newaxis, :]
                weights *= ifc.weight

                shift_table = shift_tables[iifc]

                ok = num.isfinite(shift_table[:, istations_selected])
                bad = num.logical_not(ok)

                shifts = -num.round(shift_table[:, istations_selected] /
                                    deltat_cf).astype(num.int32)

                weights[bad] = 0.0
                shifts[bad] = num.max(shifts[ok])

                pdata.append((list(trs_selected), shift_table, ifc))
                parstack_params.append((arrays, offsets, shifts, weights))

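            # stack either in fixed-size blocks (stacking_blocksize) or in a single pass over the whole window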
            if config.stacking_blocksize is not None:
                ipstep = config.stacking_blocksize
                frames = None
            else:
                ipstep = lengthout
                frames = num.zeros((ngridpoints, lengthout))

            twall_start = time.time()
            frame_maxs = num.zeros(lengthout)
            frame_argmaxs = num.zeros(lengthout, dtype=num.int)
            ipmin = iwmin
            while ipmin < iwmin + lengthout:
                ipsize = min(ipstep, iwmin + lengthout - ipmin)
                if ipstep == lengthout:
                    frames_p = frames
                else:
                    frames_p = num.zeros((ngridpoints, ipsize))

                for (arrays, offsets, shifts, weights) in parstack_params:
                    frames_p, _ = parstack(arrays,
                                           offsets,
                                           shifts,
                                           weights,
                                           0,
                                           offsetout=ipmin,
                                           lengthout=ipsize,
                                           result=frames_p,
                                           nparallel=nparallel,
                                           impl='openmp')

                if config.sharpness_normalization:
                    frame_p_maxs = frames_p.max(axis=0)
                    frame_p_means = num.abs(frames_p).mean(axis=0)
                    frames_p *= (frame_p_maxs / frame_p_means)[num.newaxis, :]
                    frames_p *= norm_map[:, num.newaxis]

                if config.ifc_count_normalization:
                    frames_p *= 1.0 / len(ifcs)

                frame_maxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    frames_p.max(axis=0)
                frame_argmaxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    pargmax(frames_p)

                ipmin += ipstep
                del frames_p

            twall_end = time.time()

            logger.info('wallclock time for stacking: %g s' %
                        (twall_end - twall_start))

            tmin_frames = t0 + iwmin * deltat_cf

            tr_stackmax = trace.Trace('',
                                      'SMAX',
                                      '',
                                      '',
                                      tmin=tmin_frames,
                                      deltat=deltat_cf,
                                      ydata=frame_maxs)

            tr_stackmax.meta = {'tabu': True}

            trs_debug.append(tr_stackmax)

            if show_window_traces:
                trace.snuffle(trs_debug)

            ydata_window = tr_stackmax.chop(wmin, wmax,
                                            inplace=False).get_ydata()

            logger.info('CF stats: min %g, max %g, median %g' %
                        (num.min(ydata_window), num.max(ydata_window),
                         num.median(ydata_window)))
            if nstations_selected != 17:
                logger.info(
                    'Warning, station outage detected! Nr of station operable: %s'
                    % nstations_selected)

            detector_threshold_seiger = config.detector_threshold - (
                (17 - nstations_selected) * 4
            )  # 17 is maximum number of seiger stations, 4 is a mean baseline for noise
            if nstations_selected != 17:
                logger.info(
                    'Warning, station outage detected! Nr of station operable: %s, threshold now: %s'
                    % (nstations_selected, detector_threshold_seiger))

            tpeaks, apeaks = list(
                zip(*[(tpeak, apeak) for (tpeak, apeak) in zip(
                    *tr_stackmax.peaks(detector_threshold_seiger, tpeaksearch))
                      if wmin <= tpeak and tpeak < wmax])) or ([], [])

            tr_stackmax_indx = tr_stackmax.copy(data=False)
            tr_stackmax_indx.set_ydata(frame_argmaxs.astype(num.int32))
            tr_stackmax_indx.set_location('i')

            for (tpeak, apeak) in zip(tpeaks, apeaks):

                iframe = int(round((tpeak - tmin_frames) / deltat_cf))
                imax = frame_argmaxs[iframe]

                latpeak, lonpeak, xpeak, ypeak, zpeak = \
                    grid.index_to_location(imax)

                idetection += 1

                detection = Detection(id='%06i' % idetection,
                                      time=tpeak,
                                      location=geo.Point(lat=float(latpeak),
                                                         lon=float(lonpeak),
                                                         x=float(xpeak),
                                                         y=float(ypeak),
                                                         z=float(zpeak)),
                                      ifm=float(apeak))

                if bark:
                    common.bark()

                logger.info('detection found: %s' % str(detection))

                f = open(detections_path, 'a')
                f.write(
                    '%06i %s %g %g %g %g %g %g\n' %
                    (idetection,
                     util.time_to_str(tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
                     apeak, latpeak, lonpeak, xpeak, ypeak, zpeak))

                f.close()

                ev = detection.get_event()
                f = open(events_path, 'a')
                model.dump_events([ev], stream=f)
                f.close()

                if show_detections or config.save_figures:
                    fmin = min(ifc.fmin for ifc in ifcs)
                    fmax = min(ifc.fmax for ifc in ifcs)

                    fn = figures_path_template % {
                        'id': util.tts(t0).replace(" ", "T"),
                        'format': 'png'
                    }

                    util.ensuredirs(fn)

                    if frames is not None:
                        frames_p = frames
                        tmin_frames_p = tmin_frames
                        iframe_p = iframe

                    else:
                        iframe_min = max(
                            0, int(round(iframe - tpeaksearch / deltat_cf)))
                        iframe_max = min(
                            lengthout - 1,
                            int(round(iframe + tpeaksearch / deltat_cf)))

                        ipsize = iframe_max - iframe_min + 1
                        frames_p = num.zeros((ngridpoints, ipsize))
                        tmin_frames_p = tmin_frames + iframe_min * deltat_cf
                        iframe_p = iframe - iframe_min

                        for (arrays, offsets, shifts, weights) \
                                in parstack_params:

                            frames_p, _ = parstack(arrays,
                                                   offsets,
                                                   shifts,
                                                   weights,
                                                   0,
                                                   offsetout=iwmin +
                                                   iframe_min,
                                                   lengthout=ipsize,
                                                   result=frames_p,
                                                   nparallel=nparallel,
                                                   impl='openmp')

                        if config.sharpness_normalization:
                            frame_p_maxs = frames_p.max(axis=0)
                            frame_p_means = num.abs(frames_p).mean(axis=0)
                            frames_p *= (frame_p_maxs /
                                         frame_p_means)[num.newaxis, :]
                            frames_p *= norm_map[:, num.newaxis]

                        if config.ifc_count_normalization:
                            frames_p *= 1.0 / len(ifcs)
                    try:
                        plot.plot_detection(grid,
                                            receivers,
                                            frames_p,
                                            tmin_frames_p,
                                            deltat_cf,
                                            imax,
                                            iframe_p,
                                            xpeak,
                                            ypeak,
                                            zpeak,
                                            tr_stackmax,
                                            tpeaks,
                                            apeaks,
                                            detector_threshold_seiger,
                                            wmin,
                                            wmax,
                                            pdata,
                                            trs,
                                            fmin,
                                            fmax,
                                            idetection,
                                            tpeaksearch,
                                            movie=show_movie,
                                            show=show_detections,
                                            save_filename=fn,
                                            event=ev)
                    except:
                        pass

                    del frames_p

                if stop_after_first:
                    return

            tr_stackmax.chop(wmin, wmax)
            tr_stackmax_indx.chop(wmin, wmax)
            if save_imax is True:
                io.save([tr_stackmax, tr_stackmax_indx], ifm_path_template)

            del frames
        logger.info('end processing time window group: %s - %s' %
                    (util.time_to_str(tmin_win), util.time_to_str(tmax_win)))
    cat = Catalog()
    files = glob("%s/../figures/*qml*" % run_path)
    files.sort(key=os.path.getmtime)
    for file in files:
        cat_read = read_events(file)
        for event in cat_read:
            cat.append(event)
    cat.write("%s/../all_events_stacking.qml" % run_path, format="QUAKEML")
Example no. 12
                                             phasename=phasename, 
                                             event=event)
                    picks.append(m)
            progbar.update(i)

        progbar.finish()
        with open(fn, 'w') as f:
            f.write(datastr)

        gui_util.save_markers(picks, 'picks.pf')

    def reduce_stf(self, event, t):
        t -= self.interp_stf(event.magnitude)/2.
        return t
if __name__=='__main__':
    stations = model.load_stations('/media/usb/webnet/meta/stations.pf')
    events = []
    for efile in glob.glob('/home/marius/src/swarming/unperturbed/*/event.pf'):
        events.append(model.Event(load=efile))
    #events = model.load_events('/home/marius/src/swarming/events_swarm.pf')
    want_phases = {'p':('p', 'P'), 's':('s', 'S')}
    e = LocalEngine(use_config=True)
    s = e.get_store('vogtland_fischer_horalek_2000_vpvs169_minus4p')
    m = s.config.earthmodel_1d
    b = Bakery(stations=stations, 
               events=events,
               wanted_phases=want_phases, 
               model=m)
    model.dump_events(events, 'dumped_catalog.pf')
    b.run_n_write('picks.dat')
Example no. 13
with open(fn, 'r') as f:
    # windows empty line separation
    events = f.read().split('\r\n\r\n')

py_events = []
pattern = re.compile(r'(?P<mid>\D{3}\d{2})  (?P<t>\d{6}.\d) (?P<place>\D*\(\d{3}\)) (?P<lat>\d{2}.\d{2})N (?P<lon>\d{2}.\d{2})E h=(?P<depth>\d*)km mb=(?P<mb>\d.\d)', flags=re.IGNORECASE)

for e in events:
    m = pattern.search(e)
    if m:
        groupdict = m.groupdict()
        t = groupdict['t']
        #mb = groupdict['mb']
        #mw = groupdict['mw']
        #print '...................'
        #print mb
        #print mw
        date_str = '%s-%s-%s %s:%s:%s' % (year, month, groupdict['mid'][3:],
                                          t[0:2], t[2:4], t[4:])
        t = str_to_time(date_str)
        py_events.append(Event(name=groupdict['place'],
                               time=t,
                               lat=float(groupdict['lat']),
                               lon=float(groupdict['lon']),
                               depth=float(groupdict['depth'])))
    else:
        #print 'skipping ', e
        continue

dump_events(py_events, 'events_crawled.pf')
Example no. 14
from pyrocko import util, model
from pyrocko.client import catalog

tmin = util.str_to_time('2011-01-01 00:00:00')  # beginning time of query
tmax = util.str_to_time('2011-12-31 23:59:59')

# create an instance of the global CMT catalog
global_cmt_catalog = catalog.GlobalCMT()

# query the catalog
events = global_cmt_catalog.get_events(
    time_range=(tmin, tmax),
    magmin=2.,
    latmin=-35.,
    latmax=-20.,
    lonmin=-76.,
    lonmax=-65.)

print('Downloaded %s events' % len(events))
print('The last one is:')
print(events[-1])

# dump events to catalog
model.dump_events(events, 'northern_chile_events.txt')
Example no. 15
                  mechanisms=mechanisms,
                  magnitudes=magnitudes,
                  stf=stf)


    # setup stations/targets:
    #stats = load_stations(webnet+'/meta/stations.pf')
    stats = load_stations('stations.pf')
    #stats = []
    # Scrutinize the swarm using matplotlib

    noise = Noise(files='/media/usb/webnet/mseed/2008')

    # convert loaded stations to targets (see function at the top).
    #targets = guess_targets_from_stations(stats)
    targets = get_targets(stations, noise.data_pile, store_id=store_id)
    Visualizer(swarm, stats)

    # Processing that data will return a pyrocko.gf.seismosizer.Reponse object.
    response = engine.process(sources=swarm.get_sources(),
                              targets=targets)

    # Save the events
    dump_events(swarm.get_events(), 'events_swarm.pf')
    io.save(response.pyrocko_traces(), 'swarm.mseed')

    convolved_traces = stf.post_process(response)

    # Save traces:
    io.save(convolved_traces.traces_list(), 'swarm_stf.mseed')
Example no. 16
def report(env,
           report_config=None,
           update_without_plotting=True,
           make_index=True,
           make_archive=True,
           nthreads=0):

    if report_config is None:
        report_config = ReportConfig()
        report_config.set_basepath('.')

    event_name = env.get_current_event_name()
    logger.info('Creating report entry for run "%s"...' % event_name)

    fp = report_config.expand_path
    entry_path = expand_template(
        op.join(fp(report_config.report_base_path),
                report_config.entries_sub_path),
        dict(event_name=event_name, problem_name=event_name))

    if op.exists(entry_path) and not update_without_plotting:
        shutil.rmtree(entry_path)

    util.ensuredir(entry_path)
    plots_dir_out = op.join(entry_path, 'plots')
    util.ensuredir(plots_dir_out)
    configs_dir = op.join(op.split(__file__)[0], 'app/configs/')
    rundir_path = env.get_rundir_path()
    try:
        os.system("cp -r %s/grun/plots/* %s" % (rundir_path, plots_dir_out))
    except:
        pass
    os.system("cp %s/plot_collection.yaml %s" % (configs_dir, plots_dir_out))

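    # arrange the copied images into the <group>/default/ layout and file names expected by the report's plot groups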
    util.ensuredir("%s/shakemap/default/" % (plots_dir_out))
    os.system(
        "cp %s/*shakemap.png %s/shakemap/default/shakemap.default.gf_shakemap.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/shakemap.default.plot_group.yaml %s/shakemap/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/location/default/" % (plots_dir_out))
    os.system(
        "cp %s/*location.png %s/location/default/location.default.location.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/location.default.plot_group.yaml %s/location/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/production_data/default/" % (plots_dir_out))
    os.system(
        "cp %s/*production_data.png %s/production_data/default/production_data.default.production_data.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/production_data.default.plot_group.yaml %s/production_data/default/"
        % (configs_dir, plots_dir_out))

    util.ensuredir("%s/waveforms/default/" % (plots_dir_out))
    os.system(
        "cp %s/waveforms_1.png %s/waveforms/default/waveforms.default.waveforms_1.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_2.png %s/waveforms/default/waveforms.default.waveforms_2.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_3.png %s/waveforms/default/waveforms.default.waveforms_3.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/waveforms.default.plot_group.yaml %s/waveforms/default/" %
              (configs_dir, plots_dir_out))

    os.system("cp %s/grun/config.yaml %s/config.yaml" %
              (rundir_path, entry_path))

    try:

        event = model.load_events(op.join(rundir_path, "event.txt"))
        model.dump_events(event,
                          filename=op.join(entry_path, 'event_reference.yaml'),
                          format="yaml")
        event = event[0]
        from silvertine import plot
        pcc = report_config.plot_config_collection.get_weeded(env)
        plot.make_plots(env,
                        plots_path=op.join(entry_path, 'plots'),
                        plot_config_collection=pcc)

        try:
            run_info = env.get_run_info()
        except environment.NoRundirAvailable:
            run_info = None

        rie = ReportIndexEntry(path='.',
                               problem_name=event_name,
                               silvertine_version="0.01",
                               run_info=run_info)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_best = guts.load(filename=fn)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_reference = guts.load(filename=fn)

        fn = op.join(entry_path, 'index.yaml')
        guts.dump(rie, filename=fn)

        logger.info('Done creating report entry for run "%s".' % "test")

    #    report_index(report_config)

    #    if make_archive:
    #        report_archive(report_config)
    except FileNotFoundError:
        pass
Example no. 17
                except util.UnavailableDecimation as e:
                    logger.warn('Cannot downsample %s.%s.%s.%s: %s' %
                                (tr.nslc_id + (e, )))
                    continue

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    logger.info('prepared waveforms from %i stations' % len(stations))
Example no. 18
Origin = C.parseConfig('origin')
Conf = globalConf()
Config = C.parseConfig('config')

filter = FilterCfg(Config)

cfg = ConfigObj(dict=Config)
minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')

ev = Event(Origin['lat'], Origin['lon'], Origin['depth'], Origin['time'])
event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                    depth=float(ev.depth)*1000.,
                    time=util.str_to_time(ev.time))
newFreq = float(filter.newFrequency())
options.time = Origin['time']
options.duration = int(Conf['duration'])
sdspath = os.path.join(options.eventpath, 'data')
model.dump_events([event], os.path.join(sdspath, 'event.pf'))

tmin = util.str_to_time(ev.time)-600.
tmax = util.str_to_time(ev.time)+1800.

def get_stations(site, lat, lon, rmin, rmax, tmin, tmax, channel_pattern='BH*'):
    extra = {}
    if site == 'iris':
        extra.update(matchtimeseries=True)

    sx = fdsn.station(
            site=site, latitude=lat, longitude=lon,
            minradius=rmin, maxradius=rmax,
            startbefore=tmin, endafter=tmax, channel=channel_pattern,
            format='text', level='channel', includerestricted=False)
Example no. 19
                    str(eml.lon),
                    str(eml.depth),
                    str(eml.magnitude),
                    str(emw.magnitude)
                ])

            event_params.append([
                str(util.tts(eml.time)),
                str(eml.time),
                str(eml.lat),
                str(eml.lon),
                str(eml.depth),
                str(eml.magnitude),
                str(emw.magnitude)
            ])
model.dump_events(land_events, "landau_events.pf")
model.dump_events(ins_events, "ins_events.pf")

with open('seiger_catalog_alpha.csv', 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile,
                            delimiter=',',
                            quotechar='"',
                            quoting=csv.QUOTE_MINIMAL)
    spamwriter.writerow(
        ["Time", "Time(s)unix time", "Lat", "Lon", "Tiefe", "Ml", "Mw"])
    for p in event_params:
        spamwriter.writerow(p)

with open('seiger_catalog_alpha_landau.csv', 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile,
                            delimiter=',',
Example no. 20
    "mt3",
    "mt4",
    "mt5",
    "dc?",
    "_",
    "_",
]

order_mapping = dict(zip(order, range(len(order))))
wanted = ["name", "lat", "lon", "depth"]

events = []
with open(fn, "r") as f:
    for line in f.readlines():
        data = line.split()
        kwargs = {}
        for kw in wanted:
            val = data[order_mapping[kw]]
            val = float(val) if kw != "name" else val
            val = float(val) * 1000.0 if kw == "depth" else val
            kwargs.update({kw: val})
        time = str_to_time("%s %s" % (data[order_mapping["date"]].replace("/", "-"), data[order_mapping["time"]]))
        kwargs.update({"time": time})
        m = num.array([num.float(data[order_mapping["mt%i" % i]]) for i in range(6)])
        mt = MomentTensor(m=symmat6(*m) * float(data[order_mapping["moment"]]))
        kwargs.update({"moment_tensor": mt})
        e = Event(**kwargs)
        events.append(e)

dump_events(events, "events2008_mt.pf")
Example no. 21
def command_matrix(args):
    '''
    Execution of command matrix
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

        parser.add_option('--view',
                          dest='view',
                          action='store_true',
                          help='view similarity matrix')

        parser.add_option('--savefig',
                          dest='savefig',
                          action='store_true',
                          help='save figure of similarity matrix')

    parser, options, args = cl_parse('matrix', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if not os.path.isdir(conf.project_dir):
        die('project directory missing: %s' % conf.project_dir)

    simmat_temporal_fn = os.path.join(conf.project_dir, 'simmat_temporal.npy')

    if ((not options.force) and (os.path.isfile(simmat_temporal_fn))):
        die('similarity matrix exists: %s; use force option' %
            simmat_temporal_fn)

    catalog_ref_fn = os.path.join(conf.project_dir, 'catalog.pf')
    if os.path.isfile(catalog_ref_fn):
        allevents = model.load_events(catalog_ref_fn)
    else:
        die('catalog missing: %s' % catalog_ref_fn)

    if conf.sw_simmat:
        if not os.path.isfile(conf.sim_mat_fn):
            die('similarity matrix missing: %s' % conf.sim_mat_fn)
        if conf.sim_mat_type == 'binary':
            if os.path.isfile(conf.sim_mat_fn):
                simmat = sccluster.load_similarity_matrix(conf.sim_mat_fn)
            else:
                die('cannot read similarity matrix: %s' % conf.sim_mat_fn)
        else:
            die('ascii format for similarity matrix not yet implemented')

        if len(allevents) != len(simmat):
            print(len(allevents), len(simmat))
            die('clustering stopped, number of events ' +
                'differs from matrix size')

        new_catalog_fn = os.path.join(conf.project_dir,
                                      'events_to_be_clustered.pf')
        model.dump_events(allevents, new_catalog_fn)

    else:
        if conf.metric in config.acceptable_mt_based_metrics:
            events = [ev for ev in allevents if ev.moment_tensor is not None]
        else:
            events = [ev for ev in allevents]
        new_catalog_fn = os.path.join(conf.project_dir,
                                      'events_to_be_clustered.pf')
        model.dump_events(events, new_catalog_fn)

        simmat = sccluster.compute_similarity_matrix(events, conf.metric)

    sccluster.save_similarity_matrix(simmat, simmat_temporal_fn)

    simmat_fig_fn = os.path.join(conf.project_dir,
                                 'simmat_temporal.' + conf.figure_format)
    if options.view and options.savefig:
        scplot.view_and_savefig_similarity_matrix(simmat, simmat_fig_fn,
                                                  'Sorted chronologically')
    else:
        if options.view:
            scplot.view_similarity_matrix(simmat, 'Sorted chronologically')
        if options.savefig:
            scplot.savefig_similarity_matrix(simmat, simmat_fig_fn,
                                             'Sorted chronologically')

    print('Similarity matrix computed and stored as "%s"' % simmat_temporal_fn)
    if options.savefig:
        print('Similarity matrix figure saved as "%s"' % simmat_fig_fn)
Example no. 22
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an interruption')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add local stations file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='Extend traces by zero-padding if clean restitution requires'
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default"' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except:
        raise
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    nsl_to_station[
                        nsl] = s  # using first site with this station

        logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / int(channel.sample_rate.value)
                        except AttributeError:
                            deltat = 1.0 / int(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        # apparently the web services are only sensitive to
                        # full seconds; round to avoid gaps and increase the
                        # safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                for (net, sta, loc, cha, tmin, tmax) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
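                # request waveforms in batches of neach channels to keep
                # individual FDSN dataselect queries small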
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn('an error occurred while downloading data '
                                    'for channels \n  %s' %
                                    '\n  '.join('.'.join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    p.get_deltatmin()
    otinc = None
    if otinc is None:
        otinc = nice_seconds_floor(p.get_deltatmin() * 500000.)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % tr.__str__())
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warn('failed to restitute trace %s.%s.%s.%s (%s)' %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
Exemplo n.º 23
0
def command_cluster(args):
    '''
    Execution of command cluster
    '''
    def setup(parser):

        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing project directory')

        parser.add_option('--view',
                          dest='view',
                          action='store_true',
                          help='view similarity matrix after clustering')

        parser.add_option(
            '--savefig',
            dest='savefig',
            action='store_true',
            help='save figure of similarity matrix after clustering')

    parser, options, args = cl_parse('cluster', args, setup)

    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if not os.path.isdir(conf.project_dir):
        die('project directory missing: %s' % conf.project_dir)

    resdir = os.path.join(conf.project_dir, 'clustering_results')
    if os.path.isdir(resdir):
        if not options.force:
            die('clustering result directory exists; use --force to overwrite')
        shutil.rmtree(resdir)
    os.mkdir(resdir)

    simmat_temporal_fn = os.path.join(conf.project_dir, 'simmat_temporal.npy')
    if not os.path.isfile(simmat_temporal_fn):
        die('similarity matrix does not exist: %s; '
            'use seiscloud matrix first' % simmat_temporal_fn)

    new_catalog_fn = os.path.join(conf.project_dir,
                                  'events_to_be_clustered.pf')
    if not os.path.isfile(new_catalog_fn):
        die('catalog of selected events does not exist: %s; '
            'use seiscloud matrix first' % new_catalog_fn)

    simmat_temp = sccluster.load_similarity_matrix(simmat_temporal_fn)
    events = model.load_events(new_catalog_fn)
    eventsclusters = sccluster.dbscan(simmat_temp, conf.dbscan_nmin,
                                      conf.dbscan_eps,
                                      conf.sw_force_cluster_all)
    clusters = sccluster.get_clusters(events, eventsclusters)
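    # a cluster index of -1 marks events that DBSCAN left unassigned (noise)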
    noise_cluster_empty = min(eventsclusters) != -1

    sccluster.save_all(events, eventsclusters, clusters, conf, resdir)
    simmat_clus = sccluster.get_simmat_clustered(events, eventsclusters,
                                                 clusters, conf, resdir,
                                                 simmat_temp)

    simmat_clustered_fn = os.path.join(conf.project_dir,
                                       'simmat_clustered.npy')
    sccluster.save_similarity_matrix(simmat_clus, simmat_clustered_fn)

    print('Ran seiscloud clustering for the project in "%s"'
          % conf.project_dir)
    n_clusters = len(clusters)
    if not noise_cluster_empty:
        n_clusters = n_clusters - 1
    print(str(n_clusters) + ' cluster(s) found')

    simmat_fig_fn = os.path.join(conf.project_dir,
                                 'simmat_clustered.' + conf.figure_format)
    if options.view and options.savefig:
        scplot.view_and_savefig_similarity_matrix(simmat_clus, simmat_fig_fn,
                                                  'Sorted after clustering')
    else:
        if options.view:
            scplot.view_similarity_matrix(simmat_clus,
                                          'Sorted after clustering')
        if options.savefig:
            scplot.savefig_similarity_matrix(simmat_clus, simmat_fig_fn,
                                             'Sorted after clustering')

    for cluster in clusters:
        selevents = clusters[cluster]
        median = sccluster.get_median(selevents, conf)
        fn_median = os.path.join(resdir,
                                 'median_cluster' + str(cluster) + '.pf')
        model.dump_events([median], fn_median)


#        print(cluster,len(selevents))

    print('Similarity matrix after clustering computed and stored as "%s"' %
          simmat_clustered_fn)
    if options.savefig:
        print('Similarity matrix figure saved as "%s"' % simmat_fig_fn)
Exemplo n.º 24
0
from pyrocko import util, model
from pyrocko.client import catalog

tmin = util.str_to_time('2011-01-01 00:00:00')  # beginning time of query
tmax = util.str_to_time('2011-12-31 23:59:59')

# create an instance of the global CMT catalog
global_cmt_catalog = catalog.GlobalCMT()

# query the catalog
events = global_cmt_catalog.get_events(time_range=(tmin, tmax),
                                       magmin=2.,
                                       latmin=-35.,
                                       latmax=-20.,
                                       lonmin=-76.,
                                       lonmax=-65.)

print('Downloaded %s events' % len(events))
print('The last one is:')
print(events[-1])

# dump events to catalog
model.dump_events(events, 'northern_chile_events.txt')
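
# A minimal follow-up sketch (not part of the original snippet): the dumped
# catalog can be read back into Event objects with model.load_events.
events_reloaded = model.load_events('northern_chile_events.txt')
assert len(events_reloaded) == len(events)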
Exemplo n.º 25
0
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="allow recreation of output <directory>",
    )

    parser.add_option(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="print debugging information to stderr",
    )

    parser.add_option(
        "--dry-run",
        dest="dry_run",
        action="store_true",
        default=False,
        help="show available stations/channels and exit "
        "(do not download waveforms)",
    )

    parser.add_option(
        "--continue",
        dest="continue_",
        action="store_true",
        default=False,
        help="continue download after a accident",
    )

    parser.add_option(
        "--local-data",
        dest="local_data",
        action="append",
        help="add file/directory with local data",
    )

    parser.add_option(
        "--local-stations",
        dest="local_stations",
        action="append",
        help="add local stations file",
    )

    parser.add_option(
        "--local-responses-resp",
        dest="local_responses_resp",
        action="append",
        help="add file/directory with local responses in RESP format",
    )

    parser.add_option(
        "--local-responses-pz",
        dest="local_responses_pz",
        action="append",
        help="add file/directory with local pole-zero responses",
    )

    parser.add_option(
        "--local-responses-stationxml",
        dest="local_responses_stationxml",
        help="add file with local response information in StationXML format",
    )

    parser.add_option(
        "--window",
        dest="window",
        default="full",
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        "] (time format is YYYY-MM-DD HH:MM:SS)",
    )

    parser.add_option(
        "--out-components",
        choices=["enu", "rtu"],
        dest="out_components",
        default="rtu",
        help="set output component orientations to radial-transverse-up [rtu] "
        "(default) or east-north-up [enu]",
    )

    parser.add_option(
        "--padding-factor",
        type=float,
        default=3.0,
        dest="padding_factor",
        help="extend time window on either side, in multiples of 1/<fmin_hz> "
        "(default: 5)",
    )

    parser.add_option(
        "--credentials",
        dest="user_credentials",
        action="append",
        default=[],
        metavar="SITE,USER,PASSWD",
        help="user credentials for specific site to access restricted data "
        "(this option can be repeated)",
    )

    parser.add_option(
        "--token",
        dest="auth_tokens",
        metavar="SITE,FILENAME",
        action="append",
        default=[],
        help="user authentication token for specific site to access "
        "restricted data (this option can be repeated)",
    )

    parser.add_option(
        "--sites",
        dest="sites",
        metavar="SITE1,SITE2,...",
        #    default='bgr',
        default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080",
        help='sites to query (available: %s, default: "%%default")' %
        ", ".join(g_sites_available),
    )

    parser.add_option(
        "--band-codes",
        dest="priority_band_code",
        metavar="V,L,M,B,H,S,E,...",
        default="V,L,M,B,H,E",
        help="select and prioritize band codes (default: %default)",
    )

    parser.add_option(
        "--instrument-codes",
        dest="priority_instrument_code",
        metavar="H,L,G,...",
        default="H,L,O,",
        help="select and prioritize instrument codes (default: %default)",
    )

    parser.add_option(
        "--radius-min",
        dest="radius_min",
        metavar="VALUE",
        default=0.0,
        type=float,
        help="minimum radius [km]",
    )

    parser.add_option(
        "--tinc",
        dest="tinc",
        metavar="VALUE",
        default=3600.0 * 12.0,
        type=float,
        help="length of seperate saved files in s",
    )

    parser.add_option(
        "--nstations-wanted",
        dest="nstations_wanted",
        metavar="N",
        type=int,
        help="number of stations to select initially",
    )

    (options, args) = parser.parse_args(sys.argv[1:])
    if len(args) not in (9, 6, 5):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, "debug")
    else:
        util.setup_logging(program_name, "info")

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical("cannot use local responses in PZ and RESP "
                        "format at the same time")
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml,
    ):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical("can only handle local responses from either PZ or "
                        "RESP or StationXML. Cannot yet merge different "
                        "response formats.")
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical("--local-responses-resp can only be used "
                        "when --stations is also given.")
        sys.exit(1)

    try:
        ename = ""
        magnitude = None
        mt = None
        if len(args) == 9:
            time = util.str_to_time(args[0] + " " + args[1])
            lat = float(args[2])
            lon = float(args[3])
            depth = float(args[4]) * km
            iarg = 5

        elif len(args) == 6:
            if args[1].find(":") == -1:
                sname_or_date = None
                lat = float(args[0])
                lon = float(args[1])
                event = None
                time = None
            else:
                sname_or_date = args[0] + " " + args[1]

            iarg = 2

        elif len(args) == 5:
            sname_or_date = args[0]
            iarg = 1

        if len(args) in (6, 5) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical("no event found")
                sys.exit(1)
            elif len(events) > 1:
                logger.critical("more than one event found")
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        event_dir = op.join("data", "events", eventname)
        output_dir = op.join(event_dir, "waveforms")
    except:
        raise
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time,
            lat=lat,
            lon=lon,
            depth=depth,
            name=ename,
            magnitude=magnitude,
            moment_tensor=mt,
        )

    if options.window == "full":
        if event is None:
            logger.critical("need event for --window=full")
            sys.exit(1)

        low_velocity = 1500.0
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == "p":
        if event is None:
            logger.critical("need event for --window=p")
            sys.exit(1)

        phases = list(map(cake.PhaseDef, "P p".split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error("required phase arrival not found")
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(",")
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    priority_band_code = options.priority_band_code.split(",")
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical("invalid band code: %s" % s)

    priority_instrument_code = options.priority_instrument_code.split(",")
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical("invalid instrument code: %s" % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=",".join("?%s?" % s for s in priority_band_code),
    )

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ["M/S", "M", "M/S**2"]

    output_units = "M"

    sites = [x.strip() for x in options.sites.split(",") if x.strip()]
    tinc = options.tinc
    #  for site in sites:
    #     if site not in g_sites_available:
    #        logger.critical('unknown FDSN site: %s' % site)
    #       sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(",")
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(",")
            with open(token_filename, "r") as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical("cannot get token from file: %s" % token_filename)
            sys.exit(1)

    fn_template0 = (
        "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed")

    fn_template_raw = op.join(output_dir, "raw", fn_template0)
    fn_template_raw_folder = op.join(output_dir, "raw/", "traces.mseed")
    fn_stations_raw = op.join(output_dir, "stations.raw.txt")
    fn_template_rest = op.join(output_dir, "rest", fn_template0)
    fn_commandline = op.join(output_dir, "seigerdown.command")

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                "iris": dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == "geonet":
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(site in g_user_credentials
                                       or site in g_auth_tokens),
                )

            logger.info("downloading channel information (%s)" % site)
            sx = fdsn.station(site=site,
                              format="text",
                              level="channel",
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error("No stations matching given criteria. (%s)" % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}
    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s  # using first site with this station
    logger.info("number of stations found: %i" % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info("number of stations selected: %i" % len(nsls_selected))

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info("time window %i/%i (%s - %s)" %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win),
            )

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        deltat = 1.0 / channel.sample_rate.value
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        selection.append(nslc + (tmin_req - deltat * 10.0,
                                                 tmax_req + deltat * 10.0))

            if options.dry_run:
                for (net, sta, loc, cha, tmin, tmax) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ""
                        if nbatches > 1:
                            sbatch = " (batch %i/%i)" % (
                                (i // neach) + 1, nbatches)

                        logger.info("downloading data (%s)%s" % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            if tr.station == "7869":
                                tr.station = "MOER"
                                tr.network = "LE"
                                tr.location = ""
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        io.save(trs, fn_template_raw_folder)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn("overwriting file %s", fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn("an error occurred while downloading data "
                                    "for channels \n  %s" %
                                    "\n  ".join(".".join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return "" if x == 1 else "s"

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info("selected: %s.%s.%s.%s from site%s %s" %
                        (nslc + (plural_s(len(sites)), "+".join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info("selected (%s): %i channel%s (%i station%s)" % (
                "+".join(sites),
                nchannels,
                plural_s(nchannels),
                nstations,
                plural_s(nstations),
            ))

        logger.info("selected total: %i channel%s (%i station%s)" % (
            nchannels_all,
            plural_s(nchannels_all),
            nstations_all,
            plural_s(nstations_all),
        ))

        logger.info("dry run done.")
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info("downloading response information (%s)" % site)
            sxs[site] = fdsn.station(site=site,
                                     level="response",
                                     selection=selection)
            sited = site

            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            elif site == "http://ws.gpi.kit.edu":
                sited = "kit"
            if site == "http://188.246.25.142:8080":
                sited = "moer"

            sxs[site].dump_xml(filename=op.join(output_dir, "stations.%s.xml" %
                                                sited))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site["local"] = set()
        plocal = pile.make_pile(options.local_data, fileformat="detect")
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.station == "7869":
                    tr.station = "MOER"
                    tr.network = "LE"
                    tr.location = ""
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site["local"].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append("local")

    if options.local_responses_pz:
        sxs["local"] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs["local"] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs["local"] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error("no data available")
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    p.get_deltatmin()
    otinc = None
    if otinc is None:
        otinc = nice_seconds_floor(p.get_deltatmin() * 500000.0)
    otinc = 3600.0
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            if tr.station == "7869":
                tr.station = "MOER"
                tr.network = "LE"
                tr.location = ""
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units,
                    )

                    break

                except stationxml.NoResponseInformation:
                    failure.append("%s: no response information" % site)

                except stationxml.MultipleResponseInformation:
                    failure.append("%s: multiple response information" % site)

            if response is None:
                failure = ", ".join(failure)

            else:
                failure = ""
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = "trace too short"

            if failure:
                logger.warn("failed to restitute trace %s.%s.%s.%s (%s)" %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap="crossfade_cos")

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    if tr.station == "7869":
                        tr.station = "MOER"
                        tr.network = "LE"
                        tr.location = ""
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s"

    fn_waveforms = op.join(output_dir, "prepared", fn_template1)
    fn_stations = op.join(output_dir, "stations.prepared.txt")
    fn_event = op.join(event_dir, "event.txt")

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warn("Cannot downsample %s.%s.%s.%s: %s" %
                                (tr.nslc_id + (e, )))
                    continue

        if options.out_components == "rtu":
            pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z"))
        elif options.out_components == "enu":
            pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z"))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    logger.info("prepared waveforms from %i stations" % len(stations))