def call(self):
        template = 'trace_%(network)s.%(station)s.%(location)s.%(channel)s'
        try:

            if self.format == 'text':
                default_output_filename = template + '.dat'

            else:
                default_output_filename = template + '.' + self.format

            out_filename = self.output_filename('Template for output files',
                                                default_output_filename)
        except NoViewerSet:
            out_filename = self.out_filename

        traces = self.chopper_selected_traces(fallback=True)
        for trs in traces:
            for tr in trs:
                if self.format == 'mseed':
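                    # MiniSEED allows at most 2/5/2/3 characters for the
                    # network/station/location/channel codes; truncate to fit.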
                    if len(tr.network) > 2:
                        tr.set_network(tr.network[:2])
                    if len(tr.station) > 5:
                        tr.set_station(tr.station[:5])
                    if len(tr.location) > 2:
                        tr.set_location(tr.location[:2])
                    if len(tr.channel) > 3:
                        tr.set_channel(tr.channel[:3])
                io.save(tr, out_filename, format=self.format)
def restitute_evalresp(tr_fn):
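    # Deconvolve the instrument response (via Evalresp) from each trace to
    # displacement. Assumes module-level `evalresps` (mapping
    # 'station.channel' -> RESP file path), `inputdir` and `outputdir`.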
    traces = io.load(tr_fn)
    out_traces = []
    for tr in traces:
        try:
            try:
                evalresp = trace.Evalresp(respfile=evalresps['%s.%s'%(tr.station, tr.channel)],
                                          nslc_id=tr.nslc_id,
                                          target='dis')

            except KeyError:
                print('skip', '.'.join(tr.nslc_id[1:]))
                continue

            if tr.station == 'nkc' or tr.station == 'zhc':
                t_taper = 30.
                f_taper = (0.05, 0.08, 50., 75.)    # frequency domain taper in [Hz]
            else:
                t_taper = 5.
                f_taper = (0.3, 0.6, 50., 75.)      # frequency domain taper in [Hz]

            displacement = tr.transfer(
                t_taper,      # rise and fall of time domain taper in [s]
                f_taper,      # frequency domain taper in [Hz]
                transfer_function=evalresp,
                invert=True)

        except trace.TraceTooShort:
            continue
        out_traces.append(displacement)
    tr_fn = tr_fn.replace(inputdir, outputdir)
    print(tr_fn)
    io.save(out_traces, tr_fn)

    del traces
Example 3
def write_container_to_dirs(container, base_dir, pad_traces=True):
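    # Write the traces and event file of each source into its own
    # event-named subdirectory of `base_dir`.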
    sources = container.sources
    targets = container.targets

    if not os.path.exists(base_dir):
        os.makedirs(base_dir)

    p = progressbar.ProgressBar(widgets=['writing: ',
                                         progressbar.Percentage(),
                                         progressbar.Bar()],
                                maxval=len(sources)).start()
    for i_s, s in enumerate(sources):
        _trs = [container[s][t] for t in targets]
        if pad_traces:
            _trs = do_pad_traces(_trs)
        e = s.pyrocko_event()
        e.set_name(str(i_s))
        out_dir = os.path.join(base_dir, dir_from_event(e))
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

        for tr in _trs:
            io.save(tr, filename_template=out_dir+'/tr_%(network)s.%(station)s.%(location)s.%(channel)s.mseed')

        e.dump(os.path.join(out_dir, 'event.pf'))

        p.update(i_s)
    p.finish()
    def call(self):
        p = self.get_pile()

        try:
            markers = self.get_selected_event_markers()
        except NoViewerSet:
            markers = load_markers(self.markers_filename)

        try:
            # default template inferred from the sibling export snufflings
            template = 'trace_%(network)s.%(station)s.%(location)s.%(channel)s'
            default_output_filename = template + '.mseed'
            out_filename = self.output_filename('Template for output files',
                                                default_output_filename)
        except NoViewerSet:
            out_filename = self.out_filename

        for m in markers:
            event = m.get_event()
            eventname = event.name
            if not eventname:
                eventname = util.time_to_str(event.time, format='%Y-%m-%d_%H-%M-%S')

            traces = p.all(tmin=event.time + self.tbeg,
                           tmax=event.time + self.tend)

            io.save(traces, out_filename, additional=dict(
                eventname=eventname))
Example 5
 def save_traces_mseed(self, filename_tmpl='%(whichset)s_%(network)s_%(station)s_%(location)s_%(channel)s.mseed',
         overwrite_network=None, component_to_channel={}, location_map={}):
     
     station, network = self.get_station(), self.get_network()
     if overwrite_network is not None:
         network = overwrite_network
         
     fns = []
     for icomp, comp in enumerate(self.components):
         channel = component_to_channel.get(comp, comp)
         for (whichset, sgram) in zip(('references', 'synthetics'), 
                          (self.ref_seismograms[icomp], self.syn_seismograms[icomp])):
             if sgram and len(sgram[0]) > 1:
                 starttime = sgram[0][0]
                 endtime = sgram[0][-1]
                 deltat = (endtime-starttime)/(len(sgram[0])-1)
                 data = sgram[1]
                 location = location_map.get(whichset, whichset)
                 tr = trace.Trace(network[:2], station[:5], location[:2], channel[:3], 
                     tmin = starttime, tmax=endtime, deltat=deltat, ydata=data)
                     
                 fn = filename_tmpl % { 'whichset': whichset,
                                        'network': network,
                                        'station': station,
                                        'location': location,
                                        'channel': channel }
                                        
                 io.save([tr], fn)
                 fns.append(fn)
     return fns
    def make_noise_trace(
            self, tmin, tmax, nslc_id, target_nslc_id=None, merge_traces=None,
            outdir='noise_concat', template='tr_%n.%s.%l.%c.-%(tmin)s.mseed'):
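        # Build a long noise trace by summing overlapping, tapered windows
        # drawn at random from recorded noise into a ring buffer; each filled
        # buffer is merged with `merge_traces` (if given) and saved to `outdir`.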

        if target_nslc_id is None:
            target_nslc_id = nslc_id

        n, s, l, c = target_nslc_id
        noise_tr = self.noise[(n, s, l, c)]
        deltat = noise_tr.deltat
        self.resample_many(merge_traces, deltat)
        overlap = 0.2                           # 20 percent of sample length
        sample_length = 60.                     # seconds
        ns = int(sample_length/deltat)          # number of samples
        fader = trace.CosFader(xfrac=overlap)
        noisey = noise_tr.get_ydata()
        buffer_size = int(sample_length*60./deltat)
        i_dumped = 0.
        buffer_y = num.zeros(buffer_size)
        ioverflow = 0
        overflow = None
        taper = trace.costaper(
            0., overlap/2.*sample_length, sample_length*(1.0-overlap/2.),
            sample_length, ns, deltat)
        for i, istart in enumerate(num.arange(
                0, int(num.ceil((tmax-tmin)/deltat)),
                int(ns*(1.0 - overlap/2.0)))):
            istart = int(num.floor((istart+ioverflow) % buffer_size))
            istop = istart + ns
            nmissing = buffer_size - istop
            isample_start = int(num.floor(num.random.uniform(0, len(noisey)-ns)))
            isample_stop = isample_start + ns
            noise_sample = num.zeros(ns)
            noise_sample[:] = noisey[isample_start:isample_stop]
            noise_sample *= taper
            if nmissing<0:
                isample_stop = ns + nmissing
                istop = istart + ns + nmissing
                ioverflow = isample_stop
                overflow = noise_sample[ioverflow:]
                noise_sample = noise_sample[:ioverflow]
            buffer_y[istart: istop] += noise_sample
            # Problem with ioverlap.
            if overflow is not None:
                tmin_tr = tmin+i_dumped*buffer_size*deltat
                tmax_tr = tmin_tr+(buffer_size-1)*deltat
                buff_tr = trace.Trace(network=n, station=s, location=l, channel=c,
                                      tmin=tmin_tr, tmax=tmax_tr,
                                      deltat=noise_tr.deltat, ydata=buffer_y)
                if merge_traces is not None:
                    for mtr in merge_traces:
                        if buff_tr.is_relevant(mtr.tmin, mtr.tmax):
                            mtr = mtr.taper(self.merge_fader, inplace=False)
                            buff_tr.add(mtr)
                            #print max(num.abs(mtr.ydata))
                            #trace.snuffle([mtr, buff_tr])
                io.save(buff_tr, pjoin(outdir, template))
                buffer_y = num.zeros(buffer_size)
                buffer_y[0:ns-ioverflow] = overflow
                i_dumped += 1.
                overflow = None
Example 7
 def testLongCode(self):
     c = '1234567'
     tr = trace.Trace(c,c,c,c, ydata=num.zeros(10))
     e = None
     try:
         io.save(tr, 'test.mseed')
     except mseed.CodeTooLong as e:
         pass
Example 8
 def testLongCode(self):
     c = '1234567'
     tr = trace.Trace(c, c, c, c, ydata=num.zeros(10))
     e = None
     try:
         io.save(tr, 'test.mseed')
     except mseed.CodeTooLong as e:
         assert isinstance(e, mseed.CodeTooLong)
Example 9
def command_extract(args):
    def setup(parser):
        parser.add_option(
            '--format', dest='format', default='mseed',
            choices=['mseed', 'sac', 'text', 'yaff'],
            help='export to format "mseed", "sac", "text", or "yaff". '
                 'Default is "mseed".')

        fndfl = 'extracted/%(irecord)s_%(args)s.%(extension)s'
        parser.add_option(
            '--output', dest='output_fn', default=fndfl, metavar='TEMPLATE',
            help='output path template [default: "%s"]' % fndfl)

    parser, options, args = cl_parse('extract', args, setup=setup)
    try:
        sdef = args.pop()
    except Exception:
        parser.error('cannot get <selection> argument')

    try:
        gdef = gf.meta.parse_grid_spec(sdef)
    except gf.meta.GridSpecError as e:
        die(e)

    store_dir = get_store_dir(args)

    extensions = {
        'mseed': 'mseed',
        'sac': 'sac',
        'text': 'txt',
        'yaff': 'yaff'}

    try:
        store = gf.Store(store_dir)
        for args in store.config.iter_extraction(gdef):
            gtr = store.get(args)
            if gtr:
                tr = trace.Trace(
                    '', '', '', util.zfmt(store.config.ncomponents) % args[-1],
                    ydata=gtr.data,
                    deltat=gtr.deltat,
                    tmin=gtr.deltat * gtr.itmin)

                additional = dict(
                    args='_'.join('%g' % x for x in args),
                    irecord=store.str_irecord(args),
                    extension=extensions[options.format])

                io.save(
                    tr,
                    options.output_fn,
                    format=options.format,
                    additional=additional)

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)
Example 10
    def save(self):
        if not self.current_stuff:
            self.fail('Nothing to save.')
        
        data_fn = self.output_filename(caption='Save Data', dir='data-%(network)s-%(station)s-%(location)s-%(channel)s-%(tmin)s.mseed')
        stations_fn = self.output_filename(caption='Save Stations File', dir='stations.txt')        

        all_traces, stations = self.current_stuff
        io.save(all_traces, data_fn)
        model.dump_stations(stations, stations_fn)
Example 11
    def dump_waveforms(self, engine, sources, path,
                       tmin=None, tmax=None, overwrite=False):
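        # Generate and save waveforms in windows of length `tinc`, writing
        # one file per window and channel into year/month/day subdirectories.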
        path_waveforms = op.join(path, 'waveforms')
        gf.store.remake_dir(path_waveforms, force=overwrite)

        path_traces = op.join(
            path_waveforms,
            '%(wmin_year)s',
            '%(wmin_month)s',
            '%(wmin_day)s',
            'waveform_%(network)s_%(station)s_' +
            '%(location)s_%(channel)s_%(tmin)s_%(tmax)s.mseed')

        tmin_all, tmax_all = self.get_time_range(sources)
        tmin = tmin if tmin is not None else tmin_all
        tmax = tmax if tmax is not None else tmax_all
        tts = util.time_to_str

        tinc = self.tinc or self.get_useful_time_increment(engine, sources)
        tmin = math.floor(tmin / tinc) * tinc
        tmax = math.ceil(tmax / tinc) * tinc

        nwin = int(round((tmax - tmin) / tinc))

        pbar = util.progressbar('Generating waveforms', nwin)
        for iwin in range(nwin):
            pbar.update(iwin)
            tmin_win = max(tmin, tmin + iwin*tinc)
            tmax_win = min(tmax, tmin + (iwin+1)*tinc)

            if tmax_win <= tmin_win:
                continue

            trs = self.get_waveforms(engine, sources, tmin_win, tmax_win)

            try:
                io.save(
                    trs, path_traces,
                    additional=dict(
                        wmin_year=tts(tmin_win, format='%Y'),
                        wmin_month=tts(tmin_win, format='%m'),
                        wmin_day=tts(tmin_win, format='%d'),
                        wmin=tts(tmin_win, format='%Y-%m-%d_%H-%M-%S'),
                        wmax_year=tts(tmax_win, format='%Y'),
                        wmax_month=tts(tmax_win, format='%m'),
                        wmax_day=tts(tmax_win, format='%d'),
                        wmax=tts(tmax_win, format='%Y-%m-%d_%H-%M-%S')),
                    overwrite=overwrite)
            except FileSaveError as e:
                logger.debug('Waveform exists %s' % e)

        pbar.finish()

        return [path_waveforms]
    def call(self):
        self.cleanup()

        if self.tinc is not None:
            template = \
                'trace_%n.%s.%l.%c_%(tmin_us)s'
        else:
            template = 'trace_%n.%s.%l.%c'

        if self.format == 'text':
            default_output_filename = template + '.dat'

        else:
            default_output_filename = template + '.' + self.format

        out_filename = self.output_filename('Template for output files',
                                            default_output_filename)

        viewer = self.get_viewer()
        for trs in self.chopper_selected_traces(fallback=True, tinc=self.tinc):
            trs2save = []
            for tr in trs:
                if self.format == 'mseed':
                    if len(tr.network) > 2:
                        tr.set_network(tr.network[:2])
                    if len(tr.station) > 5:
                        tr.set_station(tr.station[:5])
                    if len(tr.location) > 2:
                        tr.set_location(tr.location[:2])
                    if len(tr.channel) > 3:
                        tr.set_channel(tr.channel[:3])

                if viewer.lowpass:
                    if viewer.lowpass < 0.5/tr.deltat:
                        tr.lowpass(4, viewer.lowpass, demean=False)
                if viewer.highpass:
                    if viewer.highpass < 0.5/tr.deltat:
                        tr.highpass(4, viewer.highpass, demean=False)

                trs2save.append(tr)

            try:
                io.save(
                    trs2save, out_filename,
                    format=self.format,
                    overwrite=True)

            except io.io_common.FileSaveError as e:
                self.fail(str(e))

        if self.save_stations:
            stations = viewer.stations.values()
            fn = self.output_filename('Save Stations', 'stations.pf')
            model.dump_stations(list(stations), fn)
Example 13
    def save(self):
        if not self.current_stuff:
            self.fail("Nothing to save.")

        data_fn = self.output_filename(
            caption="Save Data", dir="data-%(network)s-%(station)s-%(location)s-%(channel)s-%(tmin)s.mseed"
        )
        stations_fn = self.output_filename(caption="Save Stations File", dir="stations.txt")

        all_traces, stations = self.current_stuff
        io.save(all_traces, data_fn)
        model.dump_stations(stations, stations_fn)
Example 14
 def testDownsampling(self):
     
     n = 1024
     dt1 = 1./125.
     dt2 = 1./10.
     dtinter = 1./util.lcm(1./dt1, 1./dt2)
     upsratio = dt1/dtinter
     xdata = num.arange(n, dtype=float)
     ydata = num.exp(-((xdata-n/2)/10.)**2)
     t = trace.Trace(ydata=ydata, tmin=sometime, deltat=dt1, location='1')
     t2 = t.copy()
     t2.set_codes(location='2')
     t2.downsample_to(dt2, allow_upsample_max=10)
     io.save([t,t2], 'test.mseed')
Example 15
 def put_ref_seismograms(self):
     traces_path = self._ref_seismogram_stem + '-%(ireceiver)i-%(component)s.mseed'
     for irec, (station, receiver, traces) in enumerate(self._dataset):
         for tr in traces:
             fn = traces_path % {
                     'ireceiver': irec+1, 
                     'component': self._kiwi_component_map[tr.channel]}
                     
             tr = tr.copy()
             if self._zero_time != 0.0:
                 tr.shift(-self._zero_time)
             ydata = tr.get_ydata()
             if self._trace_factor != 1.0:
                 ydata *= self._trace_factor
             io.save([tr], fn)
Example 16
def makeManyFiles(nfiles, nsamples, networks, stations, channels, tmin):

    datadir = tempfile.mkdtemp()
    traces = []
    deltat = 1.0
    for i in range(nfiles):
        ctmin = tmin+i*nsamples*deltat  # random.randint(1, int(time.time()))

        data = num.ones(nsamples)
        traces.append(trace.Trace(
            rc(networks), rc(stations), '', rc(channels), ctmin, None, deltat, data))

    fnt = pjoin(datadir, '%(network)s-%(station)s-%(location)s-%(channel)s-%(tmin)s.mseed')
    io.save(traces, fnt, format='mseed')

    return datadir
Example 17
 def testWriteText(self):
     networks = [rn(2) for i in range(5)]
     deltat = 0.1
     tr = trace.Trace(
         rc(networks), rn(4), rn(2), rn(3),
         tmin=time.time()+deltat,
         deltat=deltat,
         ydata=num.arange(100, dtype=num.int32),
         mtime=time.time())
     io.save(
         tr,
         pjoin(
             self.tmpdir,
             '%(network)s_%(station)s_%(location)s_%(channel)s'),
         format='text')
    def __call__(self):

        # Change strike within Snuffler with the added scroll bar.
        #strike = 0
        #dip = 90
        #rake = 0
        #moment = 7.00e20
        depth = 3000
        rise_time = 1
        scale = 1E21
        mxx = 1.*scale
        mxy = 1.*scale
        myz = 1.*scale
        mxz = 1.*scale

        #explosion source
        source_params = dict(zip(['mxx', 'myy', 'mzz', 'mxy', 'mxz',
                                  'myz', 'depth', 'rise-time'],
                                 [mxx, mxx, mxx, mxy, mxz, myz,
                                  depth, rise_time]))

        s = source.Source(sourcetype='moment_tensor', sourceparams=source_params)

        #strike dip rake
        #s = source.Source('bilateral',
        #sourceparams_str ='0 0 0 %g %g %g %g %g 0 0 0 0 1 %g' % (depth, moment, strike, dip, rake, rise_time))

        self.seis.set_source(s)
        recs = self.seis.get_receivers_snapshot(which_seismograms=('syn',),
                                                which_spectra=(),
                                                which_processing='tapered')
        
        trs = []
        for rec in recs:
            for t in rec.get_traces():
                t.shift(rise_time*0.5)
                trs.append(t)

        io.save(trs, 'mseeds/%(network)s_%(station)s_%(location)s_%(channel)s.mseed')

        # Create event:
        ref_event = model.Event(lat=self.olat,
                                lon=self.olon,
                                depth=depth,
                                time=self.otime,
                                name='Reference Event')
        synthetic_event_marker = gui_util.EventMarker(event=ref_event)
        gui_util.Marker.save_markers([synthetic_event_marker], 'reference_marker.txt')
Example 19
    def testWriteRead(self):
        now = time.time()
        n = 10
        deltat = 0.1
        
        networks = [ rn(2) for i in range(5) ]
        
        traces1 = [ trace.Trace(rc(networks), rn(4), rn(2), rn(3), tmin=now+i*deltat*n*2, deltat=deltat, ydata=num.arange(n, dtype=num.int32), mtime=now)
            for i in range(3) ]
            
        tempdir = tempfile.mkdtemp()

        for format in ('mseed', 'sac', 'yaff'):
            fns = io.save(traces1, pjoin(tempdir, '%(network)s_%(station)s_%(location)s_%(channel)s'), format=format)

            for fn in fns:
                assert io.detect_format(fn) == format

            traces2 = []
            for fn in fns:
                traces2.extend(io.load(fn, format='detect'))
                
            for tr in traces1:
                assert tr in traces2, 'failed for format %s' % format
                
            for fn in fns:
                os.remove(fn)

        shutil.rmtree(tempdir)
def restitute_pz(tr_fn):
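    # Like restitute_evalresp above, but using SAC pole-zero files. Assumes
    # module-level `pole_zeros` (mapping 'station.channel' -> SAC ZPK file),
    # `normalization_factors`, `inputdir` and `outputdir`.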
    traces = io.load(tr_fn)
    out_traces = []
    for tr in traces:
        try:
            try:
                zeros, poles, constant = pz.read_sac_zpk(
                    pole_zeros['%s.%s' % (tr.station, tr.channel)])

            except KeyError:
                print('skip', '.'.join(tr.nslc_id[1:]))
                continue

            zeros.append(0.0j)

            digitizer_gain = 1e6    
            constant *= digitizer_gain

            # for the conversion of Hz -> iw (angular frequency):
            nzeros = len(zeros)
            npoles = len(poles)
            constant *= (2*num.pi)**(npoles-nzeros)

            if tr.station == 'nkc' or tr.station == 'zhc':
                constant *= normalization_factors[tr.station]
                t_taper = 30.
                f_taper = (0.05, 0.08, 50., 75.)    # frequency domain taper in [Hz]
            else:
                t_taper = 5.
                f_taper = (0.3, 0.6, 50., 75.)      # frequency domain taper in [Hz]

            print(tr.station, constant)
            pz_transfer = trace.PoleZeroResponse(zeros, poles, constant)
            displacement = tr.transfer(
                t_taper,      # rise and fall of time domain taper in [s]
                f_taper,      # frequency domain taper in [Hz]
                transfer_function=pz_transfer,
                invert=True)

        except trace.TraceTooShort:
            continue
        out_traces.append(displacement)
    tr_fn = tr_fn.replace(inputdir, outputdir)
    print(tr_fn)
    io.save(out_traces, tr_fn)

    del traces
Example 21
 def _fixate(self, buf):
     if self._path:
         trbuf = buf.get_traces()[0]
         fns = io.save([trbuf], self._path, format=self._format)
         
         self.remove_file(buf)
         if not self._forget_fixed:
             self.load_files(fns, show_progress=False, fileformat=self._format)
Example 22
 def _insert(self, iblock, traces):
     if traces:
         if self._storepath is not None:
             fns = io.save(traces, self._storepath, format='mseed', additional={'iblock': iblock})
             self.load_files(fns, fileformat='mseed', show_progress=False)
         else:
             file = pile.MemTracesFile(None,traces)
             self.add_file(file)
    def call(self):
        self.cleanup()
        if self.combine and self.format != 'mseed':
            self.fail('"Combine" only possible when exporting "mseed"')

        if self.combine:
            template = 'traces_export'
        else:
            template = 'trace_%(network)s.%(station)s.%(location)s.%(channel)s'
        try:

            if self.format == 'text':
                default_output_filename = template + '.dat'

            else:
                default_output_filename = template + '.' + self.format

            out_filename = self.output_filename('Template for output files',
                                                default_output_filename)
        except NoViewerSet:
            out_filename = self.out_filename
        traces = self.chopper_selected_traces(fallback=True)
        trs2save = []
        for trs in traces:
            for tr in trs:
                if self.format == 'mseed':
                    if len(tr.network) > 2:
                        tr.set_network(tr.network[:2])
                    if len(tr.station) > 5:
                        tr.set_station(tr.station[:5])
                    if len(tr.location) > 2:
                        tr.set_location(tr.location[:2])
                    if len(tr.channel) > 3:
                        tr.set_channel(tr.channel[:3])
                if self.combine:
                    trs2save.append(tr)
                else:
                    io.save(tr, out_filename, format=self.format)

        if self.combine:
            io.save(trs2save, out_filename, format=self.format)

        if self.save_stations:
            stations = self.get_viewer().stations.values()
            fn = self.output_filename('Save Stations', 'stations.pf')
            model.dump_stations(list(stations), fn)
Example 24
    def _fixate(self, buf, complete=True):
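        # Save the buffered trace to disk; with a fixation length set, chop it
        # into chunks aligned to the start of the year and reinsert an
        # incomplete last chunk into the pile.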
        trbuf = buf.get_traces()[0]
        del_state = True
        if self._path:
            if self._fixation_length is not None:
                ttmin = trbuf.tmin
                ytmin = util.year_start(ttmin)
                n = int(math.floor((ttmin - ytmin) / self._fixation_length))
                tmin = ytmin + n*self._fixation_length
                traces = []
                t = tmin
                while t <= trbuf.tmax:
                    try:
                        traces.append(
                            trbuf.chop(
                                t,
                                t+self._fixation_length,
                                inplace=False,
                                snap=(math.ceil, math.ceil)))

                    except trace.NoData:
                        pass
                    t += self._fixation_length

                if abs(traces[-1].tmax - (t - trbuf.deltat)) < \
                        trbuf.deltat/100. or complete:

                    self._pile.remove_file(buf)

                else:  # reinsert incomplete last part
                    new_trbuf = traces.pop()
                    self._pile.remove_file(buf)
                    buf.remove(trbuf)
                    buf.add(new_trbuf)
                    self._pile.add_file(buf)
                    del_state = False

            else:
                traces = [trbuf]
                self._pile.remove_file(buf)

            fns = io.save(traces, self._path, format=self._format)

            if not self._forget_fixed:
                self._pile.load_files(
                    fns, show_progress=False, fileformat=self._format)

        if del_state:
            del self._states[trbuf.nslc_id]
    def save(self):
        fault = okada.OkadaSource(
            strike=self.t_strike, dip=self.t_dip, rake=self.t_rake,  # degree
            slip=self.t_slip,  # meter
            ztop=self.t_ztop, zbottom=self.t_zbot, length=self.t_length,  # meter
            xtrace=self.t_xtrace, ytrace=self.t_ytrace)  # meter

        extent = -self.t_ext, self.t_ext, -self.t_ext, self.t_ext  # meter (xmin, xmax, ymin, ymax)
        Y, X = numpy.linspace(extent[2], extent[3]), numpy.linspace(extent[0], extent[1])

        XYZ = numpy.array([X, Y, numpy.zeros_like(X)]).T

        disp = fault.displacement(XYZ, poisson=.25)
        tmint = util.str_to_time('1970-01-01 00:05:00.000')
        tr_U = trace.Trace(station='disp', channel='Z', deltat=0.5, tmin=tmint, ydata=disp[:, 0])
        io.save([tr_U], 'up_displacement.mseed')
        tr_N = trace.Trace(station='disp', channel='N', deltat=0.5, tmin=tmint, ydata=disp[:, 1])
        io.save([tr_N], 'north_displacement.mseed')
        tr_E = trace.Trace(station='disp', channel='E', deltat=0.5, tmin=tmint, ydata=disp[:, 2])
        io.save([tr_E], 'east_displacement.mseed')
Example 26
def iris_pull(options, conf, event_names):
    conf = conf['iris_pull_config']

    if not event_names:
        sys.exit('need event name')

    preparator = InvResponsePreparator(conf.begin_phase, conf.end_phase,
                                       conf.inv_response_frequencyband)

    for event_name in event_names:
        conf.event_name = event_name
        event = _get_event_infos(conf)
        tevent = event.time

        station_query_save_path = conf.path('station_query_save_path')
        if os.path.exists(station_query_save_path):
            logger.info('Using stored station query.')
            all_stations = pload(station_query_save_path)
        else:
            logger.info('Querying for stations...')
            all_stations = combi_get_stations(
                lat=event.lat,
                lon=event.lon,
                rmin=conf.query_rmin,
                rmax=conf.query_rmax,
                tmin=tevent,
                tmax=tevent + 3600.,
                channel_pattern=conf.query_channel_pattern)

            util.ensuredirs(station_query_save_path)
            pdump(all_stations, station_query_save_path)

        nstations = len(set(
            (sta.network, sta.station) for sta in all_stations))
        logger.info('Station query returned %i station%s' %
                    (nstations, plural_s(nstations)))
        preferred_ns = set(
            iris_get_vnets(conf.preferred_virtual_networks,
                           tmin=tevent,
                           tmax=tevent + 3600.))
        preferred_n = set(conf.preferred_networks)

        for station in all_stations:
            station.set_event_relative_data(event)

        raw_trace_path = conf.path('raw_trace_path')

        nsl_all = set(get_nsl(s) for s in all_stations)

        state_save_path = conf.path('state_save_path')
        if os.path.exists(state_save_path):
            nsl_ok, nsl_blacklist, nsl_use = pload(state_save_path)
        else:
            nsl_ok = set()
            nsl_blacklist = set()
            nsl_use = set()

        manual_blacklist_path = conf.path('manual_blacklist_path')
        nsl_blacklist.update(read_manual_blacklist(manual_blacklist_path))

        nsl_selected = set()
        nwanted = conf.get_or_none('nwanted')
        assert nwanted is None or len(nwanted) == 2

        while True:
            if nwanted:
                selected = select_stations(all_stations,
                                           nwanted[1],
                                           preferred_n=preferred_n,
                                           preferred_ns=preferred_ns,
                                           preferred_nsl=nsl_ok,
                                           blacklist_nsl=nsl_blacklist)
            else:
                selected = all_stations

            nsl_selected = set(get_nsl(s) for s in selected)

            download = [s for s in selected if get_nsl(s) not in nsl_ok]
            nsl_download = set(get_nsl(s) for s in download)

            combi_get_responses(download, tevent, conf.path('resp_path'))

            tmin = preparator.get_tmin_limit(event, download)
            tmax = preparator.get_tmax_limit(event, download)

            logger.info('Loading data for event %s:\n%s' %
                        (event_name, str_nsl_selection(nsl_download)))
            try:
                fns = combi_get_data(
                    download,
                    tmin,
                    tmax,
                    raw_trace_path,
                    neach=conf.query_nstations_per_datarequest)
                p = pile.make_pile(fns, show_progress=False)
                prepared_trace_path = conf.path('prepared_trace_path')
                for traces in preparator.iter_prepare(p, event, download):
                    for tr in traces:
                        nsl_ok.add(get_nsl(tr))
                    io.save(traces, prepared_trace_path)

            except iris_ws.NotFound:
                pass

            logger.info('Blacklisting:\n%s' %
                        str_nsl_selection(nsl_download - nsl_ok))

            nsl_blacklist.update(nsl_download - nsl_ok)
            preferred_ns.update(set(nsl[:2] for nsl in nsl_ok))

            nsl_use = nsl_ok & nsl_selected

            logger.info('Have %i stations with data.' % len(nsl_use))

            if not nwanted:
                break

            else:
                if len(nsl_selected) == len(nsl_all):
                    break

                if len(nsl_use) > nwanted[0]:
                    break

        pdump((nsl_ok, nsl_blacklist, nsl_use), state_save_path)

        if nwanted and len(nsl_use) >= nwanted[1]:
            selected = select_stations(selected,
                                       nwanted[1],
                                       preferred_n=preferred_n,
                                       preferred_ns=preferred_ns,
                                       blacklist_nsl=nsl_blacklist)
            nsl_selected = set(get_nsl(s) for s in selected)
            nsl_use = nsl_ok & nsl_selected

        stations = [s for s in all_stations if get_nsl(s) in nsl_use]

        model.dump_stations(stations, conf.path('stations_ok_path'))
        model.dump_stations(all_stations, conf.path('stations_all_path'))

        if nsl_use:
            logger.info('Data available for event %s:\n%s' %
                        (event_name, str_nsl_selection(nsl_use)))
        else:
            logger.info('No data available for event %s' % event_name)
Example 27
 def save(self):
     default_fn = 'BeamTraces_baz%s_slow%s.mseed' % (self.bazi, self.slow)
     fn = self.output_filename('Template for output files', default_fn)
     io.save(self.stacked.values(), fn)
Example 28
        arrival_times=arrival_times,
        wavename='any_P',
        arrival_taper=arrival_taper,
        filterer=filterer,
        outmode='stacked_traces')

all_traces = traces + synth_traces_nn_t + synth_traces_ml_t + synth_traces_nn + synth_traces_ml

# display to check
trace.snuffle(
    all_traces,
    stations=sc.wavemaps[0].stations, events=[event])

if False:
    from pyrocko.io import save
    save(all_traces, 'traces_%s.yaff' % tshift_str, format='yaff')


if False:
    traces1, tmins = heart.seis_synthetics(
        engine, [patches[0]], targets, arrival_times=ats,
        wavename='any_P', arrival_taper=arrival_taper,
        filterer=filterer, outmode='stacked_traces')

    gfs.set_stack_mode('numpy')

    synth_traces_ml1 = []
    for i in range(1):
        synthetics_ml1 = gfs.stack_all(
            targetidxs=targetidxs,
            patchidxs=[i],
Example 29
from pyrocko import trace, util, io
import numpy as num

nsamples = 100
tmin = util.str_to_time('2010-02-20 15:15:30.100')
data1 = num.random.random(nsamples)
data2 = num.random.random(nsamples)
t1 = trace.Trace(network='ref',
                 station='TEST',
                 channel='Z',
                 deltat=0.5,
                 tmin=tmin,
                 ydata=data1)
t2 = trace.Trace(network='ref',
                 station='TEST',
                 channel='N',
                 deltat=0.5,
                 tmin=tmin,
                 ydata=data2)
io.save([t1, t2],
        'mseeds/referenceRandomTestFiles/my_precious_reference_traces.mseed'
        )  # all traces in one file
Example 30
    def export(self,
               point,
               results_path,
               stage_number,
               fix_output=False,
               force=False,
               update=False,
               chop_bounds=['b', 'c']):
        """
        Save results for given point to result path.
        """
        def save_covs(wmap, cov_mat='pred_v'):
            """
            Save covariance matrixes of given attribute
            """
            covs = {
                utility.list2string(dataset.nslc_id):
                getattr(dataset.covariance, cov_mat)
                for dataset in wmap.datasets
            }

            outname = os.path.join(
                results_path, '%s_C_%s_%s' % ('seismic', cov_mat, wmap._mapid))
            logger.info('"%s" to: %s' % (wmap._mapid, outname))
            num.savez(outname, **covs)

        from pyrocko import io

        # synthetics and data
        results = self.assemble_results(point, chop_bounds=chop_bounds)
        for traces, attribute in heart.results_for_export(results=results,
                                                          datatype='seismic'):

            filename = '%s_%i.mseed' % (attribute, stage_number)
            outpath = os.path.join(results_path, filename)
            try:
                io.save(traces, outpath, overwrite=force)
            except io.mseed.CodeTooLong:
                if fix_output:
                    for tr in traces:
                        tr.set_station(tr.station[-5::])
                        tr.set_location(
                            str(self.config.gf_config.reference_model_idx))

                    io.save(traces, outpath, overwrite=force)
                else:
                    raise ValueError(
                        'Some station codes are too long! '
                        '(the --fix_output option will truncate to '
                        'last 5 characters!)')

        # export stdz residuals
        self.analyse_noise(point, chop_bounds=chop_bounds)
        if update:
            logger.info('Saving velocity model covariance matrixes...')
            self.update_weights(point, chop_bounds=chop_bounds)
            for wmap in self.wavemaps:
                save_covs(wmap, 'pred_v')

        logger.info('Saving data covariance matrixes...')
        for wmap in self.wavemaps:
            save_covs(wmap, 'data')
Example 31
from pyrocko import trace, util, io
import numpy as num

nsamples = 100
tmin = util.str_to_time('2010-02-20 15:15:30.100')
data = num.random.random(nsamples)
t1 = trace.Trace(station='TEST',
                 channel='Z',
                 deltat=0.5,
                 tmin=tmin,
                 ydata=data)
t2 = trace.Trace(station='TEST',
                 channel='N',
                 deltat=0.5,
                 tmin=tmin,
                 ydata=data)

# all traces in one file
io.save([t1, t2], 'my_precious_traces.mseed')

# each file one channel
io.save([t1, t2], 'my_precious_trace_%(channel)s.mseed')
Example 32
 def save(self, traces, fn="beam.pf"):
     io.save(traces, fn)
Example 33
    def call(self):
        self.cleanup()

        if self.tinc is not None:
            template = \
                'trace_%n.%s.%l.%c_%(tmin_us)s'
        else:
            template = 'trace_%n.%s.%l.%c'

        if self.format == 'text':
            default_output_filename = template + '.dat'

        else:
            default_output_filename = template + '.' + self.format

        out_filename = self.output_filename('Template for output files',
                                            default_output_filename)

        viewer = self.get_viewer()
        for trs in self.chopper_selected_traces(fallback=True, tinc=self.tinc):
            traces_save = []
            for tr in trs:
                if self.format == 'mseed':
                    if len(tr.network) > 2:
                        tr.set_network(tr.network[:2])
                    if len(tr.station) > 5:
                        tr.set_station(tr.station[:5])
                    if len(tr.location) > 2:
                        tr.set_location(tr.location[:2])
                    if len(tr.channel) > 3:
                        tr.set_channel(tr.channel[:3])

                if self.apply_filter:
                    if viewer.lowpass is not None and \
                            viewer.highpass is not None:
                        tr.bandpass(2, viewer.highpass, viewer.lowpass)

                    elif viewer.lowpass is not None:
                        if viewer.lowpass < 0.5 / tr.deltat:
                            tr.lowpass(4, viewer.lowpass, demean=False)

                    elif viewer.highpass is not None:
                        if viewer.highpass < 0.5 / tr.deltat:
                            tr.highpass(4, viewer.highpass, demean=False)

                traces_save.append(tr)

            if viewer.rotate != 0.0 and self.apply_filter:
                phi = viewer.rotate / 180. * math.pi
                cphi = math.cos(phi)
                sphi = math.sin(phi)
                for a in traces_save:
                    for b in traces_save:
                        if (a.network == b.network and a.station == b.station
                                and a.location == b.location and
                            ((a.channel.lower().endswith('n')
                              and b.channel.lower().endswith('e')) or
                             (a.channel.endswith('1') and b.channel.endswith('2')))
                                and abs(a.deltat - b.deltat) < a.deltat * 0.001
                                and abs(a.tmin - b.tmin) < a.deltat * 0.01
                                and a.get_ydata().size == b.get_ydata().size):

                            aydata = a.get_ydata() * cphi + b.get_ydata() * sphi
                            bydata = -a.get_ydata() * sphi + b.get_ydata() * cphi
                            a.set_ydata(aydata)
                            b.set_ydata(bydata)

            try:
                io.save(traces_save,
                        out_filename,
                        format=self.format,
                        overwrite=True)

            except io.io_common.FileSaveError as e:
                self.fail(str(e))

        logger.info('saved waveforms to %s', out_filename)

        if self.save_stations:
            stations = viewer.stations.values()
            fn = self.output_filename('Save Stations', 'stations.pf')
            model.dump_stations(list(stations), fn)
            logger.info('saved stations to %s', fn)
Example 34
                # data set

                # deconvolve transfer function
                restituted = tr.transfer(
                    tfade=2.,
                    freqlimits=(0.01, 0.1, 1., 2.),
                    transfer_function=polezero_response,
                    invert=True)

                displacement_iris.append(restituted)
                stations_disp_iris.append(station)
            except Exception:
                pass


io.save(displacement_iris, os.path.join(sdspath, 'traces_restituted_iris.mseed'))
model.dump_stations(stations_disp_iris, os.path.join(sdspath, 'stations_disp_iris.txt'))

site = 'geofon'
minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
diffDist = (maxDist - minDist)/6.
displacement_geofon = []
stations_disp_geofon = []
stations_real_geofon = []
gaps = []
try:
    for l in range(0, 6):
        maxDist = minDist + diffDist
        stations_geofon = get_stations(site, event.lat, event.lon, minDist,
                                       maxDist, tmin, tmax, 'BHZ')

        nstations_geofon = [s for s in stations_geofon]
Example 35
def Stack():
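    # Sort single-shot SAC records into per-day, per-channel directories,
    # stack them via a SAC 'sss' subprocess, and convert the stacks to
    # MiniSEED. Assumes module-level `middle_for_stack`, `year`, `min_day`,
    # `max_day` and `dire`.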
    cmd = 'mkdir {}'.format(middle_for_stack)
    os.system(cmd)

    for i in range(int(min_day), int(max_day) + 1, 1):
        d = join(middle_for_stack, year + str(i))
        cmd = 'mkdir {}'.format(d)
        os.system(cmd)
        for num in range(1, 41):
            num = str(num).zfill(2)
            dd1 = join(middle_for_stack, year + str(i), 'ZDY' + num + '.SHE')
            dd2 = join(middle_for_stack, year + str(i), 'ZDY' + num + '.SHN')
            dd3 = join(middle_for_stack, year + str(i), 'ZDY' + num + '.SHZ')
            #print(d)
            cmd = 'mkdir {} {} {}'.format(dd1, dd2, dd3)
            os.system(cmd)

    print(
        'Reattribute all single shots with the same channel into one directory :'
    )
    for f in tqdm(glob(join(dire, '*.SAC'))):
        filename = basename(f)
        datetime = filename.split('.')[0]
        station = filename.split('.')[3]
        channel = filename.split('.')[5]
        for d in glob(join(middle_for_stack, '201*')):
            dt = basename(d)
            if (datetime == dt):
                for dd in glob(join(d, 'ZDY*')):
                    ss = basename(dd).split('.')[0]
                    cc = basename(dd).split('.')[1]
                    if (station == ss and channel == cc):
                        cmd = 'mv {} {}'.format(f, dd)
                        os.system(cmd)
                        #print(f)
                        break
                    else:
                        continue
                break
            else:
                continue

    print('Stack records:')
    for d in tqdm(glob(join(middle_for_stack, '201*'))):
        for dd in glob(join(d, 'ZDY*')):
            staname = basename(dd).split('.')[0]
            chaname = basename(dd).split('.')[1]
            # for root,dirs,files in os.walk(d):
            #     for f in files:
            #         if os.path.splitext(f)[1] == '.SAC':
            #             count += 1
            stacknumber = 0
            p = subprocess.Popen(['sac'], stdin=subprocess.PIPE)
            s = ''
            s += 'sss\n'
            s += 'zerostack\n'
            stlo = 0
            stla = 0
            for f in glob(join(dd, '*.SAC')):
                cmd = 'saclst stlo stla f %s ' % (f)
                junk, stlo, stla = os.popen(cmd).read().split()
                s += 'addstack %s\n' % (f)
                stacknumber += 1
            name = ['DISPL', staname, chaname, 'SAC']
            name = '.'.join(name)
            s += 'timewindow -20 180\n'
            s += 'sumstack\n'
            s += 'writestack %s\n' % (name)
            s += 'quitsub\n'
            s += 'r %s\n' % (name)
            if int(year) == 2015:
                s += 'ch nzyear 2015 nzjday 290 nzhour 00 nzmin 00 nzsec 00 nzmsec 000\n'
            elif int(year) == 2016:
                s += 'ch nzyear 2016 nzjday 291 nzhour 00 nzmin 00 nzsec 00 nzmsec 000\n'
            elif int(year) == 2017:
                s += 'ch nzyear 2017 nzjday 293 nzhour 00 nzmin 00 nzsec 00 nzmsec 000\n'
            s += 'ch evlo 100.098 evla 38.7485 stlo %.6f stla %.6f\n' % (
                float(stlo), float(stla))
            # s += 'ch o 20\n'
            s += 'wh\n'
            s += 'q\n'
            p.communicate(s.encode())
            cmd = 'rm -r %s' % (dd)
            os.system(cmd)

        print('Convert the file format from SAC into MiniSEED:')
        for f in tqdm(glob('DISPL.*')):
            traces = io.load(f, format='sac')
            out_filename = f[:-4] + '.mseed'
            io.save(traces, out_filename)

        print('Rename the MiniSEED files and Reattribution:')
        cmd = ''
        for f in tqdm(glob('*.mseed')):
            amplitude_type = basename(f).split('.')[0]
            staname = basename(f).split('.')[1]
            chaname = basename(f).split('.')[2]
            name = [amplitude_type, staname, chaname]
            name = '.'.join(name)
            os.rename(f, join(dire, name))

        cmd = 'mv %s %s\n' % ('*.SH?', d)
        cmd += 'rm DISPL.*\n'
        os.system(cmd)
Example 36
 def savetraces(self):
     for tr in self.trs:
         io.save(tr, 'synthetics_depth{0}km_dist{1}km_azi{2}deg_{3}.mseed'.format(
             str(int(self.source_depth)//1000).zfill(3),
             str(int(self.dist)//1000).zfill(3),
             str(int(self.azi)).zfill(4),
             tr.channel))
Example 37
#!/usr/bin/env python

from pyrocko import io
import sys

for filename in sys.argv[1:]:
    traces = io.load(filename, format='sac')
    if filename.lower().endswith('.sac'):
        out_filename = filename[:-4] + '.mseed'
    else:
        out_filename = filename + '.mseed'

    io.save(traces, out_filename)

Example 38
    def call(self):
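        # Cross-correlate a selected master window against matching traces
        # in the pile and stack the correlations per channel.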
        self.cleanup()

        try:
            viewer = self.get_viewer()
            lowpass = viewer.lowpass
            highpass = viewer.highpass

            markers = viewer.selected_markers()
            if not markers:
                return

            if len(markers) != 1:
                return

            marker = markers[0]
            master_tmin, master_tmax = marker.tmin, marker.tmax
            if master_tmin >= master_tmax:
                return

        except NoViewerSet:
            viewer = None
            master_tmin, master_tmax = self.master_tmin, self.master_tmax

        tpad = 0.
        if highpass:
            tpad = 1. / highpass

        pile = self.get_pile()
        masters = {}

        def preprocess(_tr):
            if self.downsample:
                _tr.downsample_to(1. / self.downsample)

            if highpass:
                _tr.highpass(4, highpass)

            if lowpass:
                _tr.lowpass(4, lowpass)

        for tr in pile.all(tmin=master_tmin, tmax=master_tmax, tpad=tpad):
            for m in markers:
                if m.match_nslc(tr.nslc_id):
                    preprocess(tr)
                    tr.chop(tr.wmin, tr.wmax)
                    masters[tr.nslc_id] = tr
                    break

        if self.apply_to_all:
            tmin, tmax = pile.get_tmin() + tpad, pile.get_tmax()
        else:
            tmin, tmax = self.get_viewer().get_time_range()

        normalization = {
            'Off': None,
            'Normal': 'normal',
            'Gliding': 'gliding'
        }[self.normalization]

        for traces in pile.chopper(tmin=tmin, tmax=tmax, want_incomplete=True):
            sccs = {}
            sccn = {}
            for b in traces:
                nslc = b.nslc_id
                if nslc in masters:
                    a = masters[nslc]
                    preprocess(b)

                    c = trace.correlate(a,
                                        b,
                                        mode='valid',
                                        normalization=normalization,
                                        use_fft=self.use_fft)

                    c.shift(-c.tmin + b.tmin)
                    c.meta = {'tabu': True}

                    scc = sccs.get(nslc, None)
                    if not scc:
                        scc = c.copy()
                        scc.meta = {'tabu': True}
                        scc.wmin = b.wmin
                        scc.wmax = b.wmax
                        scc.set_codes(location=scc.location + '_SUM')
                        sccn[nslc] = 1
                    else:
                        scc.add(c)
                        sccn[nslc] += 1

                    sccs[nslc] = scc

            for nslc_id, scc in sccs.items():
                scc.ydata /= sccn[nslc_id]
                scc.chop(scc.wmin, scc.wmax)

                markers = []
                for t, a in zip(
                        *scc.peaks(self.threshold, tsearch=self.tsearch)):
                    m = PhaseMarker(tmin=t,
                                    tmax=t,
                                    phasename='%1.3f' % a,
                                    kind=3,
                                    nslc_ids=(nslc_id, ))
                    markers.append(m)

                if viewer:
                    self.add_traces([scc])
                    self.add_markers(markers)
                else:
                    io.save([scc], self.out_path, format='from_extension')
Example 39
def run(infile, outfn, station, t0, samplingrate, chan='n', nw='', loc=''):
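    # Convert a columnar ASCII data file into MiniSEED trace(s); assumes a
    # module-level `channel_dict` mapping channel letters to column indices.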


    channels = 'NESW'

    #read in data:
    data = []
    print('\t reading file {0} ... '.format(infile))
    Fin = open(infile)
    for line in Fin:
        if not line.strip() or line.strip().startswith('#'):
            continue
        data.append([int(float(i)) for i in line.strip().split()])
    Fin.close()
    data = array(data, dtype=int32)
    #print '...done!'

    stationname = station 
    deltat = 1./samplingrate
    
    lo_traces = []

    lo_chans = []
    try:
        lo_chans = list(chan.lower())
    except Exception:
        lo_chans = ['n']

    print('\t building MiniSeed trace object(s)...')
    if len(lo_chans) > 1:
        try:
            for c in lo_chans:
                location = c.upper()
                idx_ch = channel_dict[c]
                if idx_ch in [0, 2]:
                    channel = 'NS'
                else:
                    channel = 'EW'

                print('station {1} - channel {0} - location {2}'.format(
                    channel, stationname, location))
                if idx_ch in [0, 1]:
                    lo_traces.append(trace.Trace(
                        station=stationname, channel=channel,
                        location=location, deltat=deltat, tmin=t0,
                        ydata=data[:, idx_ch]))
                else:
                    # correct for polarity of 'redundant' channels
                    lo_traces.append(trace.Trace(
                        station=stationname, channel=channel,
                        location=location, deltat=deltat, tmin=t0,
                        ydata=-data[:, idx_ch]))
        except Exception:
            lo_chans = [chan.lower()[0]]
            lo_traces = []

    if len(lo_chans) == 1:
        idx_ch = channel_dict[lo_chans[0]]
        location = channels[idx_ch]
        if idx_ch in [0,2]:
            channel = 'NS'
        else:
            channel = 'EW'

        if idx_ch in [2,3]:
            data *= -1

        lo_traces = [trace.Trace(station=stationname, channel=channel, 
                    location=location, deltat=deltat, tmin=t0, ydata=data)]

    #pdb.set_trace()
    #print '...done!'

    print('\t writing file %s ... ' % outfn)
    io.save(lo_traces, outfn)
    print('\t ...done')
    return
                  mechanisms=mechanisms,
                  magnitudes=magnitudes,
                  stf=stf)


    # setup stations/targets:
    #stats = load_stations(webnet+'/meta/stations.pf')
    stats = load_stations('stations.pf')
    #stats = []
    # Scrutinize the swarm using matplotlib

    noise = Noise(files='/media/usb/webnet/mseed/2008')

    # convert loaded stations to targets (see function at the top).
    #targets = guess_targets_from_stations(stats)
    targets = get_targets(stats, noise.data_pile, store_id=store_id)
    Visualizer(swarm, stats)

    # Processing that data will return a pyrocko.gf.seismosizer.Response object.
    response = engine.process(sources=swarm.get_sources(),
                              targets=targets)

    # Save the events
    dump_events(swarm.get_events(), 'events_swarm.pf')
    io.save(response.pyrocko_traces(), 'swarm.mseed')

    convolved_traces = stf.post_process(response)

    # Save traces:
    io.save(convolved_traces.traces_list(), 'swarm_stf.mseed')
Example 41
    def call(self):
        '''Main work routine of the snuffling.'''

        self.cleanup()

        period_highpass = 1. / self.corner_highpass
        tpad = period_highpass

        try:
            viewer = self.get_viewer()
            markers = viewer.selected_markers()
            if not markers:
                return

            if len(markers) != 1:
                return

            marker = markers[0]
            master_tmin, master_tmax = marker.tmin, marker.tmax
            if master_tmin >= master_tmax:
                return

        except NoViewerSet:
            viewer = None
            master_tmin, master_tmax = self.master_tmin, self.master_tmax

        pile = self.get_pile()
        masters = {}
        for tr in pile.all(tmin=master_tmin, tmax=master_tmax, tpad=tpad):
            if self.downsample is not None:
                tr.downsample_to(1. / self.downsample)
            tr.highpass(4, self.corner_highpass)
            tr.lowpass(4, self.corner_lowpass)
            tr.chop(tr.wmin, tr.wmax)
            masters[tr.nslc_id] = tr

        if self.apply_to_all:
            tmin, tmax = pile.get_tmin() + tpad, pile.get_tmax()
        else:
            tmin, tmax = self.get_viewer().get_time_range()

        tmaster = master_tmax - master_tmin
        tinc = min(20 * tmaster, max(tmaster, tmax - tmin))

        for traces in pile.chopper(tmin=tmin,
                                   tmax=tmax,
                                   tinc=tinc,
                                   tpad=tmaster + tpad,
                                   want_incomplete=False):
            scc = None
            sccn = 0
            for b in traces:
                nslc = b.nslc_id
                if nslc in masters:
                    a = masters[nslc]
                    if self.downsample is not None:
                        b.downsample_to(1. / self.downsample)
                    b.highpass(4, self.corner_highpass)
                    b.lowpass(4, self.corner_lowpass)
                    normalization = {
                        'Off': None,
                        'Normal': 'normal',
                        'Gliding': 'gliding'
                    }[self.normalization]
                    c = trace.correlate(a,
                                        b,
                                        mode='valid',
                                        normalization=normalization)
                    c.shift(-c.tmin + b.tmin)
                    c.meta = {'tabu': True}

                    if scc is None:
                        scc = c.copy()
                        scc.wmin = b.wmin
                        scc.wmax = b.wmax
                        scc.set_codes(network='',
                                      station='Sum Cross Correlation',
                                      location='',
                                      channel='')
                        sccn = 1

                    else:
                        scc.add(c)
                        sccn += 1

            if scc is not None:
                scc.ydata /= sccn
                scc.chop(scc.wmin, scc.wmax)

                markers = []
                for t, a in zip(*scc.peaks(self.threshold,
                                           tsearch=2. / self.corner_highpass)):
                    m = EventMarker(
                        model.Event(time=t,
                                    lat=0.,
                                    lon=0.,
                                    name='Event(%.2g)' % a))
                    markers.append(m)

                if viewer:
                    self.add_traces([scc])
                    self.add_markers(markers)
                else:
                    io.save([scc], self.out_path, format='from_extension')
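
# A hedged standalone sketch of trace.correlate as used above, on synthetic
# data (station names, sampling and signals are illustrative):
import numpy as num
from pyrocko import trace

a = trace.Trace(station='MAS', deltat=0.01,
                ydata=num.sin(num.arange(1000) * 0.1))
b = trace.Trace(station='TST', deltat=0.01,
                ydata=num.sin(num.arange(3000) * 0.1))
c = trace.correlate(a, b, mode='valid', normalization='normal')
tshift, coef = c.max()  # lag of best alignment and its correlation value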
Example 42
from pyrocko import gse, io, util

util.setup_logging('test_gse', 'debug')

for g in gse.readgse('test.gse'):
    print g
    tr = g.waveforms[0].trace()
    io.save([tr], 'aa.mseed')
Example 43
from pyrocko import io, pz, trace
from pyrocko.example import get_example_data

# Download example data
get_example_data('STS2-Generic.polezero.txt')
get_example_data('test.mseed')

# read poles and zeros from SAC format pole-zero file
zeros, poles, constant = pz.read_sac_zpk('STS2-Generic.polezero.txt')

# one more zero to convert from velocity->counts to displacement->counts
zeros.append(0.0j)

rest_sts2 = trace.PoleZeroResponse(zeros=zeros, poles=poles, constant=constant)

traces = io.load('test.mseed')
out_traces = list(traces)
for tr in traces:

    displacement = tr.transfer(
        1000.,  # rise and fall of time window taper in [s]
        (0.001, 0.002, 5., 10.),  # frequency domain taper in [Hz]
        transfer_function=rest_sts2,
        invert=True)  # to change to (counts->displacement)

    # change channel id, so we can distinguish the traces in a trace viewer.
    displacement.set_codes(channel='D' + tr.channel[-1])

    out_traces.append(displacement)

io.save(out_traces, 'displacement.mseed')
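
# Hedged follow-up: the combined response can also be inspected directly,
# e.g. by evaluating it on a frequency axis (range chosen arbitrarily):
import numpy as num
freqs = num.logspace(-3, 1, 100)
values = rest_sts2.evaluate(freqs)  # complex transfer-function values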
Example 44
    def save(self):
        default_fn = 'BeamTraces_baz%s_slow%s.mseed' % (self.bazi, self.slow)
        fn = self.output_filename('Template for output files', default_fn)
        io.save(self.stacked.values(), fn)
Example 45
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--raw_only',
                      dest='raw_only',
                      action='store_true',
                      default=False,
                      help='Skip restitution of data')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an interruption')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=15.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='iris,geofon',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option(
        '--nstations-wanted',
        dest='nstations_wanted',
        default=600,
        metavar='N',
        type=int,
        help='number of stations to select initially; the default of '
        '%default is chosen to keep download time moderate')

    parser.add_option('--magmin',
                      dest='magmin',
                      metavar='VALUE',
                      default=6.0,
                      type=float,
                      help='minimum magnitude of events')

    parser.add_option('--minlen',
                      dest='minlen',
                      metavar='VALUE',
                      default=100.0,
                      type=float,
                      help='minimum length of traces')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add station selection file')

    (options, args) = parser.parse_args(sys.argv[1:])
    magmin = options.magmin
    minlen = options.minlen
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --local-stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon,
                                                magmin=magmin)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join('events', cwd, 'data')
        output_dir = event_dir
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)
        tmin = tmin - options.padding_factor
        tmax = tmax + options.padding_factor

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad + 1000.
    tmax += tpad + 1000.

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('?%s?' % s
                                               for s in priority_band_code))
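
    # e.g. --band-codes B,H yields the FDSN channel pattern '?B?,?H?'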

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    priority_units = ['M/S', 'M', 'M/S**2']

    output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'
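
    # io.save expands this template per trace, e.g. to names like
    # 'data_GE.APE..BHZ_<tmin>.mseed' (the exact tmin formatting is
    # handled by pyrocko)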

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'palantiriown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)
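
    # illustrative example (hypothetical values): with fmin=0.01 Hz,
    # fmax=1.0 Hz and ffade_factors=(0.5, 2.0), ftap becomes
    # (0.005, 0.01, 1.0, 2.0), the cosine-tapered passband handed to
    # tr.transfer() during restitution below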

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = ws.station(site=site,
                            format='text',
                            level='channel',
                            **extra_args)

        except ws.EmptyResult:
            logger.error('No stations matching given criteria.')
            sx = None

        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from selection file')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # use the first site that has this station
                    nsl_to_station[nsl] = s

        logger.info('number of stations found: %i' % len(nsl_to_station))
    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)

        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None
                    try:
                        tmin_, tmax_ = timewindow(time_, dist, depth_)

                        tmin_this = tmin_ - tpad
                        tmax_this = tmax_ + tpad

                        tmin_req = max(tmin_win, tmin_this)
                        tmax_req = min(tmax_win, tmax_this)

                        if channel.sample_rate:
                            deltat = 1.0 / channel.sample_rate.value
                        else:
                            deltat = 1.0

                        if tmin_req < tmax_req:
                            # extend time window by some samples because otherwise
                            # sometimes gaps are produced
                            selection.append(nslc + (tmin_req - deltat * 10.0,
                                                     tmax_req + deltat * 10.0))
                    except Exception:
                        pass
            if options.dry_run:
                for (net, sta, loc, cha, tmin, tmax) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))

                        data = ws.dataselect(site=site,
                                             selection=selection_now,
                                             **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)
                        fns.extend(fns2)

                    except ws.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn('an error occurred while downloading data '
                                    'for channels \n  %s' %
                                    '\n  '.join('.'.join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = ws.station(site=site,
                                   level='response',
                                   selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = station.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    #nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units)

                    break

                except station.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except station.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData,
                        trace.InfiniteResponse):
                    failure = 'trace too short, empty, or response invalid'

            if failure:
                logger.warn('failed to restitute trace %s.%s.%s.%s (%s)' %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            try:
                rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                             deoverlap='crossfade_cos')

                fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
                rest_traces_a = []
                if win_a:
                    for tr in rest_traces:
                        try:
                            rest_traces_a.append(
                                tr.chop(win_a[0],
                                        win_a[1] + otpad,
                                        inplace=False))
                        except trace.NoData:
                            pass
            except Exception:
                pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations_prep = op.join(output_dir, 'stations_disp.txt')
    fn_event = op.join(event_dir, 'event.txt')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)
        traces = trace.degapper(traces, maxgap=50, maxlap=50)
        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warn('Cannot downsample %s.%s.%s.%s: %s' %
                                (tr.nslc_id + (e, )))
                    continue

        if deltat is not None:
            traces = keep

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False
        trss = []
        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)
                        trss.append(tr)
            if proc and proc[-1].tmax - proc[-1].tmin >= minlen:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

        gaps = []

    prep_stations = list(used_stations)
    prep_stations_one = []
    cluster_stations_one = []
    prep_stations_cluster = prep_stations.copy()
    for st in prep_stations:
        for channel in [
                'BHE', 'BHN', 'BHZ', 'BH1', 'BH2', 'HHE', 'HHN', 'HHZ'
        ]:
            try:
                st.remove_channel_by_name(channel)
            except Exception:
                pass
        prep_stations_one.append(st)

    for st in prep_stations_cluster:
        for channel in ['R', 'T']:
            try:
                st.remove_channel_by_name(channel)
            except Exception:
                pass
        cluster_stations_one.append(st)

    util.ensuredirs(fn_stations_prep)
    model.dump_events([event], fn_event)
    from subprocess import call
    script = "cat" + " " + output_dir + "/rest/*.mseed" + "> " + output_dir + "/traces.mseed"
    call(script, shell=True)
    #    script = "cat"+" "+ output_dir+"/prepared/*..*" +"> "+ output_dir+"/traces_rotated.mseed"
    #    call(script, shell=True)

    traces = io.load(output_dir + "/traces.mseed")

    # differentiate displacement to velocity (first differences divided by
    # the sampling interval)
    for tr in traces:
        tr.set_ydata(num.diff(tr.ydata) / tr.deltat)
    io.save(traces, output_dir + "/traces_velocity.mseed")
    cluster_stations_ones = []
    for st in cluster_stations_one:
        add = 0
        for tr in traces:
            if st.station == tr.station:
                add = 1
                for stx in cluster_stations_ones:
                    if stx.station == st.station:
                        add = 0
                if add == 1:
                    cluster_stations_ones.append(st)

    prep_stations_ones = []
    for st in prep_stations_one:
        add = 0
        for tr in traces:
            if st.station == tr.station:
                add = 1
                for stx in prep_stations_ones:
                    if stx.station == st.station:
                        add = 0
                if add == 1:
                    prep_stations_ones.append(st)
    gaps = []
    remove = []

    for tr in traces:
        for st in cluster_stations_ones:
            for channel in st.channels:
                if (tr.network == st.network and tr.station == st.station
                        and tr.location == st.location
                        and channel.name == tr.channel):
                    gaps.append(st.station)

    remove = [x for x in gaps if gaps.count(x) > 1]
    for re in remove:
        for st in cluster_stations_ones:
            if st.station == re:
                try:
                    cluster_stations_ones.remove(st)
                except Exception:
                    pass

    gaps = []
    remove = []
    for tr in traces:
        for st in prep_stations_ones:
            for channel in st.channels:
                if (tr.network == st.network and tr.station == st.station
                        and tr.location == st.location
                        and channel.name == tr.channel):
                    gaps.append(st.station)
    remove = [x for x in gaps if gaps.count(x) > 1]
    for re in remove:
        for st in prep_stations_ones:
            if st.station == re:
                try:
                    prep_stations_ones.remove(st)
                except Exception:
                    pass
    fn_stations_cluster = op.join(output_dir, 'stations_cluster.txt')
    model.dump_stations(prep_stations_cluster, fn_stations_prep)
    model.dump_stations(prep_stations_cluster, fn_stations_cluster)

    logger.info('prepared waveforms from %i stations' %
                len(prep_stations_cluster))
Example 46
import os
import glob
import os.path as op

from pyrocko import io

WD = op.abspath(op.realpath(os.curdir))

os.chdir(mseed_path)

lo_mseeds = glob.glob('*')

os.chdir(WD)

outpathname = 'corrected_traces'
outpath = op.abspath(op.realpath(op.join('.', outpathname)))

if not op.exists(outpath):
    os.makedirs(outpath)
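
# ampl_dict (and mseed_path above) are defined elsewhere in this script; a
# hedged sketch of the expected ampl_dict layout (station -> component
# letter -> amplification factor; values purely illustrative):
#
#     ampl_dict = {'STA1': {'Z': 1.2, 'N': 1.1, 'E': 0.9}}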

for F in lo_mseeds:
    in_fn = op.abspath(op.realpath(op.join(mseed_path, F)))
    out_fn = op.abspath(op.realpath(op.join(outpath, F)))

    traces = io.load(in_fn)

    for tr in traces:
        stat = tr.station
        channel = tr.channel.upper()[-1]
        fac = ampl_dict[stat][channel]
        tr.ydata = fac * tr.get_ydata() / 10**9

        print F, stat, channel, fac

    io.save(traces, out_fn)
Example 47
def command_extract(args):
    def setup(parser):
        parser.add_option(
            '--format',
            dest='format',
            default='mseed',
            choices=['mseed', 'sac', 'text', 'yaff'],
            help='export to format "mseed", "sac", "text", or "yaff". '
            'Default is "mseed".')

        fndfl = 'extracted/%(irecord)s_%(args)s.%(extension)s'
        parser.add_option('--output',
                          dest='output_fn',
                          default=fndfl,
                          metavar='TEMPLATE',
                          help='output path template [default: "%s"]' % fndfl)

    parser, options, args = cl_parse('extract', args, setup=setup)
    try:
        sdef = args.pop()
    except Exception:
        parser.error('cannot get <selection> argument')

    try:
        gdef = gf.meta.parse_grid_spec(sdef)
    except gf.meta.GridSpecError as e:
        die(e)

    store_dir = get_store_dir(args)

    extensions = {
        'mseed': 'mseed',
        'sac': 'sac',
        'text': 'txt',
        'yaff': 'yaff'
    }

    try:
        store = gf.Store(store_dir)
        for args in store.config.iter_extraction(gdef):
            gtr = store.get(args)
            if gtr:
                tr = trace.Trace('',
                                 '',
                                 '',
                                 util.zfmt(store.config.ncomponents) %
                                 args[-1],
                                 ydata=gtr.data,
                                 deltat=gtr.deltat,
                                 tmin=gtr.deltat * gtr.itmin)

                additional = dict(args='_'.join('%g' % x for x in args),
                                  irecord=store.str_irecord(args),
                                  extension=extensions[options.format])

                io.save(tr,
                        options.output_fn,
                        format=options.format,
                        additional=additional)

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)
Example 48
    def _make_test_ahfull_kiwi_data(self):
        trs_all = []
        setups = []
        for i in range(100):
            s = AhfullKiwiTestSetupEntry(
                vp=3600.,
                vs=2000.,
                density=2800.,
                x=(rand(100., 1000.), rand(100., 1000.), rand(100., 1000.)),
                f=(rand(-1., 1.), rand(-1., 1.), rand(-1., 1.)),
                m6=tuple(rand(-1., 1.) for _ in range(6)),
                tau=0.005,
                deltat=0.001)

            def dump(stuff, fn):
                with open(fn, 'w') as f:
                    f.write(' '.join('%s' % x for x in stuff))
                    f.write('\n')

            dn = mkdtemp(prefix='test-ahfull-')
            fn_sources = op.join(dn, 'sources.txt')
            fn_receivers = op.join(dn, 'receivers.txt')
            fn_material = op.join(dn, 'material.txt')
            fn_stf = op.join(dn, 'stf.txt')

            dump((0., 0., 0., 0.) + s.m6 + s.f, fn_sources)
            dump(s.x + (1, 1), fn_receivers)
            dump((s.density, s.vp, s.vs), fn_material)

            nstf = int(round(s.tau * 5. / s.deltat))
            t = num.arange(nstf) * s.deltat
            t0 = nstf * s.deltat / 2.
            stf = num.exp(-(t-t0)**2 / (s.tau/math.sqrt(2.))**2)

            stf = num.cumsum(stf)
            stf /= stf[-1]
            stf[0] = 0.0

            data = num.vstack((t, stf)).T
            num.savetxt(fn_stf, data)
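
            # the saved STF is a normalized smooth ramp (an integrated
            # Gaussian), i.e. a step-like moment history with a rise time
            # on the order of tau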

            check_call(
                ['ahfull', fn_sources, fn_receivers, fn_material, fn_stf,
                 '%g' % s.deltat, op.join(dn, 'ahfull'), 'mseed', '0'],
                stdout=open('/dev/null', 'w'))

            fns = [op.join(dn, 'ahfull-1-%s-1.mseed' % c) for c in 'xyz']

            trs = []
            for fn in fns:
                trs.extend(io.load(fn))

            for tr in trs:
                tr.set_codes(
                    station='S%03i' % i,
                    channel={'x': 'N', 'y': 'E', 'z': 'D'}[tr.channel])
                tr.shift(-round(t0/tr.deltat)*tr.deltat)

            trs_all.extend(trs)
            setups.append(s)

        setup = AhfullKiwiTestSetup(setups=setups)

        setup.dump(filename=common.test_data_file_no_download(
            'test_ahfull_kiwi_setup.yaml'))
        io.save(trs_all, common.test_data_file_no_download(
            'test_ahfull_kiwi_traces.mseed'))
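
# `rand` above is presumably a module-level helper along these lines
# (hedged sketch, not part of the original listing):
import random

def rand(mi, ma):
    return mi + random.random() * (ma - mi)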
Example 49
def search(config,
           override_tmin=None,
           override_tmax=None,
           show_detections=False,
           show_movie=False,
           show_window_traces=False,
           force=False,
           stop_after_first=False,
           nparallel=6,
           save_imax=False,
           bark=False):

    fp = config.expand_path

    run_path = fp(config.run_path)

    # if op.exists(run_path):
    #     if force:
    #         shutil.rmtree(run_path)
    #     else:
    #         raise common.LassieError(
    #             'run directory already exists: %s' %
    #             run_path)

    util.ensuredir(run_path)

    write_config(config, op.join(run_path, 'config.yaml'))

    ifm_path_template = config.get_ifm_path_template()
    detections_path = config.get_detections_path()
    events_path = config.get_events_path()
    figures_path_template = config.get_figures_path_template()

    config.setup_image_function_contributions()
    ifcs = config.image_function_contributions

    grid = config.get_grid()
    receivers = config.get_receivers()

    norm_map = gridmod.geometrical_normalization(grid, receivers)

    data_paths = fp(config.data_paths)
    for data_path in data_paths:
        if not op.exists(data_path):
            # missing data paths are silently ignored here
            pass

    p = pile.make_pile(data_paths, fileformat='detect')
    if p.is_empty():
        raise common.LassieError('no usable waveforms found')

    for ifc in ifcs:
        ifc.prescan(p)

    shift_tables = []
    tshift_minmaxs = []
    for ifc in ifcs:
        shift_tables.append(ifc.get_table(grid, receivers))
        tshift_minmaxs.append(num.nanmin(shift_tables[-1]))
        tshift_minmaxs.append(num.nanmax(shift_tables[-1]))

    fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs)

    tshift_min = min(tshift_minmaxs)
    tshift_max = max(tshift_minmaxs)

    if config.detector_tpeaksearch is not None:
        tpeaksearch = config.detector_tpeaksearch
    else:
        tpeaksearch = (tshift_max - tshift_min) + 1.0 / fsmooth_min

    tpad = max(ifc.get_tpad() for ifc in ifcs) + \
        (tshift_max - tshift_min) + tpeaksearch

    tinc = (tshift_max - tshift_min) * 10. + 3.0 * tpad
    tavail = p.tmax - p.tmin
    tinc = min(tinc, tavail - 2.0 * tpad)

    if tinc <= 0:
        raise common.LassieError('available waveforms too short \n'
                                 'required: %g s\n'
                                 'available: %g s\n' % (2. * tpad, tavail))

    blacklist = set(tuple(s.split('.')) for s in config.blacklist)
    whitelist = set(tuple(s.split('.')) for s in config.whitelist)
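
    # blacklist/whitelist entries are '.'-joined codes, e.g. 'NET.STA.LOC'
    # becoming ('NET', 'STA', 'LOC'), matched against rec.codes below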

    distances = grid.distances(receivers)
    distances_to_grid = num.min(distances, axis=0)

    distance_min = num.min(distances)
    distance_max = num.max(distances)

    station_index = dict(
        (rec.codes, i) for (i, rec) in enumerate(receivers)
        if rec.codes not in blacklist and (
            not whitelist or rec.codes in whitelist) and (
                config.distance_max is None
                or distances_to_grid[i] <= config.distance_max))

    check_data_consistency(p, config)

    deltat_cf = max(p.deltats.keys())
    assert deltat_cf > 0.0

    # successively double deltat_cf while every image function contribution
    # still supports the coarser CF sampling interval
    while True:
        if not all(ifc.deltat_cf_is_available(deltat_cf * 2) for ifc in ifcs):
            break

        deltat_cf *= 2
    logger.info('CF lassie sampling interval (rate): %g s (%g Hz)' %
                (deltat_cf, 1.0 / deltat_cf))

    ngridpoints = grid.size()

    logger.info('number of grid points: %i' % ngridpoints)
    logger.info('minimum source-receiver distance: %g m' % distance_min)
    logger.info('maximum source-receiver distance: %g m' % distance_max)
    logger.info('minimum travel-time: %g s' % tshift_min)
    logger.info('maximum travel-time: %g s' % tshift_max)

    idetection = 0

    tmin = override_tmin or config.tmin or p.tmin + tpad
    tmax = override_tmax or config.tmax or p.tmax - tpad

    events = config.get_events()
    twindows = []
    if events is not None:
        for ev in events:
            if tmin <= ev.time <= tmax:
                twindows.append(
                    (ev.time + tshift_min - (tshift_max - tshift_min) *
                     config.event_time_window_factor,
                     ev.time + tshift_min + (tshift_max - tshift_min) *
                     config.event_time_window_factor))

    else:
        twindows.append((tmin, tmax))

    for iwindow_group, (tmin_win, tmax_win) in enumerate(twindows):

        nwin = int(math.ceil((tmax_win - tmin_win) / tinc))

        logger.info('start processing time window group %i/%i: %s - %s' %
                    (iwindow_group + 1, len(twindows),
                     util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

        logger.info('number of time windows: %i' % nwin)
        logger.info('time window length: %g s' % (tinc + 2.0 * tpad))
        logger.info('time window payload: %g s' % tinc)
        logger.info('time window padding: 2 x %g s' % tpad)
        logger.info('time window overlap: %g%%' % (100.0 * 2.0 * tpad /
                                                   (tinc + 2.0 * tpad)))

        iwin = -1

        for trs in p.chopper(
                tmin=tmin_win,
                tmax=tmax_win,
                tinc=tinc,
                tpad=tpad,
                want_incomplete=config.fill_incomplete_with_zeros,
                trace_selector=lambda tr: tr.nslc_id[:3] in station_index):
            iwin += 1
            trs_ok = []
            for tr in trs:
                if tr.ydata.size == 0:
                    logger.warn('skipping empty trace: %s.%s.%s.%s' %
                                tr.nslc_id)

                    continue

                if not num.all(num.isfinite(tr.ydata)):
                    logger.warn('skipping trace because of invalid values: '
                                '%s.%s.%s.%s' % tr.nslc_id)

                    continue

                trs_ok.append(tr)

            trs = trs_ok

            if not trs:
                continue

            logger.info('processing time window %i/%i: %s - %s' %
                        (iwin + 1, nwin, util.time_to_str(
                            trs[0].wmin), util.time_to_str(trs[0].wmax)))

            wmin = trs[0].wmin
            wmax = trs[0].wmax

            if config.fill_incomplete_with_zeros:
                trs = zero_fill(trs, wmin - tpad, wmax + tpad)

            # map the padded search window onto integer CF sample indices,
            # anchored at t0 (a multiple of deltat_cf)
            t0 = math.floor(wmin / deltat_cf) * deltat_cf
            iwmin = int(round((wmin - tpeaksearch - t0) / deltat_cf))
            iwmax = int(round((wmax + tpeaksearch - t0) / deltat_cf))
            lengthout = iwmax - iwmin + 1

            pdata = []
            trs_debug = []
            parstack_params = []
            for iifc, ifc in enumerate(ifcs):
                dataset = ifc.preprocess(trs, wmin - tpeaksearch,
                                         wmax + tpeaksearch,
                                         tshift_max - tshift_min, deltat_cf)
                if not dataset:
                    continue

                nstations_selected = len(dataset)

                nsls_selected, trs_selected = zip(*dataset)

                for tr in trs_selected:
                    tr.meta = {'tabu': True}

                trs_debug.extend(trs + list(trs_selected))

                istations_selected = num.array(
                    [station_index[nsl] for nsl in nsls_selected],
                    dtype=num.int)
                arrays = [tr.ydata.astype(num.float) for tr in trs_selected]

                offsets = num.array([
                    int(round((tr.tmin - t0) / deltat_cf))
                    for tr in trs_selected
                ],
                                    dtype=num.int32)

                w = ifc.get_weights(nsls_selected)

                weights = num.ones((ngridpoints, nstations_selected))
                weights *= w[num.newaxis, :]
                weights *= ifc.weight

                shift_table = shift_tables[iifc]

                # mask grid/station pairs with undefined travel times:
                # zero weight and a harmless in-bounds shift
                ok = num.isfinite(shift_table[:, istations_selected])
                bad = num.logical_not(ok)

                shifts = -num.round(shift_table[:, istations_selected] /
                                    deltat_cf).astype(num.int32)

                weights[bad] = 0.0
                shifts[bad] = num.max(shifts[ok])

                pdata.append((list(trs_selected), shift_table, ifc))
                parstack_params.append((arrays, offsets, shifts, weights))

            if config.stacking_blocksize is not None:
                ipstep = config.stacking_blocksize
                frames = None
            else:
                ipstep = lengthout
                frames = num.zeros((ngridpoints, lengthout))

            twall_start = time.time()
            frame_maxs = num.zeros(lengthout)
            frame_argmaxs = num.zeros(lengthout, dtype=num.int)
            ipmin = iwmin
            while ipmin < iwmin + lengthout:
                ipsize = min(ipstep, iwmin + lengthout - ipmin)
                if ipstep == lengthout:
                    frames_p = frames
                else:
                    frames_p = num.zeros((ngridpoints, ipsize))

                for (arrays, offsets, shifts, weights) in parstack_params:
                    frames_p, _ = parstack(arrays,
                                           offsets,
                                           shifts,
                                           weights,
                                           0,
                                           offsetout=ipmin,
                                           lengthout=ipsize,
                                           result=frames_p,
                                           nparallel=nparallel,
                                           impl='openmp')

                if config.sharpness_normalization:
                    frame_p_maxs = frames_p.max(axis=0)
                    frame_p_means = num.abs(frames_p).mean(axis=0)
                    frames_p *= (frame_p_maxs / frame_p_means)[num.newaxis, :]
                    frames_p *= norm_map[:, num.newaxis]

                if config.ifc_count_normalization:
                    frames_p *= 1.0 / len(ifcs)

                frame_maxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    frames_p.max(axis=0)
                frame_argmaxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    pargmax(frames_p)

                ipmin += ipstep
                del frames_p

            twall_end = time.time()

            logger.info('wallclock time for stacking: %g s' %
                        (twall_end - twall_start))

            tmin_frames = t0 + iwmin * deltat_cf

            tr_stackmax = trace.Trace('',
                                      'SMAX',
                                      '',
                                      '',
                                      tmin=tmin_frames,
                                      deltat=deltat_cf,
                                      ydata=frame_maxs)

            tr_stackmax.meta = {'tabu': True}

            trs_debug.append(tr_stackmax)

            if show_window_traces:
                trace.snuffle(trs_debug)

            ydata_window = tr_stackmax.chop(wmin, wmax,
                                            inplace=False).get_ydata()

            logger.info('CF stats: min %g, max %g, median %g' %
                        (num.min(ydata_window), num.max(ydata_window),
                         num.median(ydata_window)))
            # 17 is the maximum number of seiger stations; 4 is an assumed
            # mean noise baseline per missing station
            detector_threshold_seiger = config.detector_threshold - (
                (17 - nstations_selected) * 4)

            if nstations_selected != 17:
                logger.info(
                    'Warning: station outage detected! Number of operable '
                    'stations: %s, detection threshold lowered to: %s'
                    % (nstations_selected, detector_threshold_seiger))

            # collect peak times/amplitudes above threshold inside the
            # payload window; the trailing `or ([], [])` covers the
            # no-peaks case
            tpeaks, apeaks = list(
                zip(*[(tpeak, apeak) for (tpeak, apeak) in zip(
                    *tr_stackmax.peaks(detector_threshold_seiger, tpeaksearch))
                      if wmin <= tpeak and tpeak < wmax])) or ([], [])

            tr_stackmax_indx = tr_stackmax.copy(data=False)
            tr_stackmax_indx.set_ydata(frame_argmaxs.astype(num.int32))
            tr_stackmax_indx.set_location('i')

            for (tpeak, apeak) in zip(tpeaks, apeaks):

                iframe = int(round((tpeak - tmin_frames) / deltat_cf))
                imax = frame_argmaxs[iframe]

                latpeak, lonpeak, xpeak, ypeak, zpeak = \
                    grid.index_to_location(imax)

                idetection += 1

                detection = Detection(id='%06i' % idetection,
                                      time=tpeak,
                                      location=geo.Point(lat=float(latpeak),
                                                         lon=float(lonpeak),
                                                         x=float(xpeak),
                                                         y=float(ypeak),
                                                         z=float(zpeak)),
                                      ifm=float(apeak))

                if bark:
                    common.bark()

                logger.info('detection found: %s' % str(detection))

                f = open(detections_path, 'a')
                f.write(
                    '%06i %s %g %g %g %g %g %g\n' %
                    (idetection,
                     util.time_to_str(tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
                     apeak, latpeak, lonpeak, xpeak, ypeak, zpeak))

                f.close()

                ev = detection.get_event()
                f = open(events_path, 'a')
                model.dump_events([ev], stream=f)
                f.close()

                if show_detections or config.save_figures:
                    fmin = min(ifc.fmin for ifc in ifcs)
                    fmax = min(ifc.fmax for ifc in ifcs)

                    fn = figures_path_template % {
                        'id': util.tts(t0).replace(" ", "T"),
                        'format': 'png'
                    }

                    util.ensuredirs(fn)

                    if frames is not None:
                        frames_p = frames
                        tmin_frames_p = tmin_frames
                        iframe_p = iframe

                    else:
                        iframe_min = max(
                            0, int(round(iframe - tpeaksearch / deltat_cf)))
                        iframe_max = min(
                            lengthout - 1,
                            int(round(iframe + tpeaksearch / deltat_cf)))

                        ipsize = iframe_max - iframe_min + 1
                        frames_p = num.zeros((ngridpoints, ipsize))
                        tmin_frames_p = tmin_frames + iframe_min * deltat_cf
                        iframe_p = iframe - iframe_min

                        for (arrays, offsets, shifts, weights) \
                                in parstack_params:

                            frames_p, _ = parstack(arrays,
                                                   offsets,
                                                   shifts,
                                                   weights,
                                                   0,
                                                   offsetout=iwmin +
                                                   iframe_min,
                                                   lengthout=ipsize,
                                                   result=frames_p,
                                                   nparallel=nparallel,
                                                   impl='openmp')

                        if config.sharpness_normalization:
                            frame_p_maxs = frames_p.max(axis=0)
                            frame_p_means = num.abs(frames_p).mean(axis=0)
                            frames_p *= (frame_p_maxs /
                                         frame_p_means)[num.newaxis, :]
                            frames_p *= norm_map[:, num.newaxis]

                        if config.ifc_count_normalization:
                            frames_p *= 1.0 / len(ifcs)
                    try:
                        plot.plot_detection(grid,
                                            receivers,
                                            frames_p,
                                            tmin_frames_p,
                                            deltat_cf,
                                            imax,
                                            iframe_p,
                                            xpeak,
                                            ypeak,
                                            zpeak,
                                            tr_stackmax,
                                            tpeaks,
                                            apeaks,
                                            detector_threshold_seiger,
                                            wmin,
                                            wmax,
                                            pdata,
                                            trs,
                                            fmin,
                                            fmax,
                                            idetection,
                                            tpeaksearch,
                                            movie=show_movie,
                                            show=show_detections,
                                            save_filename=fn,
                                            event=ev)
                    except Exception:
                        logger.warning(
                            'plotting of detection %06i failed' % idetection,
                            exc_info=True)

                    del frames_p

                if stop_after_first:
                    return

            tr_stackmax.chop(wmin, wmax)
            tr_stackmax_indx.chop(wmin, wmax)
            if save_imax:
                io.save([tr_stackmax, tr_stackmax_indx], ifm_path_template)

            del frames
        logger.info('end processing time window group: %s - %s' %
                    (util.time_to_str(tmin_win), util.time_to_str(tmax_win)))
    cat = Catalog()
    files = glob("%s/../figures/*qml*" % run_path)
    files.sort(key=os.path.getmtime)
    for fname in files:
        cat_read = read_events(fname)
        for event in cat_read:
            cat.append(event)
    cat.write("%s/../all_events_stacking.qml" % run_path, format="QUAKEML")
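    # The merged QuakeML file can be inspected with ObsPy, e.g.:
    #   from obspy import read_events
    #   print(read_events("%s/../all_events_stacking.qml" % run_path))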
Example n. 50
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="allow recreation of output <directory>",
    )

    parser.add_option(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="print debugging information to stderr",
    )

    parser.add_option(
        "--dry-run",
        dest="dry_run",
        action="store_true",
        default=False,
        help="show available stations/channels and exit "
        "(do not download waveforms)",
    )

    parser.add_option(
        "--continue",
        dest="continue_",
        action="store_true",
        default=False,
        help="continue download after a accident",
    )

    parser.add_option(
        "--local-data",
        dest="local_data",
        action="append",
        help="add file/directory with local data",
    )

    parser.add_option(
        "--local-stations",
        dest="local_stations",
        action="append",
        help="add local stations file",
    )

    parser.add_option(
        "--local-responses-resp",
        dest="local_responses_resp",
        action="append",
        help="add file/directory with local responses in RESP format",
    )

    parser.add_option(
        "--local-responses-pz",
        dest="local_responses_pz",
        action="append",
        help="add file/directory with local pole-zero responses",
    )

    parser.add_option(
        "--local-responses-stationxml",
        dest="local_responses_stationxml",
        help="add file with local response information in StationXML format",
    )

    parser.add_option(
        "--window",
        dest="window",
        default="full",
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        "] (time format is YYYY-MM-DD HH:MM:SS)",
    )

    parser.add_option(
        "--out-components",
        choices=["enu", "rtu"],
        dest="out_components",
        default="rtu",
        help="set output component orientations to radial-transverse-up [rtu] "
        "(default) or east-north-up [enu]",
    )

    parser.add_option(
        "--padding-factor",
        type=float,
        default=3.0,
        dest="padding_factor",
        help="extend time window on either side, in multiples of 1/<fmin_hz> "
        "(default: 5)",
    )

    parser.add_option(
        "--credentials",
        dest="user_credentials",
        action="append",
        default=[],
        metavar="SITE,USER,PASSWD",
        help="user credentials for specific site to access restricted data "
        "(this option can be repeated)",
    )

    parser.add_option(
        "--token",
        dest="auth_tokens",
        metavar="SITE,FILENAME",
        action="append",
        default=[],
        help="user authentication token for specific site to access "
        "restricted data (this option can be repeated)",
    )

    parser.add_option(
        "--sites",
        dest="sites",
        metavar="SITE1,SITE2,...",
        #    default='bgr',
        default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080",
        help='sites to query (available: %s, default: "%%default")' %
        ", ".join(g_sites_available),
    )

    parser.add_option(
        "--band-codes",
        dest="priority_band_code",
        metavar="V,L,M,B,H,S,E,...",
        default="V,L,M,B,H,E",
        help="select and prioritize band codes (default: %default)",
    )

    parser.add_option(
        "--instrument-codes",
        dest="priority_instrument_code",
        metavar="H,L,G,...",
        default="H,L,O,",
        help="select and prioritize instrument codes (default: %default)",
    )

    parser.add_option(
        "--radius-min",
        dest="radius_min",
        metavar="VALUE",
        default=0.0,
        type=float,
        help="minimum radius [km]",
    )

    parser.add_option(
        "--tinc",
        dest="tinc",
        metavar="VALUE",
        default=3600.0 * 12.0,
        type=float,
        help="length of seperate saved files in s",
    )

    parser.add_option(
        "--nstations-wanted",
        dest="nstations_wanted",
        metavar="N",
        type=int,
        help="number of stations to select initially",
    )

    (options, args) = parser.parse_args(sys.argv[1:])
    if len(args) not in (9, 6, 5):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, "debug")
    else:
        util.setup_logging(program_name, "info")

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical("cannot use local responses in PZ and RESP "
                        "format at the same time")
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml,
    ):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical("can only handle local responses from either PZ or "
                        "RESP or StationXML. Cannot yet merge different "
                        "response formats.")
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical("--local-responses-resp can only be used "
                        "when --stations is also given.")
        sys.exit(1)

    try:
        ename = ""
        magnitude = None
        mt = None
        if len(args) == 9:
            time = util.str_to_time(args[0] + " " + args[1])
            lat = float(args[2])
            lon = float(args[3])
            depth = float(args[4]) * km
            iarg = 5

        elif len(args) == 6:
            if ":" not in args[1]:
                sname_or_date = None
                lat = float(args[0])
                lon = float(args[1])
                event = None
                time = None
            else:
                sname_or_date = args[0] + " " + args[1]

            iarg = 2

        elif len(args) == 5:
            sname_or_date = args[0]
            iarg = 1

        if len(args) in (6, 5) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical("no event found")
                sys.exit(1)
            elif len(events) > 1:
                logger.critical("more than one event found")
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        event_dir = op.join("data", "events", eventname)
        output_dir = op.join(event_dir, "waveforms")
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time,
            lat=lat,
            lon=lon,
            depth=depth,
            name=ename,
            magnitude=magnitude,
            moment_tensor=mt,
        )

    if options.window == "full":
        if event is None:
            logger.critical("need event for --window=full")
            sys.exit(1)

        low_velocity = 1500.0
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == "p":
        if event is None:
            logger.critical("need event for --window=p")
            sys.exit(1)

        phases = list(map(cake.PhaseDef, "P p".split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error("required phase arrival not found")
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(",")
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    priority_band_code = options.priority_band_code.split(",")
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical("invalid band code: %s" % s)
            sys.exit(1)

    priority_instrument_code = options.priority_instrument_code.split(",")
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical("invalid instrument code: %s" % s)
            sys.exit(1)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=",".join("?%s?" % s for s in priority_band_code),
    )

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ["M/S", "M", "M/S**2"]

    output_units = "M"

    sites = [x.strip() for x in options.sites.split(",") if x.strip()]
    tinc = options.tinc
    # for site in sites:
    #     if site not in g_sites_available:
    #         logger.critical('unknown FDSN site: %s' % site)
    #         sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(",")
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(",")
            with open(token_filename, "r") as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical("cannot get token from file: %s" % token_filename)
            sys.exit(1)

    fn_template0 = (
        "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed")

    fn_template_raw = op.join(output_dir, "raw", fn_template0)
    fn_template_raw_folder = op.join(output_dir, "raw/", "traces.mseed")
    fn_stations_raw = op.join(output_dir, "stations.raw.txt")
    fn_template_rest = op.join(output_dir, "rest", fn_template0)
    fn_commandline = op.join(output_dir, "seigerdown.command")

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)
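    # The four values in ftap are the corner frequencies (f1, f2, f3, f4) of
    # the frequency-domain cosine taper used in restitution: zero response
    # below f1 and above f4, flat between f2 and f3.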

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                "iris": dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == "geonet":
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(site in g_user_credentials
                                       or site in g_auth_tokens),
                )

            logger.info("downloading channel information (%s)" % site)
            sx = fdsn.station(site=site,
                              format="text",
                              level="channel",
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error("No stations matching given criteria. (%s)" % site)
            sx = None

        # keep one entry per site (None on failure) so that zip(sxs, sites)
        # below stays aligned
        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}
    for sx, site in zip(sxs, sites):
        if sx is None:
            continue
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s  # using first site with this station
    logger.info("number of stations found: %i" % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info("number of stations selected: %i" % len(nsls_selected))

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info("time window %i/%i (%s - %s)" %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win),
            )

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        deltat = 1.0 / channel.sample_rate.value
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        selection.append(nslc + (tmin_req - deltat * 10.0,
                                                 tmax_req + deltat * 10.0))

            if options.dry_run:
                # note: don't unpack into tmin/tmax here, that would clobber
                # the outer time window variables
                for (net, sta, loc, cha, _, _) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
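                # request waveforms in batches of `neach` channels to keep
                # individual FDSN dataselect requests at a manageable size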
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ""
                        if nbatches > 1:
                            sbatch = " (batch %i/%i)" % (
                                (i // neach) + 1, nbatches)

                        logger.info("downloading data (%s)%s" % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            if tr.station == "7869":
                                tr.station = "MOER"
                                tr.network = "LE"
                                tr.location = ""
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        io.save(trs, fn_template_raw_folder)
                        for fn in fns2:
                            if fn in fns:
                                logger.warning("overwriting file %s", fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            "an error occurred while downloading data "
                            "for channels\n  %s" %
                            "\n  ".join(".".join(x[:4])
                                        for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return "" if x == 1 else "s"

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info("selected: %s.%s.%s.%s from site%s %s" %
                        (nslc + (plural_s(len(sites)), "+".join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info("selected (%s): %i channel%s (%i station%s)" % (
                "+".join(sites),
                nchannels,
                plural_s(nchannels),
                nstations,
                plural_s(nstations),
            ))

        logger.info("selected total: %i channel%s (%i station%s)" % (
            nchannels_all,
            plural_s(nchannels_all),
            nstations_all,
            plural_s(nstations_all),
        ))

        logger.info("dry run done.")
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info("downloading response information (%s)" % site)
            sxs[site] = fdsn.station(site=site,
                                     level="response",
                                     selection=selection)
            sited = site

            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            elif site == "http://ws.gpi.kit.edu":
                sited = "kit"
            if site == "http://188.246.25.142:8080":
                sited = "moer"

            sxs[site].dump_xml(filename=op.join(output_dir, "stations.%s.xml" %
                                                sited))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site["local"] = set()
        plocal = pile.make_pile(options.local_data, fileformat="detect")
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.station == "7869":
                    tr.station = "MOER"
                    tr.network = "LE"
                    tr.location = ""
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site["local"].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append("local")

    if options.local_responses_pz:
        sxs["local"] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs["local"] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs["local"] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error("no data available")
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)

    # fixed restitution window length; a data-adaptive alternative would be
    # nice_seconds_floor(p.get_deltatmin() * 500000.)
    otinc = 3600.0
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            if tr.station == "7869":
                tr.station = "MOER"
                tr.network = "LE"
                tr.location = ""
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units,
                    )

                    break

                except stationxml.NoResponseInformation:
                    failure.append("%s: no response information" % site)

                except stationxml.MultipleResponseInformation:
                    failure.append("%s: multiple response information" % site)

            if response is None:
                failure = ", ".join(failure)

            else:
                failure = ""
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = "trace too short"

            if failure:
                logger.warning(
                    "failed to restitute trace %s.%s.%s.%s (%s)" %
                    (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap="crossfade_cos")

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    if tr.station == "7869":
                        tr.station = "MOER"
                        tr.network = "LE"
                        tr.location = ""
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s"

    fn_waveforms = op.join(output_dir, "prepared", fn_template1)
    fn_stations = op.join(output_dir, "stations.prepared.txt")
    fn_event = op.join(event_dir, "event.txt")

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warning("Cannot downsample %s.%s.%s.%s: %s" %
                                   (tr.nslc_id + (e, )))
                    continue

        if deltat is not None:
            # drop traces which could not be downsampled
            traces = keep

        if options.out_components == "rtu":
            pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z"))
        elif options.out_components == "enu":
            pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z"))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    logger.info("prepared waveforms from %i stations" % len(stations))
Example n. 51
from pyrocko import trace, util, io
import numpy as num

nsamples = 100
tmin = util.str_to_time('2010-02-20 15:15:30.100')
data1 = num.random.random(nsamples)
data2 = num.random.random(nsamples)
t1 = trace.Trace(network='1-',
                 station='TE',
                 channel='Z',
                 deltat=0.5,
                 tmin=tmin,
                 ydata=data1)
t2 = trace.Trace(network='1-',
                 station='TE',
                 channel='N',
                 deltat=0.5,
                 tmin=tmin,
                 ydata=data2)
io.save([t1, t2], 'my_precious_test_traces.mseed')  # all traces in one file
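
# A minimal sketch of reading the file back for a sanity check (assumes the
# file written above exists in the working directory):
loaded_traces = io.load('my_precious_test_traces.mseed')
for tr in loaded_traces:
    print(tr)  # one summary line per trace: codes, time span, sample count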
Example n. 52
def iter_chunked(tinc,
                 path,
                 data_pile,
                 tmin=None,
                 tmax=None,
                 minlat=49.1379,
                 maxlat=49.1879,
                 minlon=8.1223,
                 maxlon=8.1723,
                 channels=["EH" + "[ZNE]"],
                 client_list=["BGR"],
                 download=True,
                 seiger=True,
                 selection=None,
                 path_waveforms=None,
                 sds=None,
                 stream=False,
                 reject_blacklisted=None,
                 tpad=0,
                 tstart=None,
                 tstop=None,
                 hf=10,
                 lf=1,
                 deltat=None,
                 models=None):
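    # Summary (added for readability): iterate over `data_pile` in windows of
    # `tinc` seconds, dump each window's traces to disk, run `process` on the
    # window, delete the temporary files, and finally merge the per-window
    # association QuakeML files into a single catalog.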
    try:
        tstart = util.stt(tmin) if tmin else None
        tstop = util.stt(tmax) if tmax else None
    except Exception:
        # tmin/tmax may already be numeric timestamps
        pass
    model_path = os.path.dirname(
        os.path.abspath(__file__)) + "/model/EqT_model.h5"
    deltat_cf = min(data_pile.deltats.keys())
    for i, trs in enumerate(
            data_pile.chopper(tinc=tinc,
                              tmin=tstart,
                              tmax=tstop,
                              tpad=tpad,
                              keep_current_files_open=False,
                              want_incomplete=True)):
        tminc = None
        tmaxc = None
        for tr in trs:
            if tminc is None:
                tminc = tr.tmin
                tmaxc = tr.tmax
            else:
                if tminc < tr.tmin:
                    tminc = tr.tmin
                if tmaxc > tr.tmax:
                    tmaxc = tr.tmax
        for tr in trs:
            tr.highpass(4, 5)
            try:
                tr.chop(tminc, tmaxc)
            except Exception:
                pass
            date_min = download_raw.get_time_format_eq(tminc)
            date_max = download_raw.get_time_format_eq(tmaxc)
            io.save(
                tr, "%s/downloads/%s/%s.%s..%s__%s__%s.mseed" %
                (path, tr.station, tr.network, tr.station, tr.channel,
                 date_min, date_max))

        process(path,
                tmin=tminc,
                tmax=tmaxc,
                minlat=minlat,
                maxlat=maxlat,
                minlon=minlon,
                maxlon=maxlon,
                channels=channels,
                client_list=client_list,
                download=download,
                seiger=seiger,
                selection=selection,
                path_waveforms=path_waveforms,
                stream=stream,
                model=None,
                iter=i,
                models=models,
                sds=None)
        for tr in trs:
            os.remove("%s/downloads/%s/%s.%s..%s__%s__%s.mseed" %
                      (path, tr.station, tr.network, tr.station, tr.channel,
                       date_min, date_max))

    cat = Catalog()
    files = glob("%s/asociation*/associations.xml" % path)
    files.sort(key=os.path.getmtime)
    for fname in files:
        cat_read = read_events(fname)
        for event in cat_read:
            cat.append(event)
    cat.write("%s/events_eqt.qml" % path, format="QUAKEML")
Example n. 53
# read events:
events = model.load_events('/home/zmaw/u254061/master/event_marker_IPMA.txt')

# Create a pile from all miniseed files in the directories given on the command line:
outpile = pile.make_pile(dirs)

f = open('/home/zmaw/u254061/master/event_marker_IPMA.txt')
for line in f:
    if line.lstrip().startswith('#'):
        continue
    toks = line.split()
    timedate, timetime = toks[1], toks[2]
    gtime = util.str_to_time(str(timedate+' '+timetime))

    trange = [gtime-100, gtime+1000]
    new_pile = []
    for traces in outpile.chopper(trange[0], trange[1], load_data=True, degap=False):
        if traces:
            window_start = traces[0].wmin
            timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H') 
            io.save(traces, "/scratch/local1/doctar_cut/cut_%s.mseed"%timestring)

f.close()

# Call snuffler on the pile with the station data:
#outpile.snuffle(stations=stats)



Example n. 54
    def save(self, traces, fn='beam.pf'):
        io.save(traces, fn)
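    # Hedged usage sketch (`beam` stands for an instance of the surrounding
    # class, `stacked` for a list of pyrocko traces):
    #   beam.save(stacked, fn='beam.pf')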
Example n. 55
    def ensure_waveforms(self, engine, sources, path, tmin=None, tmax=None):

        path_waveforms = op.join(path, 'waveforms')
        util.ensuredir(path_waveforms)

        p = self._get_pile(path_waveforms)

        nslc_ids = set(target.codes for target in self.get_targets())

        def have_waveforms(tmin, tmax):
            trs_have = p.all(
                tmin=tmin, tmax=tmax,
                load_data=False, degap=False,
                trace_selector=lambda tr: tr.nslc_id in nslc_ids)

            return any(tr.data_len() > 0 for tr in trs_have)

        def add_files(paths):
            p.load_files(paths, fileformat='mseed', show_progress=False)

        path_traces = op.join(
            path_waveforms,
            '%(wmin_year)s',
            '%(wmin_month)s',
            '%(wmin_day)s',
            'waveform_%(network)s_%(station)s_' +
            '%(location)s_%(channel)s_%(tmin)s_%(tmax)s.mseed')

        tmin_all, tmax_all = self.get_time_range(sources)
        tmin = tmin if tmin is not None else tmin_all
        tmax = tmax if tmax is not None else tmax_all
        tts = util.time_to_str

        tinc = self.tinc or self.get_useful_time_increment(engine, sources)
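        # snap the requested span to multiples of tinc so that the generated
        # windows (and the derived file names) are stable across re-runs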
        tmin = math.floor(tmin / tinc) * tinc
        tmax = math.ceil(tmax / tinc) * tinc

        nwin = int(round((tmax - tmin) / tinc))

        pbar = None
        for iwin in range(nwin):
            tmin_win = tmin + iwin*tinc
            tmax_win = tmin + (iwin+1)*tinc

            if have_waveforms(tmin_win, tmax_win):
                continue

            if pbar is None:
                pbar = util.progressbar('Generating waveforms', (nwin-iwin))

            pbar.update(iwin)

            trs = self.get_waveforms(engine, sources, tmin_win, tmax_win)

            try:
                wpaths = io.save(
                    trs, path_traces,
                    additional=dict(
                        wmin_year=tts(tmin_win, format='%Y'),
                        wmin_month=tts(tmin_win, format='%m'),
                        wmin_day=tts(tmin_win, format='%d'),
                        wmin=tts(tmin_win, format='%Y-%m-%d_%H-%M-%S'),
                        wmax_year=tts(tmax_win, format='%Y'),
                        wmax_month=tts(tmax_win, format='%m'),
                        wmax_day=tts(tmax_win, format='%d'),
                        wmax=tts(tmax_win, format='%Y-%m-%d_%H-%M-%S')))

                for wpath in wpaths:
                    logger.debug('Generated file: %s' % wpath)

                add_files(wpaths)

            except FileSaveError as e:
                raise ScenarioError(str(e))

        if pbar is not None:
            pbar.finish()
Example n. 56
from pyrocko import pile, io, util
import time
import calendar
''' Chop a pile of waveform traces into segments '''

p = pile.make_pile(['test.mseed'])

# get timestamp for full hour before first data sample in all selected traces
tmin = calendar.timegm(time.gmtime(p.tmin)[:4] + (0, 0))

# iterate over the data, with a window length of one hour
for traces in p.chopper(tmin=tmin, tinc=3600):
    if traces:  # the list could be empty due to gaps
        window_start = traces[0].wmin
        timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H')
        filepath = 'test_hourfiles/hourfile-%s.mseed' % timestring
        io.save(traces, filepath)
Example n. 57
from pyrocko import pile, io, util
import time
import calendar

# when pile.make_pile() is called without any arguments, the command line
# parameters given to the script are searched for waveform files and directories
p = pile.make_pile()

# get timestamp for full hour before first data sample in all selected traces
tmin = calendar.timegm(time.gmtime(p.tmin)[:4] + (0, 0))

tinc = 3600.
tpad = 10.
target_deltat = 0.1

# iterate over the data, with a window length of one hour and 2x10 seconds of
# overlap
for traces in p.chopper(tmin=tmin, tinc=tinc, tpad=tpad):
    
    if traces: # the list could be empty due to gaps
        for tr in traces:
            tr.downsample_to(target_deltat, snap=True, demean=False)
            
            # remove overlapping
            tr.chop(tr.wmin, tr.wmax)
        
        window_start = traces[0].wmin
        timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H')
        filepath = 'downsampled/%(station)s_%(channel)s_%(mytimestring)s.mseed'
        io.save(traces, filepath, additional={'mytimestring': timestring})


# now look at the result with
#   > snuffler downsampled/
Example n. 58
from pyrocko import io, model, pile, util
from pyrocko.example import get_example_data

# Download example data
get_example_data('data_conversion', recursive=True)

input_path = 'data_conversion/mseed'
output_path = 'data_conversion/sac/' \
        '%(dirhz)s/%(station)s_%(channel)s_%(tmin)s.sac'

fn_stations = 'data_conversion/stations.txt'

stations_list = model.load_stations(fn_stations)

stations = {}
for s in stations_list:
    stations[s.network, s.station, s.location] = s
    s.set_channels_by_name(*'BHN BHE BHZ BLN BLE BLZ'.split())

p = pile.make_pile(input_path)
h = 3600.
tinc = 1 * h
tmin = util.day_start(p.tmin)
for traces in p.chopper_grouped(tmin=tmin,
                                tinc=tinc,
                                gather=lambda tr: tr.nslc_id):
    for tr in traces:
        dirhz = '%ihz' % int(round(1. / tr.deltat))
        io.save([tr],
                output_path,
                format='sac',
                additional={'dirhz': dirhz},
                stations=stations)
Example n. 60
from pyrocko import io
from pyrocko.gf import LocalEngine, Target, DCSource

# Note: `store_id` must name a Green's function store found below one of the
# store_superdirs; 'crust2_dd' here is a placeholder, adjust to your setup.
store_id = 'crust2_dd'

engine = LocalEngine(store_superdirs=['/home/alireza/Kiwi/GFDB'])

# Define a list of pyrocko.gf.Target objects, representing the recording
# devices. In this case one station with a three component sensor will
# serve fine for demonstration.
channel_codes = 'ENZ'
targets = [
    Target(
        lat=36.21,
        lon=48.22,
        store_id=store_id,
        codes=('', 'CVD', '', channel_code))
    for channel_code in channel_codes]

# Let's use a double couple source representation.
source_dc = DCSource(
    lat=34.00,
    lon=45.00,
    depth=10000,
    strike=170,
    dip=35,
    rake=50,
    magnitude=5.6)

# Processing that data will return a pyrocko.gf.Response object.
response = engine.process(source_dc, targets)

# This will return a list of the requested traces:
synthetic_traces = response.pyrocko_traces()
io.save(synthetic_traces, 'DISPL.CVD.mseed')
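
# Optional quick look at the synthetics in an interactive viewer (hedged
# usage note; requires `from pyrocko import trace`):
#   trace.snuffle(synthetic_traces)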