Code Example #1
File: test_core.py Project: Brtle/obspy
 def setUp(self):
     # directory where the test files are located
     self.path = os.path.join(os.path.dirname(__file__), 'data')
     self.filename_css = os.path.join(self.path, 'test_css.wfdisc')
     self.filename_nnsa = os.path.join(self.path, 'test_nnsa.wfdisc')
     # set up stream for validation
     header = {}
     header['station'] = 'TEST'
     header['starttime'] = UTCDateTime(1296474900.0)
     header['sampling_rate'] = 80.0
     header['calib'] = 1.0
     header['calper'] = 1.0
     header['_format'] = 'CSS'
     filename = os.path.join(self.path, '201101311155.10.ascii.gz')
     with gzip.open(filename, 'rb') as fp:
         data = np.loadtxt(fp, dtype=np.int_)
     # traces in the test files are sorted ZEN
     st = Stream()
     for x, cha in zip(data.reshape((3, 4800)), ('HHZ', 'HHE', 'HHN')):
         # big-endian copy
         tr = Trace(x, header.copy())
         tr.stats.station += 'be'
         tr.stats.channel = cha
         st += tr
         # little-endian copy
         tr = Trace(x, header.copy())
         tr.stats.station += 'le'
         tr.stats.channel = cha
         st += tr
     self.st_result_css = st.copy()
     for tr in st:
         tr.stats['_format'] = "NNSA_KB_CORE"
     self.st_result_nnsa = st
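The fixture above only prepares the reference streams; the assertions live in other methods of the test case. A minimal companion test, sketched under the assumption that the CSS reader is registered with obspy.read and that the file's trace order matches the fixture (the method name and comparisons are illustrative, not from the original file):

def test_read_css(self):
    # hypothetical companion test: read the wfdisc through the generic
    # reader and compare against the fixture built in setUp
    # (assumes "from obspy import read" and "import numpy as np" at module level)
    st = read(self.filename_css)
    self.assertEqual(len(st), len(self.st_result_css))
    for tr, expected in zip(st, self.st_result_css):
        self.assertEqual(tr.stats._format, 'CSS')
        np.testing.assert_array_equal(tr.data, expected.data)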
Code Example #2
File: test_core.py Project: yanyuandaxia/obspy
 def setUp(self):
     # directory where the test files are located
     self.path = os.path.join(os.path.dirname(__file__), 'data')
     self.filename_css = os.path.join(self.path, 'test_css.wfdisc')
     self.filename_nnsa = os.path.join(self.path, 'test_nnsa.wfdisc')
     self.filename_css_2 = os.path.join(self.path, 'test_css_2.wfdisc')
     self.filename_css_3 = os.path.join(self.path, 'test_css_3.wfdisc')
     # set up stream for validation
     header = {}
     header['station'] = 'TEST'
     header['starttime'] = UTCDateTime(1296474900.0)
     header['sampling_rate'] = 80.0
     header['calib'] = 1.0
     header['calper'] = 1.0
     header['_format'] = 'CSS'
     filename = os.path.join(self.path, '201101311155.10.ascii.gz')
     with gzip.open(filename, 'rb') as fp:
         data = np.loadtxt(fp, dtype=np.int_)
     # traces in the test files are sorted ZEN
     st = Stream()
     for x, cha in zip(data.reshape((3, 4800)), ('HHZ', 'HHE', 'HHN')):
         # big-endian copy
         tr = Trace(x, header.copy())
         tr.stats.station += 'be'
         tr.stats.channel = cha
         st += tr
         # little-endian copy
         tr = Trace(x, header.copy())
         tr.stats.station += 'le'
         tr.stats.channel = cha
         st += tr
     self.st_result_css = st.copy()
     for tr in st:
         tr.stats['_format'] = "NNSA_KB_CORE"
     self.st_result_nnsa = st
Code Example #3
def update_waveform():

    # Load new data
    global st

    st = utils.get_stream(settings['datasource'], settings['scnl'],
                          UTCDateTime(start_input.value),
                          UTCDateTime(start_input.value) + 30 * 60)
    st = st.filter('bandpass', freqmin=FREQMIN, freqmax=FREQMAX)

    # Initialize data source for filtered waveform plotting
    st_plot = st.copy()
    st_plot.filter('lowpass', freq=20.0)
    # offset is decremented so that e.g. four channels are plotted top to
    # bottom at center values 6, 4, 2, 0
    offset = len(st) * 2 - 2
    times = []
    traces = []
    for s in st_plot:
        times.append(num2date(s.times('matplotlib')))
        traces.append(s.data / max(s.data) + offset)
        offset -= 2
    waveclr = ['black'] * len(st)
    source_waveforms.data = {
        'times': times,
        'traces': traces,
        'color': waveclr
    }

    # Update the CFT
    update_cft(ticker_alg.value)
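The offset bookkeeping above centers successive traces 2 units apart, starting from len(st) * 2 - 2 at the top. A quick check of the claim in the comment, with an illustrative channel count:

n = 4                                   # e.g. four channels
offsets = [n * 2 - 2 - 2 * i for i in range(n)]
print(offsets)                          # [6, 4, 2, 0]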
Code Example #4
File: utils.py Project: tianyining/OBStools
def QC_streams(start, end, st):

    # Check start times
    if not np.all([tr.stats.starttime == start for tr in st]):
        print("* Start times are not all close to true start: ")
        for tr in st:
            print("*   " + tr.stats.channel + " " + str(tr.stats.starttime) +
                  " " + str(tr.stats.endtime))
        print("*   True start: " + str(start))
        print("* -> Shifting traces to true start")
        delay = [tr.stats.starttime - start for tr in st]
        st_shifted = Stream(
            traces=[traceshift(tr, dt) for tr, dt in zip(st, delay)])
        st = st_shifted.copy()

    # # Check sampling rate
    # sr = st[0].stats.sampling_rate
    # sr_round = float(floor_decimal(sr, 0))
    # if not sr == sr_round:
    #     print("* Sampling rate is not an integer value: ", sr)
    #     print("* -> Resampling")
    #     st.resample(sr_round, no_filter=False)

    # Try trimming
    dt = st[0].stats.delta
    try:
        st.trim(start, end - dt, fill_value=0., pad=True)
    except Exception:
        print("* Unable to trim")
        print("* -> Skipping")
        print("**************************************************")
        return False, None

    # Check final lengths - they should all be equal if start times
    # and sampling rates are all equal and traces have been trimmed
    sr = st[0].stats.sampling_rate
    if not np.allclose([tr.stats.npts for tr in st[1:]], st[0].stats.npts):
        print("* Lengths are incompatible: ")
        for tr in st:
            print("*     " + str(tr.stats.npts))
        print("* -> Skipping")
        print("**************************************************")

        return False, None

    elif not np.allclose([st[0].stats.npts], int((end - start) * sr), atol=1):
        print("* Length is too short: ")
        print("*    " + str(st[0].stats.npts) + " ~= " +
              str(int((end - start) * sr)))
        print("* -> Skipping")
        print("**************************************************")

        return False, None

    else:
        return True, st
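Callers presumably unpack the flag/stream pair and skip windows that fail QC. A hedged usage sketch against ObsPy's bundled example data (the window length is illustrative):

from obspy import read

st = read()                        # ObsPy's bundled example stream
start = st[0].stats.starttime
ok, st_clean = QC_streams(start, start + 10., st)
if ok:
    print(st_clean)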
Code Example #5
File: hk.py Project: shineusn/RfPy
    def _residuals(self):
        """ 
        Internal method to obtain residuals between observed and predicted
        receiver functions given the Moho depth and Vp/Vs obtained from
        the Hk stack.
        """
        from telewavesim import utils

        # Simple 1-layer model over half-space
        model = utils.Model([self.h0, 0.], [2800., 3300.], [self.vp, 8.0],
                            [self.vp / self.k0, 4.5], ['iso', 'iso'])

        # Parameters for run
        slow = [tr.stats.slow for tr in self.rfV1]
        npts = self.rfV1[0].stats.npts
        dt = self.rfV1[0].stats.delta

        trR = Stream()

        for sl in slow:
            trxyz = utils.run_plane(model, sl, npts, dt)
            tfs = utils.tf_from_xyz(trxyz,
                                    pvh=True,
                                    vp=self.vp,
                                    vs=self.vp / self.k0)
            tfs[0].data = np.fft.fftshift(tfs[0].data)
            trR.append(tfs[0])

        trR.filter('bandpass',
                   freqmin=0.05,
                   freqmax=0.5,
                   corners=2,
                   zerophase=True)

        # Get stream of residuals
        res = trR.copy()
        for i in range(len(res)):
            res[i].data = self.rfV1[i].data - trR[i].data
        return res
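RfPy presumably reduces this residual stream to a scalar misfit elsewhere; a hedged sketch of one such reduction (the instance name hkstack is hypothetical):

import numpy as np

res = hkstack._residuals()       # hypothetical HkStack-like instance
rms = np.sqrt(np.mean(np.concatenate([tr.data for tr in res]) ** 2))
print('RMS residual:', rms)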
Code Example #6
File: stalta4uh_st.py Project: obspy/branches
    summary += exceptions
summary.append("#" * 79)

trig = []
mutt = []
if st:
    # preprocessing, backup original data for plotting at end
    st.merge(0)
    st.detrend("linear")
    for tr in st:
        tr.data = tr.data * cosTaper(len(tr), 0.01)
    #st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
    st.trim(T1, T2)
    st_trigger = st.copy()
    st.normalize(global_max=False)
    # do the triggering
    trig = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st_trigger,
            thr_coincidence_sum=PAR.MIN_STATIONS,
            max_trigger_length=PAR.MAXLEN, trigger_off_extension=PAR.ALLOWANCE,
            details=True, sta=PAR.STA, lta=PAR.LTA)

    for t in trig:
        info = "%s %ss %s %s" % (t['time'].strftime("%Y-%m-%dT%H:%M:%S"),
                                 ("%.1f" % t['duration']).rjust(4),
                                 ("%i" % t['cft_peak_wmean']).rjust(3),
                                 "-".join(t['stations']))
        summary.append(info)
        tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
        outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (
            PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"), t['duration'],
            t['cft_peak_wmean'], len(t['stations']), num_stations,
            "-".join(t['stations']))
        tmp.plot(outfile=outfilename)
        mutt += ("-a", outfilename)
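The mutt list assembled above holds command-line arguments, presumably for mailing the trigger plots with the mutt client ("-a" attaches a file). A hedged sketch of how such an argument list might be consumed, with an invented recipient address:

import subprocess

cmd = (["mutt", "-s", "STA/LTA trigger summary"] + list(mutt)
       + ["--", "operator@example.org"])
subprocess.run(cmd, input="\n".join(summary).encode())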
Code Example #7
def correlate(io,
              day,
              outkey,
              edge=60,
              length=3600,
              overlap=1800,
              demean_window=True,
              discard=None,
              only_auto_correlation=False,
              station_combinations=None,
              component_combinations=('ZZ', ),
              max_lag=100,
              keep_correlations=False,
              stack='1d',
              njobs=0,
              **preprocessing_kwargs):
    """
    Correlate data of one day

    :param io: io config dictionary
    :param day: |UTC| object with day
    :param outkey: the output key for the HDF5 index
    :param edge: additional time span in seconds requested from the days
        before and after
    :param length: length of correlation in seconds (string possible)
    :param overlap: length of overlap in seconds (string possible)
    :param demean_window: demean each window individually before correlating
    :param discard: discard correlations with data coverage below this
        value (float from interval [0, 1])
    :param only_auto_correlation: only correlate stations with themselves
        (different components possible)
    :param station_combinations: specify station combinations
        (e.g. ``'CX.PATCX-CX.PB01'``, network code can be
        omitted, e.g. ``'PATCX-PB01'``, default: all)
    :param component_combinations: component combinations to calculate,
        tuple of strings with length two, e.g. ``('ZZ', 'ZN', 'RR')``,
        if ``'R'`` or ``'T'`` is specified, components will be rotated after
        preprocessing, default: only ZZ components
    :param max_lag: max time lag in correlations in seconds
    :param keep_correlations: write correlations into the HDF5 file
        (default: False)
    :param stack: stack correlations and write stacks into the HDF5 file
        (default: ``'1d'``, must be one day or smaller)

        .. note::

            If you want to stack larger time spans
            use the separate stack command on correlations or stacked
            correlations.

    :param njobs: number of jobs used. Some tasks will run in parallel
        (preprocessing and correlation).
    :param \*\*preprocessing_kwargs: all other kwargs are passed to
        `preprocess`

    """
    inventory = io['inventory']
    length = _time2sec(length)
    overlap = _time2sec(overlap)
    if not keep_correlations and stack is None:
        msg = ('keep_correlations is False and stack is None -> correlations '
               'would not be saved')
        raise ValueError(msg)
    components = set(''.join(component_combinations))
    if 'R' in components or 'T' in components:
        load_components = components - {'R', 'T'} | {'N', 'E'}
    else:
        load_components = components
    if station_combinations is not None:
        load_stations = set(sta for comb in station_combinations
                            for sta in comb.split('-'))
    else:
        load_stations = None
    # load data
    stream = obspy.Stream()
    for smeta in _iter_station_meta(inventory, load_components):
        if (load_stations is not None and smeta['station'] not in load_stations
                and '.'.join((smeta['network'], smeta['station']))
                not in load_stations):
            continue
        stream2 = get_data(smeta,
                           io['data'],
                           io['data_format'],
                           day,
                           overlap=overlap,
                           edge=edge)
        if stream2:
            stream += stream2
    if len(stream) == 0:
        log.warning('empty stream for day %s', str(day)[:10])
        return
    preprocess(stream,
               day,
               inventory,
               overlap=overlap,
               njobs=njobs,
               **preprocessing_kwargs)
    # collect trace pairs for correlation
    next_day = day + 24 * 3600
    stations = sorted({tr.id[:-1] for tr in stream})
    tasks = []
    for station1, station2 in itertools.combinations_with_replacement(
            stations, 2):
        if only_auto_correlation and station1 != station2:
            continue
        if station_combinations and not any(
                set(station_comb.split('-')) == (
                    {station1.rsplit('.', 2)[0], station2.rsplit('.', 2)[0]}
                    if '.' in station_comb else
                    {station1.split('.')[1], station2.split('.')[1]})
                for station_comb in station_combinations):
            continue
        stream1 = Stream([tr for tr in stream if tr.id[:-1] == station1])
        stream2 = Stream([tr for tr in stream if tr.id[:-1] == station2])
        datetime1 = _midtime(stream1[0].stats)
        datetime2 = _midtime(stream2[0].stats)
        msg = 'Cannot get coordinates for channel %s datetime %s'
        try:
            c1 = inventory.get_coordinates(stream1[0].id, datetime=datetime1)
        except Exception as ex:
            raise RuntimeError(msg % (stream1[0].id, datetime1)) from ex
        try:
            c2 = inventory.get_coordinates(stream2[0].id, datetime=datetime2)
        except Exception as ex:
            raise RuntimeError(msg % (stream2[0].id, datetime2)) from ex
        args = (c1['latitude'], c1['longitude'], c2['latitude'],
                c2['longitude'])
        dist, azi, baz = gps2dist_azimuth(*args)
        if ('R' in components or 'T' in components) and station1 != station2:
            stream1 = stream1.copy()
            stream1b = stream1.copy().rotate('NE->RT', azi)
            stream1.extend(stream1b.select(component='R'))
            stream1.extend(stream1b.select(component='T'))
            stream2 = stream2.copy()
            stream2b = stream2.copy().rotate('NE->RT', azi)
            stream2.extend(stream2b.select(component='R'))
            stream2.extend(stream2b.select(component='T'))
        it_ = (itertools.product(stream1, stream2) if station1 != station2 else
               itertools.combinations_with_replacement(stream1, 2))
        for tr1, tr2 in it_:
            comps = tr1.stats.channel[-1] + tr2.stats.channel[-1]
            if component_combinations and (comps not in component_combinations
                                           and comps[::-1]
                                           not in component_combinations):
                continue
            tasks.append((tr1, tr2, dist, azi, baz))
    # start correlation
    do_work = partial(_slide_and_correlate_traces, day, next_day, length,
                      overlap, discard, max_lag, outkey, demean_window)
    streams = start_parallel_jobs_inner_loop(tasks, do_work, njobs)
    xstream = Stream()
    xstream.traces = [tr for s_ in streams for tr in s_]
    if len(xstream) > 0:
        res = {}
        if keep_correlations:
            res['corr'] = xstream
        if stack:
            res['stack'] = yam.stack.stack(xstream, stack)
        return res
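A hedged invocation sketch for the function above; the io keys follow the docstring, but every value here is an illustrative assumption (yam normally assembles this dictionary from its configuration file):

from obspy import UTCDateTime

io = {'inventory': inv,      # obspy Inventory object, loaded elsewhere
      'data': 'data/{network}.{station}.{channel}__{t.year}.mseed',
      'data_format': 'MSEED'}
result = correlate(io, UTCDateTime('2011-01-31'), 'cross_correlations',
                   station_combinations=['PATCX-PB01'],
                   component_combinations=('ZZ',),
                   max_lag=100, keep_correlations=False, stack='1d')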
Code Example #8
class Seedlink_plotter(SLClient):
    """
    This module plots realtime seismic data from a Seedlink server
    """

    def __init__(self, figure, canvas, interval, backtrace, args):

        # Set the log level to display minimal info
        super(Seedlink_plotter, self).__init__(loglevel='CRITICAL')
#         super(Seedlink_plotter, self).__init__()
        self.figure = figure
        self.stream = Stream()
        self.interval = interval
        self.backtrace = backtrace
        self.canvas = canvas
        self.flip = 0
        self.scale = args.scale
        self.args = args
        self.initial_update_rate = 800
        self.update_rate = 2
        # Plot after getting the penultimate line of data
        self.print_percentage = (
            self.backtrace-60.0*self.interval)/self.backtrace
        self.print_max = (self.backtrace-60.0*self.interval)
        widgets = [FormatLabel('Receiving Data: - '), BouncingBar(
            marker=RotatingMarker())]
        self.pbar = ProgressBar(maxval=self.print_max, widgets=widgets).start()
#         print "max "+ str(self.print_max)

    # Converter for the color gradient
    def rgb_to_hex(self, r, g, b):
        return '#%02X%02X%02X' % (r, g, b)

    # Rainbow color generator
    def rainbow_color_generator(self, max_color):
        color_list = []
        frequency = 0.3
        for compteur_lignes in xrange(max_color):
            red = sin(frequency*compteur_lignes*2 + 0)*127+128
            green = sin(frequency*compteur_lignes*2 + 2)*127+128
            blue = sin(frequency*compteur_lignes*2 + 4)*127+128

            color_list.append(self.rgb_to_hex(red, green, blue))

        return tuple(color_list)

    def plot_graph(self):

        #######################################################################
        # filter section
        #######################################################################
        self.local_stream = self.stream.copy()
        # Filter example
#         self.local_stream.filter('bandpass', freqmin=0.001, freqmax=0.5,corners=2, zerophase=True)
        #######################################################################

        # With this upscale factor the graph looks nice
        upscale_factor = 30

        if args.rainbow:
            # Rainbow colors !
            self.color = self.rainbow_color_generator(
                int(args.nb_rainbow_colors))
        else:
            # Regular colors
            self.color = ('#000000', '#ff0000', '#0000ff', '#56a83c')

        self.local_stream.plot(
            fig=self.figure, type='dayplot', interval=self.interval,
            number_of_ticks=13, tick_format='%d/%m %Hh',
            size=(args.x_size * upscale_factor, args.y_size * upscale_factor),
            x_labels_size=8,
            y_labels_size=8, title=self.title, title_size=14, linewidth=0.5, right_vertical_labels=False,
            vertical_scaling_range=self.scale,
            subplots_adjust_left=0.03, subplots_adjust_right=0.99,
            subplots_adjust_top=0.95, subplots_adjust_bottom=0.1,
            one_tick_per_line=True,
            # black, red, blue, green
            color=self.color,
            show_y_UTC_label=False)

    def packetHandler(self, count, slpack):
        """
        Processes each packet received from the SeedLinkConnection.
        :type count: int
        :param count:  Packet counter.
        :type slpack: :class:`~obspy.seedlink.SLPacket`
        :param slpack: packet to process.
        :return: Boolean true if connection to SeedLink server should be
            closed and session terminated, false otherwise.
        """

        # check if not a complete packet
        if slpack is None or (slpack == SLPacket.SLNOPACKET) or \
                (slpack == SLPacket.SLERROR):
            return False

        # get basic packet info
        type = slpack.getType()

        # process INFO packets here
        if (type == SLPacket.TYPE_SLINF):
            return False
        if (type == SLPacket.TYPE_SLINFT):
#             print "Complete INFO:\n" + self.slconn.getInfoString()
            if self.infolevel is not None:
                return True
            else:
                return False

        # process packet data
        trace = slpack.getTrace()
        if trace is None:
            print self.__class__.__name__ + ": blockette contains no trace"
            return False

        # new samples add to the main stream
        self.stream += trace
        self.stream.merge()

        now = UTCDateTime()

        # Stop time will be the next round date
        stop_time = UTCDateTime(
            now.year, now.month, now.day, now.hour, 0, 0)+3600
        start_time = stop_time-self.backtrace

        # Limit the stream size
        self.stream = self.stream.slice(start_time, stop_time)
        self.stream.trim(start_time, stop_time)

        stats = self.stream.traces[0].stats
        self.title = "%s %s %s %s scale: %s - unfiltered" % (
            stats.station, stats.network, stats.location, stats.channel,
            self.scale)

        stream_time_length = self.stream.traces[
            0].stats.endtime - self.stream.traces[0].stats.starttime

        # Before we reach print_percentage of the data to plot, we plot once
        # every initial_update_rate packets received
#         if (stream_time_length < (self.backtrace*self.print_percentage)):
#        if ((stream_time_length))<(self.backtrace-60.0*self.interval):
#         print str(stream_time_length)+"/"+str(self.print_max)
        if stream_time_length <= self.print_max:

            self.flip += 1

#             if ((stream_time_length))<(self.backtrace-60.0*self.interval):
            self.pbar.update(stream_time_length+1)
#             print str(stream_time_length)+"/"+str(self.print_max)
            if (self.flip > self.initial_update_rate):
                self.flip = 0
                self.figure.clear()
                self.plot_graph()

 #             self.pbar.finish()

        # Real time plotting
        # We plot each update_rate packet we received
        # if (stream_time_length >= (self.backtrace*self.print_percentage)):
        if stream_time_length > self.print_max:
#             print str(stream_time_length)+"/"+str(self.print_max)

            self.flip += 1
            if (self.flip > self.update_rate):
                self.figure.clear()
                self.plot_graph()
                self.flip = 0

        return False
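The replot throttle above switches behaviour once the buffered stream spans print_max = backtrace - 60 * interval seconds: before that, a plot is drawn every initial_update_rate packets, afterwards every update_rate packets. A quick check of the threshold arithmetic with illustrative values:

backtrace = 6 * 3600            # keep six hours of data (assumed value)
interval = 60                   # dayplot line length in minutes (assumed)
print_max = backtrace - 60.0 * interval
print(print_max)                # 18000.0 s, i.e. five of the six hours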
Code Example #9
fs = st2[0].stats.sampling_rate
if debug:
    print("Sample rate is ", fs)
total_count = Window * fs

# Start of Sliding window loop to calculate orientation angle
for stW in st.slide(Window, (1. - Overlap) * Window):
    print('On day: ' + str(stW[0].stats.starttime))

    # Get start and end times and trim accordingly
    starttime = stW[0].stats.starttime

    endtime = starttime + Window

    st2W = st2.copy()
    st2W.trim(starttime, endtime)

    stPW = stP.copy()
    stPW.trim(starttime, endtime)

    # Fill data gaps and detrend
    stW.merge(fill_value=0.)
    st2W.merge(fill_value=0.)
    stPW.merge(fill_value=0.)

    data_count = np.count_nonzero(st2W[0].data)
    if data_count / total_count >= Comp_Thres:

        try:
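The snippet is truncated inside the coverage check, but the test itself just compares non-zero samples against the expected sample count. A quick illustration with assumed parameter values:

import numpy as np

fs, Window, Comp_Thres = 40.0, 3600.0, 0.9    # assumed values
total_count = Window * fs                     # 144000 expected samples
data = np.zeros(int(total_count))
data[:130000] = 1.0                           # about 90.3 % coverage
print(np.count_nonzero(data) / total_count >= Comp_Thres)   # True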
Code Example #10
File: template_gen.py Project: travisalongi/Cascadia
    if st.count() > 0: # need waveforms to continue
        std = Stream()
        for tr in st:
            num = tr.stats.npts
            samp = tr.stats.sampling_rate             
            if num >= (samp*86400)*.8:
                std.append(tr)
        
        print('number of good waveforms ', std.count())
        if std.count() < 3: # want 3 or more waveforms for templates
            print('skipping event not enough good waveforms')
            
        else:         
            std.sort(['starttime'])
            std.merge(fill_value="interpolate")
            st1 = std.copy()
            
            start = UTCDateTime(year = yr, julday = days)
            end = start + 86400
            st_filter = st1.trim(starttime=start, endtime=end)
        
#            print('GENERATING TEMPLATE FOR ' + str(start) +   ' SAVING AS MINISEED FILES & PLOTS ARE SAVED TO FOLDER.')
            # template matching
            template = template_gen.from_meta_file(
                meta_file=new_catalog, st=st_filter, lowcut=3, highcut=10,
                filt_order=4, samp_rate=25, prepick=0.15, length=4.6,
                swin='P', parallel=True)
            
            if len(template[0]) < 3:
                print('Skipping template -- %i picks & %i WF in template' % (n_picks[i], len(template[0])))
                
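The snippet ends after validating the template. A plausible follow-up would write it to disk; a hedged sketch with an assumed filename pattern (template[0] is an ObsPy Stream in EQcorrscan):

template[0].write('template_%s.ms' % start.strftime('%Y_%j'), format='MSEED')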
Code Example #11
File: waveform.py Project: obspy/branches
class WaveformPlotting(object):
    """
    Class that provides several solutions for plotting large and small waveform
    data sets.

    .. warning::
        This class should NOT be used directly, instead use the
        :meth:`~obspy.core.stream.Stream.plot` method of the
        ObsPy :class:`~obspy.core.stream.Stream` or
        :class:`~obspy.core.trace.Trace` objects.

    It uses matplotlib to plot the waveforms.
    """

    def __init__(self, **kwargs):
        """
        Checks some variables and maps the kwargs to class variables.
        """
        self.stream = kwargs.get('stream')
        # Check if it is a Stream or a Trace object.
        if isinstance(self.stream, Trace):
            self.stream = Stream([self.stream])
        elif not isinstance(self.stream, Stream):
            msg = 'Plotting is only supported for Stream or Trace objects.'
            raise TypeError(msg)
        # Stream object should contain at least one Trace
        if len(self.stream) < 1:
            msg = "Empty object"
            raise IndexError(msg)
        # Type of the plot.
        self.type = kwargs.get('type', 'normal')
        # Start- and endtimes of the plots.
        self.starttime = kwargs.get('starttime', None)
        self.endtime = kwargs.get('endtime', None)
        self.fig_obj = kwargs.get('fig', None)
        # If no times are given take the min/max values from the stream object.
        if not self.starttime:
            self.starttime = min([trace.stats.starttime for \
                             trace in self.stream])
        if not self.endtime:
            self.endtime = max([trace.stats.endtime for \
                           trace in self.stream])
        # Map stream object and slice just in case.
        self.stream = self.stream.slice(self.starttime, self.endtime)
        # normalize times
        if self.type == 'relative':
            dt = self.starttime
            # fix plotting boundaries
            self.endtime = UTCDateTime(self.endtime - self.starttime)
            self.starttime = UTCDateTime(0)
            # fix stream times
            for tr in self.stream:
                tr.stats.starttime = UTCDateTime(tr.stats.starttime - dt)
        # Whether to use straight plotting or the fast minmax method.
        self.plotting_method = kwargs.get('method', 'fast')
        # Below that value the data points will be plotted normally. Above it
        # the data will be plotted using a different approach (details see
        # below). Can be overwritten by the above self.plotting_method kwarg.
        self.max_npts = 400000
        # If automerge is enabled. Merge traces with the same id for the plot.
        self.automerge = kwargs.get('automerge', True)
        # Set default values.
        # The default value for the size is determined dynamically because
        # there might be more than one channel to plot.
        self.size = kwargs.get('size', None)
        # Values that will be used to calculate the size of the plot.
        self.default_width = 800
        self.default_height_per_channel = 250
        if not self.size:
            self.width = 800
            # Check the kind of plot.
            if self.type == 'dayplot':
                self.height = 600
            else:
                # One plot for each trace.
                if self.automerge:
                    count = []
                    for tr in self.stream:
                        if hasattr(tr.stats, 'preview') and tr.stats.preview:
                            tr_id = tr.id + 'preview'
                        else:
                            tr_id = tr.id
                        if tr_id not in count:
                            count.append(tr_id)
                    count = len(count)
                else:
                    count = len(self.stream)
                self.height = count * 250
        else:
            self.width, self.height = self.size
        # Interval length in minutes for dayplot.
        self.interval = 60 * kwargs.get('interval', 15)
        # Scaling.
        self.vertical_scaling_range = kwargs.get('vertical_scaling_range',
                                                 None)
        # Dots per inch of the plot. Might be useful for printing plots.
        self.dpi = kwargs.get('dpi', 100)
        # Color of the graph.
        if self.type == 'dayplot':
            self.color = kwargs.get('color', ('#000000','#B2000F', '#004C12',
                                              '#0E01FF'))
            if isinstance(self.color, basestring):
                self.color = (self.color,)
            self.number_of_ticks = kwargs.get('number_of_ticks', None)
        else:
            self.color = kwargs.get('color', 'k')
            self.number_of_ticks = kwargs.get('number_of_ticks', 5)
        # Background and face color.
        self.background_color = kwargs.get('bgcolor', 'w')
        self.face_color = kwargs.get('face_color', 'w')
        # Transparency. Overwrites background and facecolor settings.
        self.transparent = kwargs.get('transparent', False)
        if self.transparent:
            self.background_color = None
        # Ticks.
        self.tick_format = kwargs.get('tick_format', '%H:%M:%S')
        self.tick_rotation = kwargs.get('tick_rotation', 0)
        # Whether or not to save a file.
        self.outfile = kwargs.get('outfile')
        self.handle = kwargs.get('handle')
        # File format of the resulting file. Usually defaults to PNG but might
        # be dependent on your matplotlib backend.
        self.format = kwargs.get('format')

    def plotWaveform(self, *args, **kwargs):
        """
        Creates a graph of any given ObsPy Stream object. It either saves the
        image directly to the file system or returns a binary image string.

        For all color values you can use legit HTML names, HTML hex strings
        (e.g. '#eeefff') or you can pass an R , G , B tuple, where each of
        R , G , B are in the range [0, 1]. You can also use single letters for
        basic built-in colors ('b' = blue, 'g' = green, 'r' = red, 'c' = cyan,
        'm' = magenta, 'y' = yellow, 'k' = black, 'w' = white) and gray shades
        can be given as a string encoding a float in the 0-1 range.
        """
        # Setup the figure if not passed explicitly.
        if not self.fig_obj:
            self.__setupFigure()
        else:
            self.fig = self.fig_obj
        # Determine kind of plot and do the actual plotting.
        if self.type == 'dayplot':
            self.plotDay(*args, **kwargs)
        else:
            self.plot(*args, **kwargs)
        # Adjust the subplot so there is always a margin of 80 px on every
        # side except for plots with just a single trace.
        if self.type != 'dayplot':
            if self.height >= 400:
                fract_y = 80.0 / self.height
            else:
                fract_y = 25.0 / self.height
            fract_x = 80.0 / self.width
            self.fig.subplots_adjust(top=1.0 - fract_y, bottom=fract_y,
                                     left=fract_x, right=1 - fract_x)
        self.fig.canvas.draw()
        # The following just serves as a unified way of saving and displaying
        # the plots.
        if not self.transparent:
            extra_args = {'dpi': self.dpi,
                          'facecolor': self.face_color,
                          'edgecolor': self.face_color}
        else:
            extra_args = {'dpi': self.dpi,
                          'transparent': self.transparent}
        if self.outfile:
            # If format is set use it.
            if self.format:
                self.fig.savefig(self.outfile, format=self.format,
                                 **extra_args)
            # Otherwise use format from self.outfile or default to PNG.
            else:
                self.fig.savefig(self.outfile, **extra_args)
        else:
            # Return a binary image string if not self.outfile but self.format.
            if self.format:
                imgdata = StringIO.StringIO()
                self.fig.savefig(imgdata, format=self.format,
                                 **extra_args)
                imgdata.seek(0)
                return imgdata.read()
            elif self.handle:
                return self.fig
            else:
                if not self.fig_obj:
                    plt.show()

    def plot(self, *args, **kwargs):
        """
        Plot the Traces showing one graph per Trace.

        Plots the whole time series when there are self.max_npts points or
        fewer; for more points it plots min/max values.
        """
        stream_new = []
        # Just remove empty traces.
        if not self.automerge:
            for tr in self.stream:
                stream_new.append([])
                if len(tr.data):
                    stream_new[-1].append(tr)
        else:
            # Generate sorted list of traces (no copy)
            # Sort order, id, starttime, endtime
            ids = []
            for tr in self.stream:
                if hasattr(tr.stats, 'preview') and tr.stats.preview:
                    id = tr.id + 'preview'
                else:
                    id = tr.id
                if id not in ids:
                    ids.append(id)
            for id in ids:
                stream_new.append([])
                for tr in self.stream:
                    if hasattr(tr.stats, 'preview') and tr.stats.preview:
                        tr_id = tr.id + 'preview'
                    else:
                        tr_id = tr.id
                    if tr_id == id:
                        # does not copy the elements of the data array
                        tr_ref = copy(tr)
                        # Trim does nothing if times are outside
                        if self.starttime >= tr_ref.stats.endtime or \
                                self.endtime <= tr_ref.stats.starttime:
                            continue
                        if tr_ref.data.size:
                            stream_new[-1].append(tr_ref)
                # delete if empty list
                if not len(stream_new[-1]):
                    stream_new.pop()
                    continue
                stream_new[-1].sort(key=lambda x: x.stats.endtime)
                stream_new[-1].sort(key=lambda x: x.stats.starttime)
        # If everything is lost in the process raise an Exception.
        if not len(stream_new):
            raise Exception("Nothing to plot")
        # Create helper variable to track ids and min/max/mean values.
        self.stats = []
        # Loop over each Trace and call the appropriate plotting method.
        self.axis = []
        for _i, tr in enumerate(stream_new):
            # Each trace needs to have the same sampling rate.
            sampling_rates = set([_tr.stats.sampling_rate for _tr in tr])
            if len(sampling_rates) > 1:
                msg = "All traces with the same id need to have the same " + \
                      "sampling rate."
                raise Exception(msg)
            sampling_rate = sampling_rates.pop()
            if self.background_color:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1,
                                          axisbg=self.background_color)
            else:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1)
            self.axis.append(ax)
            # XXX: Also enable the minmax plotting for previews.
            if self.plotting_method != 'full' and \
                ((self.endtime - self.starttime) * sampling_rate > \
                 self.max_npts):
                self.__plotMinMax(stream_new[_i], ax, *args, **kwargs)
            else:
                self.__plotStraight(stream_new[_i], ax, *args, **kwargs)
        # Set ticks.
        self.__plotSetXTicks()
        self.__plotSetYTicks()

    def plotDay(self, *args, **kwargs):
        """
        Creates a dayplot of the seismogram.
        """
        # Create a copy of the stream because it might be operated on.
        self.stream = self.stream.copy()
        # Merge and trim to pad.
        self.stream.merge()
        if len(self.stream) != 1:
            msg = "All traces need to be of the same id for a dayplot"
            raise ValueError(msg)
        self.stream.trim(self.starttime, self.endtime, pad=True)
        # Get minmax array.
        self.__dayplotGetMinMaxValues(*args, **kwargs)
        # Normalize array
        self.__dayplotNormalizeValues(*args, **kwargs)
        # Get timezone information. If none is given, use local time.
        self.time_offset = kwargs.get('time_offset',
                           round((UTCDateTime(datetime.now()) - \
                           UTCDateTime()) / 3600.0, 2))
        self.timezone = kwargs.get('timezone', 'local time')
        # Try to guess how many steps are needed to advance one full time unit.
        self.repeat = None
        intervals = self.extreme_values.shape[0]
        if self.interval < 60 and 60 % self.interval == 0:
            self.repeat = 60 / self.interval
        elif self.interval < 1800 and 3600 % self.interval == 0:
            self.repeat = 3600 / self.interval
        # Otherwise use a maximum value of 10.
        else:
            if intervals >= 10:
                self.repeat = 10
            else:
                self.repeat = intervals
        # Create axis to plot on.
        if self.background_color:
            ax = self.fig.add_subplot(1, 1, 1, axisbg=self.background_color)
        else:
            ax = self.fig.add_subplot(1, 1, 1)
        # Adjust the subplots to be symmetrical. Also make some more room
        # at the top.
        self.fig.subplots_adjust(left=0.12, right=0.88, top=0.88)
        # Create x_value_array.
        aranged_array = np.arange(self.width)
        x_values = np.empty(2 * self.width)
        x_values[0::2] = aranged_array
        x_values[1::2] = aranged_array
        intervals = self.extreme_values.shape[0]
        # Loop over each step.
        for _i in xrange(intervals):
            # Create offset array.
            y_values = np.ma.empty(self.width * 2)
            y_values.fill(intervals - (_i + 1))
            # Add min and max values.
            y_values[0::2] += self.extreme_values[_i, :, 0]
            y_values[1::2] += self.extreme_values[_i, :, 1]
            # Plot the values.
            ax.plot(x_values, y_values,
                    color=self.color[_i % len(self.color)])
        # Set ranges.
        ax.set_xlim(0, self.width - 1)
        ax.set_ylim(-0.3, intervals + 0.3)
        self.axis = [ax]
        # Set ticks.
        self.__dayplotSetYTicks()
        self.__dayplotSetXTicks()
        # Choose to show grid but only on the x axis.
        self.fig.axes[0].grid()
        self.fig.axes[0].yaxis.grid(False)
        # Set the title of the plot.
        #suptitle = '%s %s'%(self.stream[0].id,self.starttime.strftime('%Y-%m-%d'))
        #self.fig.suptitle(suptitle, fontsize='small')

    def __plotStraight(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Just plots the data samples in the self.stream. Useful for smaller
        datasets up to around 1,000,000 samples (depending on the machine it's
        being run on).

        Slow and high memory consumption for large datasets.
        """
        # Copy to avoid any changes to original data.
        trace = deepcopy(trace)
        if len(trace) > 1:
            stream = Stream(traces=trace)
            # Merge with 'interpolation'. In case of overlaps this method will
            # always use the longest available trace.
            if hasattr(trace[0].stats, 'preview') and trace[0].stats.preview:
                stream = Stream(traces=stream)
                stream = mergePreviews(stream)
            else:
                stream.merge(method=1)
            trace = stream[0]
        else:
            trace = trace[0]
        # Check if it is a preview file and adjust accordingly.
        # XXX: Will look weird if the preview file is too small.
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            # Mask the gaps.
            trace.data = np.ma.masked_array(trace.data)
            trace.data[trace.data == -1] = np.ma.masked
            # Recreate the min_max scene.
            dtype = trace.data.dtype
            old_time_range = trace.stats.endtime - trace.stats.starttime
            data = np.empty(2 * trace.stats.npts, dtype=dtype)
            data[0::2] = trace.data / 2.0
            data[1::2] = -trace.data / 2.0
            trace.data = data
            # The times are not supposed to change.
            trace.stats.delta = old_time_range / float(trace.stats.npts - 1)
        # Write to self.stats.
        calib = trace.stats.calib
        max = trace.data.max()
        min = trace.data.min()
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            tr_id = trace.id + ' [preview]'
        else:
            tr_id = trace.id
        self.stats.append([tr_id, calib * trace.data.mean(),
                           calib * min, calib * max])
        # Pad the beginning and the end with masked values if necessary. Might
        # seem like overkill but it works really fast and is a clean solution
        # to gaps at the beginning/end.
        concat = [trace]
        if self.starttime != trace.stats.starttime:
            samples = (trace.stats.starttime - self.starttime) * \
                trace.stats.sampling_rate
            temp = [np.ma.masked_all(int(samples))]
            temp.extend(concat)
            concat = temp
        if self.endtime != trace.stats.endtime:
            samples = (self.endtime - trace.stats.endtime) * \
                      trace.stats.sampling_rate
            concat.append(np.ma.masked_all(int(samples)))
        if len(concat) > 1:
            # Use the masked array concatenate, otherwise it will result in a
            # not masked array.
            trace.data = np.ma.concatenate(concat)
            # set starttime and calculate endtime
            trace.stats.starttime = self.starttime
        trace.data *= calib
        ax.plot(trace.data, color=self.color)
        # Set the x limit for the graph to also show the masked values at the
        # beginning/end.
        ax.set_xlim(0, len(trace.data) - 1)

    def __plotMinMax(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Plots the data using a min/max approach that calculates the minimum
        and maximum values of each "pixel" and then plots only these values.
        Works much faster with large data sets.
        """
        # Some variables to help calculate the values.
        starttime = self.starttime.timestamp
        endtime = self.endtime.timestamp
        # The same trace will always have the same sampling_rate.
        sampling_rate = trace[0].stats.sampling_rate
        # The samples per resulting pixel.
        pixel_length = int((endtime - starttime) / self.width *
                           sampling_rate)
        # Loop over all the traces. Do not merge them as there are many samples
        # and therefore merging would be slow.
        for _i, _t in enumerate(trace):
            # Get the start of the next pixel in case the starttime of the
            # trace does not match the starttime of the plot.
            ts = _t.stats.starttime
            if ts > self.starttime:
                start = int(ceil(((ts - self.starttime) * \
                        sampling_rate) / pixel_length))
                # Samples before start.
                prestart = int(((self.starttime + start * pixel_length /
                           sampling_rate) - ts) * sampling_rate)
            else:
                start = 0
                prestart = 0
            # Figure out the number of pixels in the current trace.
            length = len(_t.data) - prestart
            pixel_count = int(length // pixel_length)
            rest = int(length % pixel_length)
            # Reference to new data array which does not copy data but is
            # reshapeable.
            data = _t.data[prestart: prestart + pixel_count * pixel_length]
            data = data.reshape(pixel_count, pixel_length)
            # Calculate extreme_values and put them into new array.
            extreme_values = np.ma.masked_all((self.width, 2), dtype=np.float)
            min = data.min(axis=1) * _t.stats.calib
            max = data.max(axis=1) * _t.stats.calib
            extreme_values[start: start + pixel_count, 0] = min
            extreme_values[start: start + pixel_count, 1] = max
            # The first and the last pixel need separate treatment.
            if start and prestart:
                extreme_values[start - 1, 0] = \
                    _t.data[:prestart].min() * _t.stats.calib
                extreme_values[start - 1, 1] = \
                    _t.data[:prestart].max() * _t.stats.calib
            if rest:
                if start + pixel_count == self.width:
                    index = self.width - 1
                else:
                    index = start + pixel_count
                extreme_values[index, 0] = \
                    _t.data[-rest:].min() * _t.stats.calib
                extreme_values[index, 1] = \
                    _t.data[-rest:].max() * _t.stats.calib
            # Use the first array as a reference and merge all following
            # extreme_values into it.
            if _i == 0:
                minmax = extreme_values
            else:
                # Merge minmax and extreme_values.
                min = np.ma.empty((self.width, 2))
                max = np.ma.empty((self.width, 2))
                # Fill both with the values.
                min[:, 0] = minmax[:, 0]
                min[:, 1] = extreme_values[:, 0]
                max[:, 0] = minmax[:, 1]
                max[:, 1] = extreme_values[:, 1]
                # Find the minimum and maximum values.
                min = min.min(axis=1)
                max = max.max(axis=1)
                # Write again to minmax.
                minmax[:, 0] = min
                minmax[:, 1] = max
        # Write to self.stats.
        self.stats.append([trace[0].id, minmax.mean(),
                           minmax[:, 0].min(),
                           minmax[:, 1].max()])
        # Finally plot the data.
        x_values = np.empty(2 * self.width)
        aranged = np.arange(self.width)
        x_values[0::2] = aranged
        x_values[1::2] = aranged
        # Initialize completely masked array. This version is a little bit
        # slower than first creating an empty array and then setting the mask
        # to True. But on NumPy 1.1 this results in a 0-D array which can not
        # be indexed.
        y_values = np.ma.masked_all(2 * self.width)
        y_values[0::2] = minmax[:, 0]
        y_values[1::2] = minmax[:, 1]
        ax.plot(x_values, y_values, color=self.color)
        # Set the x-limit to avoid clipping of masked values.
        ax.set_xlim(0, self.width - 1)

    def __plotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot and sets time ticks on the x axis.
        """
        # Loop over all axes.
        for ax in self.axis:
            # Get the xlimits.
            start, end = ax.get_xlim()
            # Set the location of the ticks.
            ax.set_xticks(np.linspace(start, end, self.number_of_ticks))
            # Figure out times.
            interval = float(self.endtime - self.starttime) / \
                       (self.number_of_ticks - 1)
            # Set the actual labels.
            if self.type == 'relative':
                labels = ['%.2f' % (self.starttime + _i * interval).timestamp
                          for _i in range(self.number_of_ticks)]
            else:
                labels = [(self.starttime + _i * \
                          interval).strftime(self.tick_format) for _i in \
                          range(self.number_of_ticks)]

            ax.set_xticklabels(labels, fontsize='small',
                               rotation=self.tick_rotation)

    def __plotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot, reads self.stats and sets all ticks on
        the y axis.

        This method also adjusts the y limits so that the mean value is always
        in the middle of the graph and all graphs are equally scaled.
        """
        # Figure out the maximum distance from the mean value to either end.
        # Add 10 percent for better looking graphs.
        max_distance = max([max(trace[1] - trace[2], trace[3] - trace[1])
                            for trace in self.stats]) * 1.1
        # Loop over all axes.
        for _i, ax in enumerate(self.axis):
            mean = self.stats[_i][1]
            # Set the ylimit.
            min_range = mean - max_distance
            max_range = mean + max_distance
            # Set the location of the ticks.
            ticks = [mean - 0.75 * max_distance,
                     mean - 0.5 * max_distance,
                     mean - 0.25 * max_distance,
                     mean,
                     mean + 0.25 * max_distance,
                     mean + 0.5 * max_distance,
                     mean + 0.75 * max_distance]
            ax.set_yticks(ticks)
            # Setup format of the major ticks
            if max(ticks) - min(ticks) > 10:
                fmt = '%d'
            else:
                fmt = '%.2g'
            ax.set_yticklabels([fmt % t for t in ax.get_yticks()],
                               fontsize='small')
            # Set the title of each plot.
            ax.set_title(self.stats[_i][0], horizontalalignment='left',
                      fontsize='small', verticalalignment='center')
            ax.set_ylim(min_range, max_range)

    def __dayplotGetMinMaxValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Takes a Stream object and calculates the min and max values for each
        pixel in the dayplot.

        Writes a three-dimensional array. The first axis is the step, i.e.
        the trace number, the second is the pixel in that step and the third
        contains the minimum and maximum value of the pixel.
        """
        # Helper variables for easier access.
        trace = self.stream[0]
        trace_length = len(trace.data)

        # Samples per interval.
        spi = int(self.interval * trace.stats.sampling_rate)
        # Check the approximate number of samples per pixel and raise an
        # error if there are too few.
        spp = float(spi) / self.width
        if spp < 1.0:
            msg = """
            Too few samples to use dayplot with the given arguments.
            Adjust your arguments or use a different plotting method.
            """
            msg = " ".join(msg.strip().split())
            raise ValueError(msg)
        # Number of intervals plotted.
        noi = float(trace_length) / spi
        inoi = int(round(noi))
        # Plot an extra interval if at least 2 percent of the last interval
        # will actually contain data. Do it this way to lessen floating point
        # inaccuracies.
        if abs(noi - inoi) > 2E-2:
            noi = inoi + 1
        else:
            noi = inoi

        # Adjust data. Fill with masked values in case it is necessary.
        number_of_samples = noi * spi
        delta = number_of_samples - trace_length
        if delta < 0:
            trace.data = trace.data[:number_of_samples]
        elif delta > 0:
            trace.data = np.ma.concatenate([trace.data,
                            createEmptyDataChunk(delta, trace.data.dtype)])

        # Create array for min/max values. Use masked arrays to handle gaps.
        extreme_values = np.ma.empty((noi, self.width, 2))
        trace.data.shape = (noi, spi)

        ispp = int(spp)
        fspp = spp % 1.0
        if fspp == 0.0:
            delta = None
        else:
            delta = spi - ispp * self.width

        # Loop over each interval to avoid larger errors towards the end.
        for _i in range(noi):
            if delta:
                cur_interval = trace.data[_i][:-delta]
                rest = trace.data[_i][-delta:]
            else:
                cur_interval = trace.data[_i]
            cur_interval.shape = (self.width, ispp)
            extreme_values[_i, :, 0] = cur_interval.min(axis=1)
            extreme_values[_i, :, 1] = cur_interval.max(axis=1)
            # Add the rest.
            if delta:
                extreme_values[_i, -1, 0] = min(extreme_values[_i, -1, 0],
                                                rest.min())
                extreme_values[_i, -1, 1] = max(extreme_values[_i, -1, 1],
                                                rest.max())
        # Set class variable.
        self.extreme_values = extreme_values

    def __dayplotNormalizeValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Normalizes all values in the 3 dimensional array, so that the minimum
        value will be 0 and the maximum value will be 1.

        It will also convert all values to floats.
        """
        # Convert to native floats.
        self.extreme_values = self.extreme_values.astype(np.float) * \
                              self.stream[0].stats.calib
        # Make sure that the mean value is at 0
        self.extreme_values -= self.extreme_values.mean()

        # Scale so that 99.5 % of the data will fit the given range.
        if self.vertical_scaling_range is None:
            percentile_delta = 0.005
            max_values = self.extreme_values[:, :, 1].compressed()
            min_values = self.extreme_values[:, :, 0].compressed()
            # Remove masked values.
            max_values.sort()
            min_values.sort()
            length = len(max_values)
            index = int((1.0 - percentile_delta) * length)
            max_val = max_values[index]
            index = int(percentile_delta * length)
            min_val = min_values[index]
        # Exact fit.
        elif float(self.vertical_scaling_range) == 0.0:
            max_val = self.extreme_values[:, :, 1].max()
            min_val = self.extreme_values[:, :, 0].min()
        # Fit with custom range.
        else:
            max_val = min_val = abs(self.vertical_scaling_range) / 2.0

        # Scale from 0 to 1.
        self.extreme_values = self.extreme_values / (max(abs(max_val),
                                                         abs(min_val)) * 2)
        self.extreme_values += 0.5

    def __dayplotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the xticks for the dayplot.
        """
        max_value = self.width - 1
        # Check whether the interval is in seconds, minutes or hours and
        # convert to a universal unit.
        if self.interval < 240:
            time_type = 'seconds'
            time_value = self.interval
        elif self.interval < 24000:
            time_type = 'minutes'
            time_value = self.interval / 60
        else:
            time_type = 'hours'
            time_value = self.interval / 3600
        count = None
        # Hardcode some common values. The plus one is intentional. It has
        # hardly any performance impact and enhances readability.
        if self.interval == 15 * 60:
            count = 15 + 1
        elif self.interval == 20 * 60:
            count = 4 + 1
        elif self.interval == 30 * 60:
            count = 6 + 1
        elif self.interval == 60 * 60:
            count = 4 + 1
        elif self.interval == 90 * 60:
            count = 6 + 1
        elif self.interval == 120 * 60:
            count = 4 + 1
        elif self.interval == 180 * 60:
            count = 6 + 1
        elif self.interval == 240 * 60:
            count = 6 + 1
        elif self.interval == 300 * 60:
            count = 6 + 1
        elif self.interval == 360 * 60:
            count = 12 + 1
        elif self.interval == 720 * 60:
            count = 12 + 1
        # Otherwise run some kind of autodetection routine.
        if not count:
            # Up to 15 time units and if it's a whole number, show every unit.
            if time_value <= 15 and time_value % 1 == 0:
                count = time_value
            # Otherwise determine whether they are divisible by numbers up to
            # 15. If a number is not divisible, just show 10 units.
            else:
                count = 10
                for _i in xrange(15, 1, -1):
                    if time_value % _i == 0:
                        count = _i
                        break
            # Show at least 5 ticks.
            if count < 5:
                count = 5
        # Everything can be overwritten by user specified number of ticks.
        if self.number_of_ticks:
            count = self.number_of_ticks
        # Calculate and set ticks.
        ticks = np.linspace(0.0, max_value, count)
        ticklabels = ['%i' % _i for _i in np.linspace(0.0, time_value, count)]
        self.axis[0].set_xticks(ticks)
        self.axis[0].set_xticklabels(ticklabels, rotation=self.tick_rotation)
        self.axis[0].set_xlabel('time in %s' % time_type)

    def __dayplotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the yticks for the dayplot.
        """
        intervals = self.extreme_values.shape[0]
        # Do not display all ticks unless there are five or fewer steps.
        if intervals <= 5:
            tick_steps = range(0, intervals)
            ticks = np.arange(intervals, 0, -1, dtype=np.float)
            ticks -= 0.5
        else:
            tick_steps = range(0, intervals, self.repeat)
            ticks = np.arange(intervals, 0, -1 * self.repeat, dtype=np.float)
            ticks -= 0.5
        ticklabels = [(self.starttime + (_i + 1) * self.interval + \
                      self.time_offset * 3600).strftime('%H:%M') \
                      for _i in tick_steps]

        self.axis[0].set_yticks(ticks)
        self.axis[0].set_yticklabels(ticklabels)
        # self.axis[0].set_ylabel('UTC')
        # Save range.
        yrange = self.axis[0].get_ylim()
        # Create twin axis.
        #XXX
        self.twin = self.axis[0].twinx()
        self.twin.set_ylim(yrange)
        self.twin.set_yticks(ticks)
        ticklabels = [(self.starttime + _i * self.interval).strftime('%H:%M') \
                      for _i in tick_steps]
        self.twin.set_yticklabels(ticklabels)
        # Complicated way to calculate the label of the y-Axis showing the
        # second time zone.
        sign = '%+i' % self.time_offset
        sign = sign[0]
        time_label = self.timezone.strip() + ' (UTC%s%02i:%02i)' % \
                     (sign, abs(self.time_offset), (self.time_offset % 1 * 60))
        self.axis[0].set_ylabel(time_label)
        self.twin.set_ylabel('UTC')

    def __setupFigure(self):
        """
        The design and look of the whole plot to be produced.
        """
        # Setup figure and axes
        self.fig = plt.figure(num=None, dpi=self.dpi,
                              figsize=(float(self.width) / self.dpi,
                                       float(self.height) / self.dpi))
        # XXX: Figure out why this is needed sometimes.
        # Set size and dpi.
        self.fig.set_dpi(self.dpi)
        self.fig.set_figwidth(float(self.width) / self.dpi)
        self.fig.set_figheight(float(self.height) / self.dpi)
        # hide time information for relative plots
        if self.type == 'relative':
            return
        if self.type == 'dayplot':
            suptitle = '%s %s' % (self.stream[0].id,
                                  self.starttime.strftime('%Y-%m-%d'))
            self.fig.suptitle(suptitle, y=0.94, fontsize='small')
        else:
            pattern = '%Y-%m-%dT%H:%M:%SZ'
            suptitle = '%s  -  %s' % (self.starttime.strftime(pattern),
                                      self.endtime.strftime(pattern))
            self.fig.suptitle(suptitle, x=0.02, y=0.96, fontsize='small',
                              horizontalalignment='left')
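
A minimal usage sketch for the dayplot code above (the file name is
hypothetical; everything is routed through Stream.plot, which instantiates
this plotting class internally, and interval is given in minutes):

from obspy import read

st = read('example.mseed')  # hypothetical local file
# type='dayplot' exercises the __dayplot* methods above
st.plot(type='dayplot', interval=30, outfile='dayplot.png')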
Code example #12
def download_data(client=None,
                  sta=None,
                  start=None,
                  end=None,
                  stdata=[],
                  ndval=nan,
                  new_sr=0.,
                  verbose=False):
    """
    Function to build a stream object for a seismogram in a given time window.
    It first checks whether the data are already available locally and, if
    not, downloads them through the client object.

    Note 
    ----
    Currently only supports NEZ Components!

    Parameters
    ----------
    client : :class:`~obspy.client.fdsn.Client`
        Client object
    sta : Dict
        Station metadata from :mod:`~StDb` data base
    start : :class:`~obspy.core.utcdatetime.UTCDateTime`
        Start time for request
    end : :class:`~obspy.core.utcdatetime.UTCDateTime`
        End time for request
    stdata : List
        Station list
    ndval : float or nan
        Default value for missing data

    Returns
    -------
    err : bool
        Boolean for error handling (`False` is associated with success)
    st : :class:`~obspy.core.stream.Stream`
        Stream containing the vertical and two horizontal component traces

    """

    from fnmatch import filter
    from obspy import read, Stream
    from os.path import dirname, join, exists
    import numpy as np  # used below for np.all / np.allclose
    from math import floor

    # Output
    print(("*     {0:s}.{1:2s} - ZNE:".format(sta.station,
                                              sta.channel.upper())))

    # Set Error Default to True
    erd = True

    # Check if there is local data
    if len(stdata) > 0:
        # Only a single day: Search for local data
        # Get Z localdata
        errZ, stZ = parse_localdata_for_comp(comp='Z',
                                             stdata=stdata,
                                             sta=sta,
                                             start=start,
                                             end=end,
                                             ndval=ndval)
        # Get N localdata
        errN, stN = parse_localdata_for_comp(comp='N',
                                             stdata=stdata,
                                             sta=sta,
                                             start=start,
                                             end=end,
                                             ndval=ndval)
        # Get E localdata
        errE, stE = parse_localdata_for_comp(comp='E',
                                             stdata=stdata,
                                             sta=sta,
                                             start=start,
                                             end=end,
                                             ndval=ndval)
        # Retrieved successfully?
        erd = errZ or errN or errE
        if not erd:
            # Combine Data
            st = stZ + stN + stE

    # No local data? Request using client
    if erd:
        erd = False

        for loc in sta.location:
            tloc = loc
            # Construct location name
            if len(tloc) == 0:
                tloc = "--"
            # Construct Channel List
            channelsZNE = sta.channel.upper() + 'Z,' + sta.channel.upper() + \
                'N,' + sta.channel.upper() + 'E'
            print(("*          {1:2s}[ZNE].{2:2s} - Checking Network".format(
                sta.station, sta.channel.upper(), tloc)))

            # Get waveforms, with extra 1 second to avoid
            # traces cropped too short - traces are trimmed later
            try:
                st = client.get_waveforms(network=sta.network,
                                          station=sta.station,
                                          location=loc,
                                          channel=channelsZNE,
                                          starttime=start,
                                          endtime=end + 1.,
                                          attach_response=False)
                if len(st) == 3:
                    print("*              - ZNE Data Downloaded")

                # Fewer than 3 traces: the data may be Z12 instead of ZNE
                else:
                    # Construct Channel List
                    channelsZ12 = sta.channel.upper() + 'Z,' + \
                        sta.channel.upper() + '1,' + \
                        sta.channel.upper() + '2'
                    msg = "*          {1:2s}[Z12].{2:2s} - Checking Network".format(
                        sta.station, sta.channel.upper(), tloc)
                    print(msg)
                    try:
                        st = client.get_waveforms(network=sta.network,
                                                  station=sta.station,
                                                  location=loc,
                                                  channel=channelsZ12,
                                                  starttime=start,
                                                  endtime=end + 1.,
                                                  attach_response=False)
                        if len(st) == 3:
                            print("*              - Z12 Data Downloaded")
                        else:
                            st = None
                    except Exception:
                        st = None
            except Exception:
                st = None

            # Break if we successfully obtained 3 components in st
            if st is not None:
                break

    # Check the correct 3 components exist
    if st is None:
        print("* Error retrieving waveforms")
        print("**************************************************")
        return True, None

    # Three components successfully retrieved
    else:

        # Detrend and apply taper
        st.detrend('linear').taper(max_percentage=0.05, max_length=5.)

        # Check start times
        if not np.all([tr.stats.starttime == start for tr in st]):
            print("* Start times are not all close to true start: ")
            [
                print("*   " + tr.stats.channel + " " +
                      str(tr.stats.starttime) + " " + str(tr.stats.endtime))
                for tr in st
            ]
            print("*   True start: " + str(start))
            print("* -> Shifting traces to true start")
            delay = [tr.stats.starttime - start for tr in st]
            st_shifted = Stream(
                traces=[traceshift(tr, dt) for tr, dt in zip(st, delay)])
            st = st_shifted.copy()

        # Check sampling rate
        sr = st[0].stats.sampling_rate
        sr_round = float(floor_decimal(sr, 0))
        if not sr == sr_round:
            print("* Sampling rate is not an integer value: ", sr)
            print("* -> Resampling")
            st.resample(sr_round, no_filter=False)

        # Try trimming
        try:
            st.trim(start, end)
        except Exception:
            print("* Unable to trim")
            print("* -> Aborting")
            print("**************************************************")
            return True, None

        # Check final lengths - they should all be equal if start times
        # and sampling rates are all equal and traces have been trimmed
        if not np.allclose([tr.stats.npts for tr in st[1:]], st[0].stats.npts):
            print("* Lengths are incompatible: ")
            [print("*     " + str(tr.stats.npts)) for tr in st]
            print("* -> Aborting")
            print("**************************************************")

            return True, None

        else:
            print("* Waveforms Retrieved...")
            return False, st
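
A usage sketch for download_data (hedged: `sta` stands for a station entry
loaded from an StDb database, as noted in the docstring, and is not
constructed here):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client('IRIS')
start = UTCDateTime('2015-01-01T00:00:00')
# sta is assumed to be an StDb station object with .network, .station,
# .channel and .location attributes
err, st = download_data(client=client, sta=sta, start=start,
                        end=start + 3600., ndval=0., verbose=True)
if not err:
    print(st)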
Code example #13
    arrays = [Hx, Hy, Ex, Ey]
    k = 0

    # Loop over channels to create an obspy stream of the data
    for ar in arrays:
        stats.npts = len(ar)
        stats.channel = channels[k]
        ar = np.asarray(ar)
        trace = Trace(ar, stats)
        stream += trace
        trace = None
        k += 1

    # Create a copy of the stream and resample the copied stream to
    # 10 Hz using the default options of the obspy function resample
    finStream = stream.copy()
    finStream.resample(10.0)

    # Create numpy arrays of the resampled data
    Hx_fin = finStream.select(channel='Hx')[0].data
    Hy_fin = finStream.select(channel='Hy')[0].data
    Ex_fin = finStream.select(channel='Ex')[0].data
    Ey_fin = finStream.select(channel='Ey')[0].data

    # Take start time from resampled stream and set a new delta
    sttime = finStream[0].stats.starttime
    delta = .1
    time = []
    # Create a timestamp array for the resampled data using Python's datetime
    # library (UTCDateTime.timestamp gives the POSIX time as a float)
    for i in range(0, len(finStream[0].data)):
        time = np.append(
            time, datetime.utcfromtimestamp((sttime + i * delta).timestamp))
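
The loop above grows the array one element at a time; a vectorized sketch of
the same timestamp construction (assuming the finStream and 0.1 s delta
defined above):

import numpy as np
from datetime import datetime, timedelta

t0 = datetime.utcfromtimestamp(finStream[0].stats.starttime.timestamp)
# Build all timestamps in one pass instead of np.append in a loop
time = np.array([t0 + timedelta(seconds=0.1 * i)
                 for i in range(finStream[0].stats.npts)])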
Code example #14
File: waveform.py Project: msimon00/obspy
class WaveformPlotting(object):
    """
    Class that provides several solutions for plotting large and small waveform
    data sets.

    .. warning::

        This class should NOT be used directly, instead use the
        :meth:`~obspy.core.stream.Stream.plot` method of the
        ObsPy :class:`~obspy.core.stream.Stream` or
        :class:`~obspy.core.trace.Trace` objects.

    It uses matplotlib to plot the waveforms.
    """
    def __init__(self, **kwargs):
        """
        Checks some variables and maps the kwargs to class variables.
        """
        self.stream = kwargs.get('stream')
        # Check if it is a Stream or a Trace object.
        if isinstance(self.stream, Trace):
            self.stream = Stream([self.stream])
        elif not isinstance(self.stream, Stream):
            msg = 'Plotting is only supported for Stream or Trace objects.'
            raise TypeError(msg)
        # Stream object should contain at least one Trace
        if len(self.stream) < 1:
            msg = "Empty stream object"
            raise IndexError(msg)
        # Type of the plot.
        self.type = kwargs.get('type', 'normal')
        # Start- and endtimes of the plots.
        self.starttime = kwargs.get('starttime', None)
        self.endtime = kwargs.get('endtime', None)
        self.fig_obj = kwargs.get('fig', None)
        # If no times are given take the min/max values from the stream object.
        if not self.starttime:
            self.starttime = min([trace.stats.starttime for \
                             trace in self.stream])
        if not self.endtime:
            self.endtime = max([trace.stats.endtime for \
                           trace in self.stream])
        # Map stream object and slice just in case.
        self.stream.trim(self.starttime, self.endtime)
        # normalize times
        if self.type == 'relative':
            dt = self.starttime
            # fix plotting boundaries
            self.endtime = UTCDateTime(self.endtime - self.starttime)
            self.starttime = UTCDateTime(0)
            # fix stream times
            for tr in self.stream:
                tr.stats.starttime = UTCDateTime(tr.stats.starttime - dt)
        # Whether to use straight plotting or the fast minmax method.
        self.plotting_method = kwargs.get('method', 'fast')
        # Below this value the data points will be plotted normally. Above it
        # the data will be plotted using a different approach (see details
        # below). Can be overridden by the self.plotting_method kwarg above.
        self.max_npts = 400000
        # If automerge is enabled, merge traces with the same id for the plot.
        self.automerge = kwargs.get('automerge', True)
        # If equal_scale is enabled all plots are equally scaled.
        self.equal_scale = kwargs.get('equal_scale', True)
        # Set default values.
        # The default value for the size is determined dynamically because
        # there might be more than one channel to plot.
        self.size = kwargs.get('size', None)
        # Values that will be used to calculate the size of the plot.
        self.default_width = 800
        self.default_height_per_channel = 250
        if not self.size:
            self.width = 800
            # Check the kind of plot.
            if self.type == 'dayplot':
                self.height = 600
            else:
                # One plot for each trace.
                if self.automerge:
                    count = self.__getMergablesIds()
                    count = len(count)
                else:
                    count = len(self.stream)
                self.height = count * 250
        else:
            self.width, self.height = self.size
        # Interval length in minutes for dayplot.
        self.interval = 60 * kwargs.get('interval', 15)
        # Scaling.
        self.vertical_scaling_range = kwargs.get('vertical_scaling_range',
                                                 None)
        # Dots per inch of the plot. Might be useful for printing plots.
        self.dpi = kwargs.get('dpi', 100)
        # Color of the graph.
        if self.type == 'dayplot':
            self.color = kwargs.get(
                'color', ('#B2000F', '#004C12', '#847200', '#0E01FF'))
            if isinstance(self.color, basestring):
                self.color = (self.color, )
            self.number_of_ticks = kwargs.get('number_of_ticks', None)
        else:
            self.color = kwargs.get('color', 'k')
            self.number_of_ticks = kwargs.get('number_of_ticks', 4)
        # Background and face color.
        self.background_color = kwargs.get('bgcolor', 'w')
        self.face_color = kwargs.get('face_color', 'w')
        # Transparency. Overwrites background and facecolor settings.
        self.transparent = kwargs.get('transparent', False)
        if self.transparent:
            self.background_color = None
        # Ticks.
        self.tick_format = kwargs.get('tick_format', '%H:%M:%S')
        self.tick_rotation = kwargs.get('tick_rotation', 0)
        # Whether or not to save a file.
        self.outfile = kwargs.get('outfile')
        self.handle = kwargs.get('handle')
        # File format of the resulting file. Usually defaults to PNG but might
        # be dependent on your matplotlib backend.
        self.format = kwargs.get('format')

    def __getMergeId(self, tr):
        tr_id = tr.id
        # don't merge normal traces with previews
        try:
            if tr.stats.preview:
                tr_id += 'preview'
        except KeyError:
            pass
        # don't merge traces with different processing steps
        try:
            if tr.stats.processing:
                tr_id += str(tr.stats.processing)
        except KeyError:
            pass
        return tr_id

    def __getMergablesIds(self):
        ids = []
        for tr in self.stream:
            tr_id = self.__getMergeId(tr)
            if tr_id not in ids:
                ids.append(tr_id)
        return ids

    def plotWaveform(self, *args, **kwargs):
        """
        Creates a graph of any given ObsPy Stream object. It either saves the
        image directly to the file system or returns a binary image string.

        For all color values you can use legit HTML names, HTML hex strings
        (e.g. '#eeefff') or you can pass an (R, G, B) tuple, where each of
        R, G, B is in the range [0, 1]. You can also use single letters for
        basic built-in colors ('b' = blue, 'g' = green, 'r' = red, 'c' = cyan,
        'm' = magenta, 'y' = yellow, 'k' = black, 'w' = white) and gray shades
        can be given as a string encoding a float in the 0-1 range.
        """
        # Setup the figure if not passed explicitly.
        if not self.fig_obj:
            self.__setupFigure()
        else:
            self.fig = self.fig_obj
        # Determine kind of plot and do the actual plotting.
        if self.type == 'dayplot':
            self.plotDay(*args, **kwargs)
        else:
            self.plot(*args, **kwargs)
        # Adjust the subplot so there is always a fixed margin on every side
        if self.type != 'dayplot':
            fract_y = 60.0 / self.height
            fract_y2 = 40.0 / self.height
            fract_x = 80.0 / self.width
            self.fig.subplots_adjust(top=1.0 - fract_y,
                                     bottom=fract_y2,
                                     left=fract_x,
                                     right=1.0 - fract_x / 2)
        self.fig.canvas.draw()
        # The following just serves as a unified way of saving and displaying
        # the plots.
        if not self.transparent:
            extra_args = {
                'dpi': self.dpi,
                'facecolor': self.face_color,
                'edgecolor': self.face_color
            }
        else:
            extra_args = {'dpi': self.dpi, 'transparent': self.transparent}
        if self.outfile:
            # If format is set use it.
            if self.format:
                self.fig.savefig(self.outfile,
                                 format=self.format,
                                 **extra_args)
            # Otherwise use format from self.outfile or default to PNG.
            else:
                self.fig.savefig(self.outfile, **extra_args)
        else:
            # Return a binary image string if self.outfile is not set but
            # self.format is.
            if self.format:
                imgdata = StringIO.StringIO()
                self.fig.savefig(imgdata, format=self.format, **extra_args)
                imgdata.seek(0)
                return imgdata.read()
            elif self.handle:
                return self.fig
            else:
                if not self.fig_obj:
                    plt.show()

    def plot(self, *args, **kwargs):
        """
        Plot the Traces showing one graph per Trace.

        Plots the whole time series for up to self.max_npts points; above
        that it plots min/max values instead.
        """
        stream_new = []
        # Just remove empty traces.
        if not self.automerge:
            for tr in self.stream:
                stream_new.append([])
                if len(tr.data):
                    stream_new[-1].append(tr)
        else:
            # Generate sorted list of traces (no copy)
            # Sort order, id, starttime, endtime
            ids = self.__getMergablesIds()
            for id in ids:
                stream_new.append([])
                for tr in self.stream:
                    tr_id = self.__getMergeId(tr)
                    if tr_id == id:
                        # does not copy the elements of the data array
                        tr_ref = copy(tr)
                        # Trim does nothing if times are outside
                        if self.starttime >= tr_ref.stats.endtime or \
                                self.endtime <= tr_ref.stats.starttime:
                            continue
                        if tr_ref.data.size:
                            stream_new[-1].append(tr_ref)
                # delete if empty list
                if not len(stream_new[-1]):
                    stream_new.pop()
                    continue
                stream_new[-1].sort(key=lambda x: x.stats.endtime)
                stream_new[-1].sort(key=lambda x: x.stats.starttime)
        # If everything is lost in the process raise an Exception.
        if not len(stream_new):
            raise Exception("Nothing to plot")
        # Create helper variable to track ids and min/max/mean values.
        self.stats = []
        # Loop over each Trace and call the appropriate plotting method.
        self.axis = []
        for _i, tr in enumerate(stream_new):
            # Each trace needs to have the same sampling rate.
            sampling_rates = set([_tr.stats.sampling_rate for _tr in tr])
            if len(sampling_rates) > 1:
                msg = "All traces with the same id need to have the same " + \
                      "sampling rate."
                raise Exception(msg)
            sampling_rate = sampling_rates.pop()
            if self.background_color:
                ax = self.fig.add_subplot(len(stream_new),
                                          1,
                                          _i + 1,
                                          axisbg=self.background_color)
            else:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1)
            self.axis.append(ax)
            # XXX: Also enable the minmax plotting for previews.
            if self.plotting_method != 'full' and \
                ((self.endtime - self.starttime) * sampling_rate > \
                 self.max_npts):
                self.__plotMinMax(stream_new[_i], ax, *args, **kwargs)
            else:
                self.__plotStraight(stream_new[_i], ax, *args, **kwargs)
        # Set ticks.
        self.__plotSetXTicks()
        self.__plotSetYTicks()

    def plotDay(self, *args, **kwargs):
        """
        Creates a dayplot of the seismogram.
        """
        # Create a copy of the stream because it might be operated on.
        self.stream = self.stream.copy()
        # Merge and trim to pad.
        self.stream.merge()
        if len(self.stream) != 1:
            msg = "All traces need to be of the same id for a dayplot"
            raise ValueError(msg)
        self.stream.trim(self.starttime, self.endtime, pad=True)
        # Get minmax array.
        self.__dayplotGetMinMaxValues(self, *args, **kwargs)
        # Normalize array
        self.__dayplotNormalizeValues(self, *args, **kwargs)
        # Get timezone information. If none is given, use local time.
        self.time_offset = kwargs.get(
            'time_offset',
            round((UTCDateTime(datetime.now()) - UTCDateTime()) / 3600.0, 2))
        self.timezone = kwargs.get('timezone', 'local time')
        # Try to guess how many steps are needed to advance one full time unit.
        self.repeat = None
        intervals = self.extreme_values.shape[0]
        if self.interval < 60 and 60 % self.interval == 0:
            self.repeat = 60 / self.interval
        elif self.interval < 1800 and 3600 % self.interval == 0:
            self.repeat = 3600 / self.interval
        # Otherwise use a maximum value of 10.
        else:
            if intervals >= 10:
                self.repeat = 10
            else:
                self.repeat = intervals
        # Create axis to plot on.
        if self.background_color:
            ax = self.fig.add_subplot(1, 1, 1, axisbg=self.background_color)
        else:
            ax = self.fig.add_subplot(1, 1, 1)
        # Adjust the subplots to be symmetrical. Also make some more room
        # at the top.
        self.fig.subplots_adjust(left=0.12, right=0.88, top=0.95)
        # Create x_value_array.
        aranged_array = np.arange(self.width)
        x_values = np.empty(2 * self.width)
        x_values[0::2] = aranged_array
        x_values[1::2] = aranged_array
        intervals = self.extreme_values.shape[0]
        # Loop over each step.
        for _i in xrange(intervals):
            # Create offset array.
            y_values = np.ma.empty(self.width * 2)
            y_values.fill(intervals - (_i + 1))
            # Add min and max values.
            y_values[0::2] += self.extreme_values[_i, :, 0]
            y_values[1::2] += self.extreme_values[_i, :, 1]
            # Plot the values.
            ax.plot(x_values, y_values, color=self.color[_i % len(self.color)])
        # Set ranges.
        ax.set_xlim(0, self.width - 1)
        ax.set_ylim(-0.3, intervals + 0.3)
        self.axis = [ax]
        # Set ticks.
        self.__dayplotSetYTicks(*args, **kwargs)
        self.__dayplotSetXTicks(*args, **kwargs)
        # Choose to show grid but only on the x axis.
        self.fig.axes[0].grid()
        self.fig.axes[0].yaxis.grid(False)

    def __plotStraight(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Just plots the data samples in self.stream. Useful for smaller
        datasets up to around 1,000,000 samples (depending on the machine
        it is being run on).

        Slow and high memory consumption for large datasets.
        """
        # Copy to avoid any changes to original data.
        trace = [tr.copy() for tr in trace]
        if len(trace) > 1:
            stream = Stream(traces=trace)
            # Merge with 'interpolation'. In case of overlaps this method will
            # always use the longest available trace.
            if hasattr(trace[0].stats, 'preview') and trace[0].stats.preview:
                stream = Stream(traces=stream)
                stream = mergePreviews(stream)
            else:
                stream.merge(method=1)
            trace = stream[0]
        else:
            trace = trace[0]
        # Check if it is a preview file and adjust accordingly.
        # XXX: Will look weird if the preview file is too small.
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            # Mask the gaps.
            trace.data = np.ma.masked_array(trace.data)
            trace.data[trace.data == -1] = np.ma.masked
            # Recreate the min_max scene.
            dtype = trace.data.dtype
            old_time_range = trace.stats.endtime - trace.stats.starttime
            data = np.empty(2 * trace.stats.npts, dtype=dtype)
            data[0::2] = trace.data / 2.0
            data[1::2] = -trace.data / 2.0
            trace.data = data
            # The times are not supposed to change.
            trace.stats.delta = old_time_range / float(trace.stats.npts - 1)
        # Write to self.stats.
        calib = trace.stats.calib
        max = trace.data.max()
        min = trace.data.min()
        # set label
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            tr_id = trace.id + ' [preview]'
        elif hasattr(trace, 'label'):
            tr_id = trace.label
        else:
            tr_id = trace.id
        self.stats.append(
            [tr_id, calib * trace.data.mean(), calib * min, calib * max])
        # Pad the beginning and the end with masked values if necessary. Might
        # seem like overkill but it works really fast and is a clean solution
        # to gaps at the beginning/end.
        concat = [trace]
        if self.starttime != trace.stats.starttime:
            samples = (trace.stats.starttime - self.starttime) * \
                trace.stats.sampling_rate
            temp = [np.ma.masked_all(int(samples))]
            temp.extend(concat)
            concat = temp
        if self.endtime != trace.stats.endtime:
            samples = (self.endtime - trace.stats.endtime) * \
                      trace.stats.sampling_rate
            concat.append(np.ma.masked_all(int(samples)))
        if len(concat) > 1:
            # Use the masked array concatenate, otherwise it will result in a
            # not masked array.
            trace.data = np.ma.concatenate(concat)
            # set starttime and calculate endtime
            trace.stats.starttime = self.starttime
        trace.data *= calib
        ax.plot(trace.data, color=self.color)
        # Set the x limit for the graph to also show the masked values at the
        # beginning/end.
        ax.set_xlim(0, len(trace.data) - 1)

    def __plotMinMax(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Plots the data using a min/max approach that calculates the minimum
        and maximum values of each "pixel" and then plots only these values.
        Works much faster with large data sets.
        """
        # Some variables to help calculate the values.
        starttime = self.starttime.timestamp
        endtime = self.endtime.timestamp
        # The same trace will always have the same sampling_rate.
        sampling_rate = trace[0].stats.sampling_rate
        # The samples per resulting pixel.
        pixel_length = int((endtime - starttime) / self.width * sampling_rate)
        # Loop over all the traces. Do not merge them as there are many samples
        # and therefore merging would be slow.
        for _i, tr in enumerate(trace):
            # Get the start of the next pixel in case the starttime of the
            # trace does not match the starttime of the plot.
            ts = tr.stats.starttime
            if ts > self.starttime:
                start = int(ceil(((ts - self.starttime) * \
                        sampling_rate) / pixel_length))
                # Samples before start.
                prestart = int(
                    ((self.starttime + start * pixel_length / sampling_rate) -
                     ts) * sampling_rate)
            else:
                start = 0
                prestart = 0
            # Figure out the number of pixels in the current trace.
            length = len(tr.data) - prestart
            pixel_count = int(length // pixel_length)
            rest = int(length % pixel_length)
            # Reference to new data array which does not copy data but is
            # reshapeable.
            data = tr.data[prestart:prestart + pixel_count * pixel_length]
            data = data.reshape(pixel_count, pixel_length)
            # Calculate extreme_values and put them into new array.
            extreme_values = np.ma.masked_all((self.width, 2), dtype=np.float)
            min = data.min(axis=1) * tr.stats.calib
            max = data.max(axis=1) * tr.stats.calib
            extreme_values[start:start + pixel_count, 0] = min
            extreme_values[start:start + pixel_count, 1] = max
            # The first and last pixel need separate treatment.
            if start and prestart:
                extreme_values[start - 1, 0] = \
                    tr.data[:prestart].min() * tr.stats.calib
                extreme_values[start - 1, 1] = \
                    tr.data[:prestart].max() * tr.stats.calib
            if rest:
                if start + pixel_count == self.width:
                    index = self.width - 1
                else:
                    index = start + pixel_count
                extreme_values[index, 0] = \
                    tr.data[-rest:].min() * tr.stats.calib
                extreme_values[index, 1] = \
                    tr.data[-rest:].max() * tr.stats.calib
            # Use the first array as a reference and merge all following
            # extreme_values into it.
            if _i == 0:
                minmax = extreme_values
            else:
                # Merge minmax and extreme_values.
                min = np.ma.empty((self.width, 2))
                max = np.ma.empty((self.width, 2))
                # Fill both with the values.
                min[:, 0] = minmax[:, 0]
                min[:, 1] = extreme_values[:, 0]
                max[:, 0] = minmax[:, 1]
                max[:, 1] = extreme_values[:, 1]
                # Find the minimum and maximum values.
                min = min.min(axis=1)
                max = max.max(axis=1)
                # Write again to minmax.
                minmax[:, 0] = min
                minmax[:, 1] = max
        # set label
        if hasattr(trace[0], 'label'):
            tr_id = trace[0].label
        else:
            tr_id = trace[0].id
        # Write to self.stats.
        self.stats.append(
            [tr_id,
             minmax.mean(), minmax[:, 0].min(), minmax[:, 1].max()])
        # Finally plot the data.
        x_values = np.empty(2 * self.width)
        aranged = np.arange(self.width)
        x_values[0::2] = aranged
        x_values[1::2] = aranged
        # Initialize completely masked array. This version is a little bit
        # slower than first creating an empty array and then setting the mask
        # to True. But on NumPy 1.1 this results in a 0-D array which can not
        # be indexed.
        y_values = np.ma.masked_all(2 * self.width)
        y_values[0::2] = minmax[:, 0]
        y_values[1::2] = minmax[:, 1]
        ax.plot(x_values, y_values, color=self.color)
        # Set the x-limit to avoid clipping of masked values.
        ax.set_xlim(0, self.width - 1)

    def __plotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot and sets time ticks on the x axis.
        """
        # Loop over all axes.
        for ax in self.axis:
            # Get the xlimits.
            start, end = ax.get_xlim()
            # Set the location of the ticks.
            ax.set_xticks(np.linspace(start, end, self.number_of_ticks))
            # Figure out times.
            interval = float(self.endtime - self.starttime) / \
                       (self.number_of_ticks - 1)
            # Set the actual labels.
            if self.type == 'relative':
                labels = ['%.2f' % (self.starttime + _i * interval).timestamp \
                          for _i in range(self.number_of_ticks)]
            else:
                labels = [(self.starttime + _i * \
                          interval).strftime(self.tick_format) for _i in \
                          range(self.number_of_ticks)]

            ax.set_xticklabels(labels,
                               fontsize='small',
                               rotation=self.tick_rotation)

    def __plotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot, reads self.stats and sets all ticks on
        the y axis.

        This method also adjusts the y limits so that the mean value is always
        in the middle of the graph and all graphs are equally scaled.
        """
        # Figure out the maximum distance from the mean value to either end.
        # Add 10 percent for better looking graphs.
        max_distance = max([
            max(trace[1] - trace[2], trace[3] - trace[1])
            for trace in self.stats
        ]) * 1.1
        # Loop over all axes.
        for _i, ax in enumerate(self.axis):
            mean = self.stats[_i][1]
            if not self.equal_scale:
                trace = self.stats[_i]
                max_distance = max(trace[1] - trace[2],
                                   trace[3] - trace[1]) * 1.1
            # Set the ylimit.
            min_range = mean - max_distance
            max_range = mean + max_distance
            # Set the location of the ticks.
            ticks = [
                mean - 0.75 * max_distance, mean - 0.5 * max_distance,
                mean - 0.25 * max_distance, mean, mean + 0.25 * max_distance,
                mean + 0.5 * max_distance, mean + 0.75 * max_distance
            ]
            ax.set_yticks(ticks)
            # Setup format of the major ticks
            if abs(max(ticks) - min(ticks)) > 10:
                # integer numbers
                fmt = '%d'
                if abs(min(ticks)) > 10e6:
                    # but switch back to exponential for huge numbers
                    fmt = '%.2g'
            else:
                fmt = '%.2g'
            ax.set_yticklabels([fmt % t for t in ax.get_yticks()],
                               fontsize='small')
            # Set the title of each plot.
            ax.set_title(self.stats[_i][0],
                         horizontalalignment='center',
                         fontsize='small',
                         verticalalignment='center')
            ax.set_ylim(min_range, max_range)

    def __dayplotGetMinMaxValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Takes a Stream object and calculates the min and max values for each
        pixel in the dayplot.

        Writes a three dimensional array. The first axis is the step, i.e.
        the number of the trace, the second is the pixel in that step and the
        third contains the minimum and maximum value of the pixel.
        """
        # Helper variables for easier access.
        trace = self.stream[0]
        trace_length = len(trace.data)

        # Samples per interval.
        spi = int(self.interval * trace.stats.sampling_rate)
        # Check the approximate number of samples per pixel and raise an
        # error if there are too few.
        spp = float(spi) / self.width
        if spp < 1.0:
            msg = """
            Too few samples to use dayplot with the given arguments.
            Adjust your arguments or use a different plotting method.
            """
            msg = " ".join(msg.strip().split())
            raise ValueError(msg)
        # Number of intervals plotted.
        noi = float(trace_length) / spi
        inoi = int(round(noi))
        # Plot an extra interval if at least 2 percent of the last interval
        # will actually contain data. Do it this way to lessen floating point
        # inaccuracies.
        if abs(noi - inoi) > 2E-2:
            noi = inoi + 1
        else:
            noi = inoi

        # Adjust data. Fill with masked values in case it is necessary.
        number_of_samples = noi * spi
        delta = number_of_samples - trace_length
        if delta < 0:
            trace.data = trace.data[:number_of_samples]
        elif delta > 0:
            trace.data = np.ma.concatenate(
                [trace.data,
                 createEmptyDataChunk(delta, trace.data.dtype)])

        # Create array for min/max values. Use masked arrays to handle gaps.
        extreme_values = np.ma.empty((noi, self.width, 2))
        trace.data.shape = (noi, spi)

        ispp = int(spp)
        fspp = spp % 1.0
        if fspp == 0.0:
            delta = None
        else:
            delta = spi - ispp * self.width

        # Loop over each interval to avoid larger errors towards the end.
        for _i in range(noi):
            if delta:
                cur_interval = trace.data[_i][:-delta]
                rest = trace.data[_i][-delta:]
            else:
                cur_interval = trace.data[_i]
            cur_interval.shape = (self.width, ispp)
            extreme_values[_i, :, 0] = cur_interval.min(axis=1)
            extreme_values[_i, :, 1] = cur_interval.max(axis=1)
            # Add the rest.
            if delta:
                extreme_values[_i, -1, 0] = min(extreme_values[_i, -1, 0],
                                                rest.min())
                extreme_values[_i, -1, 1] = max(extreme_values[_i, -1, 1],
                                                rest.max())
        # Set class variable.
        self.extreme_values = extreme_values

    def __dayplotNormalizeValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Normalizes all values in the 3 dimensional array so that they
        (approximately) span the range from 0 to 1.

        It will also convert all values to floats.
        """
        # Convert to native floats.
        self.extreme_values = self.extreme_values.astype(np.float) * \
                              self.stream[0].stats.calib
        # Make sure that the mean value is at 0
        self.extreme_values -= self.extreme_values.mean()

        # Scale so that 99.5 % of the data will fit the given range.
        if self.vertical_scaling_range is None:
            percentile_delta = 0.005
            # compressed() also removes the masked values.
            max_values = self.extreme_values[:, :, 1].compressed()
            min_values = self.extreme_values[:, :, 0].compressed()
            max_values.sort()
            min_values.sort()
            length = len(max_values)
            index = int((1.0 - percentile_delta) * length)
            max_val = max_values[index]
            index = int(percentile_delta * length)
            min_val = min_values[index]
        # Exact fit.
        elif float(self.vertical_scaling_range) == 0.0:
            max_val = self.extreme_values[:, :, 1].max()
            min_val = self.extreme_values[:, :, 0].min()
        # Fit with custom range.
        else:
            max_val = min_val = abs(self.vertical_scaling_range) / 2.0

        # Scale from 0 to 1.
        self.extreme_values = self.extreme_values / (
            max(abs(max_val), abs(min_val)) * 2)
        self.extreme_values += 0.5

    def __dayplotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the xticks for the dayplot.
        """
        localization_dict = kwargs.get('localization_dict', {})
        localization_dict.setdefault('seconds', 'seconds')
        localization_dict.setdefault('minutes', 'minutes')
        localization_dict.setdefault('hours', 'hours')
        localization_dict.setdefault('time in', 'time in')
        max_value = self.width - 1
        # Check whether the interval is in seconds, minutes or hours and
        # convert to a universal unit.
        if self.interval < 240:
            time_type = localization_dict['seconds']
            time_value = self.interval
        elif self.interval < 24000:
            time_type = localization_dict['minutes']
            time_value = self.interval / 60
        else:
            time_type = localization_dict['hours']
            time_value = self.interval / 3600
        count = None
        # Hardcode some common values. The plus one is intentional. It had
        # hardly any performance impact and enhances readability.
        if self.interval == 15 * 60:
            count = 15 + 1
        elif self.interval == 20 * 60:
            count = 4 + 1
        elif self.interval == 30 * 60:
            count = 6 + 1
        elif self.interval == 60 * 60:
            count = 4 + 1
        elif self.interval == 90 * 60:
            count = 6 + 1
        elif self.interval == 120 * 60:
            count = 4 + 1
        elif self.interval == 180 * 60:
            count = 6 + 1
        elif self.interval == 240 * 60:
            count = 6 + 1
        elif self.interval == 300 * 60:
            count = 6 + 1
        elif self.interval == 360 * 60:
            count = 12 + 1
        elif self.interval == 720 * 60:
            count = 12 + 1
        # Otherwise run some kind of autodetection routine.
        if not count:
            # Up to 15 time units: if it is a whole number, show every unit.
            if time_value <= 15 and time_value % 1 == 0:
                count = time_value
            # Otherwise check whether the number is evenly divisible by a
            # number up to 15. If it is not, just show 10 units.
            else:
                count = 10
                for _i in xrange(15, 1, -1):
                    if time_value % _i == 0:
                        count = _i
                        break
            # Show at least 5 ticks.
            if count < 5:
                count = 5
        # Everything can be overwritten by user specified number of ticks.
        if self.number_of_ticks:
            count = self.number_of_ticks
        # Calculate and set ticks.
        ticks = np.linspace(0.0, max_value, count)
        ticklabels = ['%i' % _i for _i in np.linspace(0.0, time_value, count)]
        self.axis[0].set_xticks(ticks)
        self.axis[0].set_xticklabels(ticklabels, rotation=self.tick_rotation)
        self.axis[0].set_xlabel('%s %s' %
                                (localization_dict['time in'], time_type))

    def __dayplotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the yticks for the dayplot.
        """
        intervals = self.extreme_values.shape[0]
        # Do not display all ticks unless there are five or fewer steps.
        if intervals <= 5:
            tick_steps = range(0, intervals)
            ticks = np.arange(intervals, 0, -1, dtype=np.float)
            ticks -= 0.5
        else:
            tick_steps = range(0, intervals, self.repeat)
            ticks = np.arange(intervals, 0, -1 * self.repeat, dtype=np.float)
            ticks -= 0.5

        left_time_offset = 0
        right_time_offset = self.time_offset
        left_ylabel = 'UTC'

        # Complicated way to calculate the label of the y-Axis showing the
        # second time zone.
        sign = '%+i' % self.time_offset
        sign = sign[0]
        time_label = self.timezone.strip() + ' (UTC%s%02i:%02i)' % \
                     (sign, abs(self.time_offset), (self.time_offset % 1 * 60))
        right_ylabel = time_label

        if kwargs.get('swap_time_axis', False):
            left_time_offset, right_time_offset = \
                    right_time_offset, left_time_offset
            left_ylabel, right_ylabel = right_ylabel, left_ylabel

        left_ticklabels = [(self.starttime + _i * self.interval + \
                            left_time_offset * 3600).strftime('%H:%M') \
                      for _i in tick_steps]
        right_ticklabels = [(self.starttime + (_i + 1) * self.interval + \
                            right_time_offset * 3600).strftime('%H:%M') \
                      for _i in tick_steps]

        self.axis[0].set_yticks(ticks)
        self.axis[0].set_yticklabels(left_ticklabels)
        self.axis[0].set_ylabel(left_ylabel)
        # Save range.
        yrange = self.axis[0].get_ylim()
        # Create twin axis.
        #XXX
        self.twin = self.axis[0].twinx()
        self.twin.set_ylim(yrange)
        self.twin.set_yticks(ticks)
        self.twin.set_yticklabels(right_ticklabels)
        self.twin.set_ylabel(right_ylabel)

    def __setupFigure(self):
        """
        The design and look of the whole plot to be produced.
        """
        # Setup figure and axes
        self.fig = plt.figure(num=None,
                              dpi=self.dpi,
                              figsize=(float(self.width) / self.dpi,
                                       float(self.height) / self.dpi))
        # XXX: Figure out why this is needed sometimes.
        # Set size and dpi.
        self.fig.set_dpi(self.dpi)
        self.fig.set_figwidth(float(self.width) / self.dpi)
        self.fig.set_figheight(float(self.height) / self.dpi)
        x = self.__getX(10)
        y = self.__getY(15)
        if hasattr(self.stream, 'label'):
            suptitle = self.stream.label
        elif self.type == 'relative':
            # hide time information for relative plots
            return
        elif self.type == 'dayplot':
            suptitle = '%s %s' % (self.stream[0].id,
                                  self.starttime.strftime('%Y-%m-%d'))
            x = self.fig.subplotpars.left
        else:
            pattern = '%Y-%m-%dT%H:%M:%SZ'
            suptitle = '%s  -  %s' % (self.starttime.strftime(pattern),
                                      self.endtime.strftime(pattern))
        # add suptitle
        self.fig.suptitle(suptitle,
                          x=x,
                          y=y,
                          fontsize='small',
                          horizontalalignment='left')

    def __getY(self, dy):
        return (self.height - dy) * 1.0 / self.height

    def __getX(self, dx):
        return dx * 1.0 / self.width
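
For reference, a brief sketch of how the keyword arguments consumed by
WaveformPlotting.__init__ above arrive via Stream.plot (the file name is
hypothetical):

from obspy import read

st = read('example.mseed')  # hypothetical local file
# size, color, bgcolor, tick_format, tick_rotation and outfile are all
# read out of kwargs in __init__ above
st.plot(size=(800, 500), color='k', bgcolor='w',
        tick_format='%H:%M:%S', tick_rotation=45, outfile='waves.png')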
Code example #15
# Number of points of windows and overlap for PSD calculation
# Easiest to split up in powers of two
nfft = 2**10
windlap = 0.5

total_seconds = ((60. * 60. * 24. * Daynum) - (3601. * 3.))

st.detrend('constant')
st.merge(fill_value=0.)

# Start the loop of PSD calculations

for i in range(0, Nwin):

    st2 = st.copy()

    # get a random start time in seconds

    # Note this will always be an integer second

    ss = random.randint(0, total_seconds)

    if "H_day" not in vars():
        H_day = ss / (60. * 60. * 24)

    else:
        H_day = np.vstack((H_day, ss / (60. * 60. * 24)))

    stime = sday + ss
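
The PSD computation itself falls outside this excerpt; a minimal sketch of
what one iteration might do with the window picked above, using
scipy.signal.welch and the nfft/windlap values defined earlier (the one-hour
window length is an assumption):

from scipy.signal import welch

st3 = st2.copy()
st3.trim(stime, stime + 3600.)  # hypothetical 1-hour window starting at stime
f, Pxx = welch(st3[0].data, fs=st3[0].stats.sampling_rate,
               nperseg=nfft, noverlap=int(nfft * windlap))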
Code example #16
File: test_trigger.py Project: msimon00/obspy
 def test_coincidenceTrigger(self):
     """
     Test network coincidence trigger.
     """
     st = Stream()
     files = [
         "BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
         "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
         "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
         "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"
     ]
     for filename in files:
         filename = os.path.join(self.path, filename)
         st += read(filename)
     # some prefiltering used for UH network
     st.filter('bandpass', freqmin=10, freqmax=20)
     # 1. no weighting, no stations specified, good settings
     # => 3 events, no false triggers
     # for the first test we make some additional tests regarding types
     res = coincidenceTrigger("recstalta",
                              3.5,
                              1,
                              st.copy(),
                              3,
                              sta=0.5,
                              lta=10)
     self.assertTrue(isinstance(res, list))
     self.assertTrue(len(res) == 3)
     expected_keys = [
         'time', 'coincidence_sum', 'duration', 'stations', 'trace_ids'
     ]
     expected_types = [UTCDateTime, float, float, list, list]
     for item in res:
         self.assertTrue(isinstance(item, dict))
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 3)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 4)
     # 2. no weighting, station selection
     # => 2 events, no false triggers
     trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta",
                                 3.5,
                                 1,
                                 st.copy(),
                                 3,
                                 trace_ids=trace_ids,
                                 sta=0.5,
                                 lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(4.2 < re[0]['duration'] < 4.8)
         self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[0]['coincidence_sum'] == 3)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(4.2 < re[1]['duration'] < 4.4)
         self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[1]['coincidence_sum'] == 3)
     # 3. weighting, station selection
     # => 3 events, no false triggers
     trace_ids = {
         'BW.UH1..SHZ': 0.4,
         'BW.UH2..SHZ': 0.35,
         'BW.UH3..SHZ': 0.4,
         'BW.UH4..EHZ': 0.25
     }
     res = coincidenceTrigger("recstalta",
                              3.5,
                              1,
                              st.copy(),
                              1.0,
                              trace_ids=trace_ids,
                              sta=0.5,
                              lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     # 4. weighting, station selection, max_len
     # => 2 events, no false triggers, small event does not overlap anymore
     trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta",
                                 3.5,
                                 1,
                                 st.copy(),
                                 1.2,
                                 trace_ids=trace_ids,
                                 max_trigger_length=0.13,
                                 sta=0.5,
                                 lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(0.2 < re[0]['duration'] < 0.3)
         self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[0]['coincidence_sum'] == 1.2)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(0.18 < re[1]['duration'] < 0.2)
         self.assertTrue(re[1]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[1]['coincidence_sum'] == 1.2)
     # 5. station selection, extremely sensitive settings
     # => 4 events, 1 false triggers
     res = coincidenceTrigger("recstalta",
                              2.5,
                              1,
                              st.copy(),
                              2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3,
                              lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 6. same as 5, gappy stream
     # => same as 5 (almost, duration of 1 event changes by 0.02s)
     st2 = st.copy()
     tr1 = st2.pop(0)
     t1 = tr1.stats.starttime
     t2 = tr1.stats.endtime
     td = t2 - t1
     tr1a = tr1.slice(starttime=t1, endtime=t1 + 0.45 * td)
     tr1b = tr1.slice(starttime=t1 + 0.6 * td, endtime=t1 + 0.94 * td)
     st2.insert(1, tr1a)
     st2.insert(3, tr1b)
     res = coincidenceTrigger("recstalta",
                              2.5,
                              1,
                              st2,
                              2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3,
                              lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 7. same as 3 but modify input trace ids and check output of trace_ids
     # and other additional information with ``details=True``
     st2 = st.copy()
     st2[0].stats.network = "XX"
     st2[1].stats.location = "99"
     st2[1].stats.network = ""
     st2[1].stats.location = "99"
     st2[1].stats.channel = ""
     st2[2].stats.channel = "EHN"
     st2[3].stats.network = ""
     st2[3].stats.channel = ""
     st2[3].stats.station = ""
     trace_ids = {
         'XX.UH1..SHZ': 0.4,
         '.UH2.99.': 0.35,
         'BW.UH3..EHN': 0.4,
         '...': 0.25
     }
     res = coincidenceTrigger("recstalta",
                              3.5,
                              1,
                              st2,
                              1.0,
                              trace_ids=trace_ids,
                              details=True,
                              sta=0.5,
                              lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[0]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[0]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[0]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[0]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['trace_ids'][0] == st2[1].id)
     self.assertTrue(res[1]['trace_ids'][1] == st2[2].id)
     self.assertTrue(res[1]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[2]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[2]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[2]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[2]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     expected_keys = [
         'cft_peak_wmean', 'cft_std_wmean', 'cft_peaks', 'cft_stds'
     ]
     expected_types = [float, float, list, list]
     for item in res:
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     # check some of the detailed info
     ev = res[-1]
     self.assertAlmostEqual(ev['cft_peak_wmean'], 18.097582068353855)
     self.assertAlmostEqual(ev['cft_std_wmean'], 4.7972436395074087)
     self.assertAlmostEqual(ev['cft_peaks'][0], 18.973097608513633)
     self.assertAlmostEqual(ev['cft_peaks'][1], 16.852175794415011)
     self.assertAlmostEqual(ev['cft_peaks'][2], 18.64005853900883)
     self.assertAlmostEqual(ev['cft_peaks'][3], 17.572363634564621)
     self.assertAlmostEqual(ev['cft_stds'][0], 4.8811165222946951)
     self.assertAlmostEqual(ev['cft_stds'][1], 4.4446373508521804)
     self.assertAlmostEqual(ev['cft_stds'][2], 5.3499401252675964)
     self.assertAlmostEqual(ev['cft_stds'][3], 4.2723814539487703)
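The weighted-trace_ids variants above (cases 3, 4 and 7) are the core pattern: a dict maps each trace ID to a weight, and the coincidence sum is compared against a float threshold. As a minimal usage sketch (assuming `st` already holds the bandpass-filtered UH traces; newer ObsPy releases expose the function as `coincidence_trigger` in `obspy.signal.trigger`):

from obspy.signal.trigger import coincidence_trigger

# Weighted trace IDs: any three of the four stations triggering together
# reach the coincidence threshold of 1.0 used below.
weights = {'BW.UH1..SHZ': 0.4, 'BW.UH2..SHZ': 0.35,
           'BW.UH3..SHZ': 0.4, 'BW.UH4..EHZ': 0.25}
events = coincidence_trigger("recstalta", 3.5, 1, st.copy(), 1.0,
                             trace_ids=weights, sta=0.5, lta=10)
for ev in events:
    print(ev['time'], ev['coincidence_sum'], ev['stations'])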
Code example #17
            if not st:
                logging.error(f"No data retrieved for {STA}")
                continue

            st.merge(fill_value='latest')
            st.trim(STARTTIME - 2 * config.taper_val,
                    ENDTIME + 2 * config.taper_val,
                    pad=True,
                    fill_value=0)
            st.sort()
            logging.info(st)

            # print('Removing sensitivity...')
            # st.remove_sensitivity()

            stf = st.copy()
            stf.detrend('demean')
            stf.taper(max_percentage=None, max_length=config.taper_val)
            stf.filter("bandpass",
                       freqmin=FMIN,
                       freqmax=FMAX,
                       corners=2,
                       zerophase=True)
            st.trim(STARTTIME, ENDTIME, pad=True, fill_value=0)

            # %% Get inventory and lat/lon info
            client = Client("IRIS")
            try:
                inv = client.get_stations(network=NET,
                                          station=STA,
                                          channel=CHAN,
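The snippet above is cut off inside the get_stations call. A complete version of that inventory request could look like the sketch below (NET, STA, CHAN, STARTTIME and ENDTIME as defined earlier in the script; the error handling is an assumption):

from obspy.clients.fdsn import Client

client = Client("IRIS")
try:
    inv = client.get_stations(network=NET, station=STA, channel=CHAN,
                              starttime=STARTTIME, endtime=ENDTIME,
                              level="channel")
    # Latitude/longitude of the first matching station
    lat = inv[0][0].latitude
    lon = inv[0][0].longitude
except Exception:
    logging.error(f"No station metadata for {STA}")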
Code example #18
def main():

    print()
    print("#########################################")
    print("#        __                 _     _     #")
    print("#  _ __ / _|_ __  _   _    | |__ | | __ #")
    print("# | '__| |_| '_ \| | | |   | '_ \| |/ / #")
    print("# | |  |  _| |_) | |_| |   | | | |   <  #")
    print("# |_|  |_| | .__/ \__, |___|_| |_|_|\_\ #")
    print("#          |_|    |___/_____|           #")
    print("#                                       #")
    print("#########################################")
    print()

    # Run Input Parser
    args = arguments.get_hk_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        if args.phase in ['P', 'PP', 'allP']:
            datapath = Path('P_DATA') / stkey
        elif args.phase in ['S', 'SKS', 'allS']:
            datapath = Path('S_DATA') / stkey
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + " doesn't exist - continuing")
            continue

        # Define save path
        if args.save:
            savepath = Path('HK_DATA') / stkey
            if not savepath.is_dir():
                print('Path to ' + str(savepath) +
                      " doesn't exist - creating it")
                savepath.mkdir(parents=True)

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                # Load meta data
                metafile = folder / "Meta_Data.pkl"
                if not metafile.is_file():
                    continue
                with open(metafile, 'rb') as mf:
                    meta = pickle.load(mf)

                # Skip data not in list of phases
                if meta.phase not in args.listphase:
                    continue

                # QC Thresholding
                if meta.snrh < args.snrh:
                    continue
                if meta.snr < args.snr:
                    continue
                if meta.cc < args.cc:
                    continue

                # # Check bounds on data
                # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]:
                #     continue
                # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]:
                #     continue

                # If everything passed, load the RF data
                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    with open(filename, "rb") as file:
                        rfdata = pickle.load(file)
                    rfRstream.append(rfdata[1])
                    # Flag folders whose RF traces have an unexpected length
                    if rfdata[0].stats.npts != 1451:
                        print(folder)

        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            t1 = 0.
            t2 = 30.

            varR = []
            for i in range(len(rfRstream)):
                taxis = rfRstream[i].stats.taxis
                tselect = (taxis > t1) & (taxis < t2)
                varR.append(np.var(rfRstream[i].data[tselect]))
            varR = np.array(varR)

            # Remove outliers wrt variance within time range
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.5]
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])

        print('')
        print("Number of radial RF data: " + str(len(rfRstream)))
        print('')

        # Try binning if specified
        if args.calc_dip:
            rf_tmp = binning.bin_baz_slow(rfRstream,
                                          nbaz=args.nbaz + 1,
                                          nslow=args.nslow + 1,
                                          pws=args.pws)
            rfRstream = rf_tmp[0]
        else:
            rf_tmp = binning.bin(rfRstream,
                                 typ='slow',
                                 nbin=args.nslow + 1,
                                 pws=args.pws)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter
        if args.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass',
                                  freqmin=args.bp_copy[0],
                                  freqmax=args.bp_copy[1],
                                  corners=2,
                                  zerophase=True)

        # Check bin counts:
        for tr in list(rfRstream):
            if tr.stats.nbin < args.binlim:
                rfRstream.remove(tr)

        # Continue if stream is too short
        if len(rfRstream) < 5:
            continue

        if args.save_plot and not Path('HK_PLOTS').is_dir():
            Path('HK_PLOTS').mkdir(parents=True)

        print('')
        print("Number of radial RF bins: " + str(len(rfRstream)))
        print('')

        # Filter original stream
        rfRstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object
        try:
            hkstack = HkStack(rfRstream,
                              rfV2=rfRstream_copy,
                              strike=args.strike,
                              dip=args.dip,
                              vp=args.vp)
        except:
            hkstack = HkStack(rfRstream,
                              strike=args.strike,
                              dip=args.dip,
                              vp=args.vp)

        # Update attributes
        hkstack.hbound = args.hbound
        hkstack.kbound = args.kbound
        hkstack.dh = args.dh
        hkstack.dk = args.dk
        hkstack.weights = args.weights

        # Stack with or without dip
        if args.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=args.typ)

        if args.plot:
            hkstack.plot(args.save_plot, args.title, args.form)

        if args.save:
            filename = savepath / (hkstack.rfV1[0].stats.station +
                                   ".hkstack." + args.typ + ".pkl")

            hkstack.save(file=filename)
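The args.no_outl branch above rejects traces whose variance in the 0-30 s window lies more than 2.5 median absolute deviations from the median variance. The same rule as a standalone helper (a sketch; the factor 1.4826 scales the MAD to a standard deviation for Gaussian data):

import numpy as np

def mad_outliers(values, thresh=2.5):
    """Return indices of values flagged as outliers by the MAD rule."""
    values = np.asarray(values, dtype=float)
    med = np.median(values)
    mad = 1.4826 * np.median(np.abs(values - med))
    return np.nonzero(np.abs(values - med) / mad > thresh)[0]

As in the script, remove the flagged traces in reverse index order so the remaining indices stay valid.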
Code example #19
def parallel_waveformDL(mpi_flatfile_directory,eventdir_unfiltered,eventdir_filtered,client_name,resp_prefilt_bottom,respfilt_bottom,start_delta,end_delta,fig_x,fig_y,output_directory,rank,size):
    '''
    Run the waveform data download in parallel.
    Attempts to download waveforms for the record information in each line of
    the rank's flatfile. Removes instrument response and saves as unfiltered;
    also removes instrument response, filters, and saves in a separate
    directory for the event (directory structure supplied as input). Data and
    plots are only saved if SNR >= 5. Makes a figure of the waveform with
    theoretical P and S wave arrival times, and outputs a new flatfile.
    Input:
        mpi_flatfile_directory: String with the directory containing the MPI flatfiles, saved in format "flatfile_{rank}.csv"
        eventdir_unfiltered:    String with the path to the unfiltered event save directory
        eventdir_filtered:      String with the path to the filtered event save directory
        client_name:            String with the client name (i.e., 'NCEDC')
        resp_prefilt_bottom:    List with the bottom two values in Hz to use in prefilt to use response (i.e., [0.005, 0.006])
        respfilt_bottom:        List with the bottom two values in Hz to use in prefilt w/ filtering as well
        start_delta:            Float with time difference in seconds to subtract from p-wave arrival for downloading waveforms
        end_delta:              Float with time difference in seconds to add to S wave arrival for downloading waveforms
        fig_x:                  Figure size x in inches to use for plotting the waveforms 
        fig_y:                  Figure size y in inches to use for plotting the waveforms 
        output_directory:       String with path of directory to use for output flatfile csv's, WITHOUT slash at end
        rank:                   Int with the rank
        size:                   Int with the size
    Output:
        
    '''
    
    from obspy.clients.fdsn import Client
    from obspy.core import Stream, read, UTCDateTime
    from datetime import datetime, timedelta
    import matplotlib.pyplot as plt
    from glob import glob
    import numpy as np
    import csv
    import pandas as pd
    from os import path,makedirs
    import kappa_utils as kutils
   
    # Initialize log file for data with unequal HHN and HHE lengths
    log = '/home/tnye/kappa/data/record_length.log'
    f = open(log,'w+')

    data_dir1 = eventdir_unfiltered
    data_dir2 = eventdir_filtered
    flatfile_path = mpi_flatfile_directory + '/flatfile_' + str(rank) + '.csv'
    client = Client(client_name)
    start_td = start_delta  ## Time difference to subtract from p-wave arrival for downloading waveforms
    end_td = end_delta ## Time difference to add to S wave arrival for downloading waveforms
    ## Filter bottoms to use in prefilt for -
    ##  INstrument response ONLY:
    resp_only_filtbottom = resp_prefilt_bottom
    ##  Instrument response and filtering
    resp_filt_filtbottom = respfilt_bottom
    ## Figure size:
    figure_size = (fig_x,fig_y)
    
    print('\n')
    
    ## Read in metadata:
    allmetadata = pd.read_csv(flatfile_path)
    
    ## Go through the metadata lines, extract record metadata, download waveforms,
    ##  correct instrument response / filter, make plots + save, save as a SAC in 
    ##  the appropriate directory.
    
    ## Start a counter for the number of lines
    count = 0 
    ## Counter for the number of station/event permutations missed (no data to DL)
    nummissed = 0
    ## Empty list (appears unused in this function)
    ffl = []
    
    ## Empty arrays for output flatfiles if the data was downloaded
    out_network = np.array([])
    out_station = np.array([])
    out_stlat = np.array([])
    out_stlon = np.array([])
    out_stelv = np.array([])
    out_quakenum = np.array([])
    out_m = np.array([])
    out_qlat = np.array([])
    out_qlon = np.array([])
    out_qdep = np.array([])
    out_orgt = np.array([])
    out_rhyp = np.array([])
    out_parr = np.array([])
    out_sarr = np.array([])

    ## Loop over the lines in the given file 
    for i_line in range(len(allmetadata)):
        
        #if nummissed == 10000:
        #    break
        ## Grab the appropriate metadata for this line
        i_eventnum = allmetadata.loc[i_line]['Quake#']
        i_qlon = allmetadata.loc[i_line]['Qlon']
        i_qlat = allmetadata.loc[i_line]['Qlat']
        i_qdep = allmetadata.loc[i_line]['Qdep']
        i_m = allmetadata.loc[i_line]['Mag']
        
        ##Origin time
        i_origintime = allmetadata.loc[i_line]['OrgT']
        i_date,i_time = allmetadata.loc[i_line]['OrgT'].split(' ')
        i_year,i_month,i_day = i_date.split('-')
        i_hr,i_min,i_sec = i_time.split(':')
       # try:
           # print(f'Origin = {i_origintime}... Try:')
           # i_sec,_ = i_sec.split('.')
       # except:
            #print(f'Origin = {i_origintime}... Except:')
           # continue
        
        i_parr = allmetadata.loc[i_line]['Parr']
        i_sarr =  allmetadata.loc[i_line]['Sarr']
        i_network = allmetadata.loc[i_line]['Network']
        i_Parr = datetime.strptime(i_parr, "%Y-%m-%d %H:%M:%S")
        i_Sarr = datetime.strptime(i_sarr, "%Y-%m-%d %H:%M:%S")
        i_station = allmetadata.loc[i_line]['Name']
        i_stlon = allmetadata.loc[i_line]['Slon']
        i_stlat = allmetadata.loc[i_line]['Slat']
        i_stelv = allmetadata.loc[i_line]['Selv']
        
        i_rhyp = allmetadata.loc[i_line]['rhyp']
        
        sp = i_Sarr - i_Parr   ## Get the s arrival - p arrival time difference
        i_start = i_Sarr - timedelta(seconds=start_td)
        i_end = i_start + timedelta(seconds=end_td)
        i_network = str(i_network)
        i_station = str(i_station)

        event = 'Event'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+'_'+i_sec
        
    ########################################################################################################################################
        ## Initialize empty stream objects for the N and E channels
        raw_stn = Stream()
        raw_ste = Stream()
        
        ## Try to download data for the N channel - if it works, add it to the N 
        ##  stream object
#        print(f'searching for {i_network} {i_station} HNN event {i_eventnum} on rank {rank}')
        try:
            raw_stn += client.get_waveforms(i_network, i_station, "*", 'HHN', UTCDateTime(i_start), UTCDateTime(i_end), attach_response=True)
        ## If it's missed, add 0.5 to the missed number (half a channel missing),
        ##  and continue...
        except:
            nummissed += .5
            #print('missed DL record for network ' + i_network + ', station ' + i_station + ' on channel HHN, event ' + np.str(i_eventnum))
            ## continue to next line of loop (i_line)
            continue
        
        ## Make copies of the raw station objects to use for removing instrument 
        ##   response ONLY, and for removing ins. resp and filtering:
        ir_stn = raw_stn.copy()
        ir_filt_stn = raw_stn.copy()
        
        ## Get sampling rate and make the filters to use in removing instrument response
        samprate = ir_stn[0].stats['sampling_rate']
        ## Make the prefilt for the instrument response - 1/2 sampling rate is Nyquist
        prefilt1 = (resp_only_filtbottom[0], resp_only_filtbottom[1], ((samprate/2)-5), (samprate/2))  ## this is 20 to 25 at the end
        try:
            ir_stn[0].remove_response(output='ACC',pre_filt=prefilt1) ## The units of data are now acceleration, m/s/s
        except:
            nummissed += .5
            #print('missed remove IR record for network ' + i_network + ', station ' + i_station + ' on channel HHN, event ' + np.str(i_eventnum))
            ## continue to next line of loop (i_line)
            continue
        prefilt2 = (resp_filt_filtbottom[0], resp_filt_filtbottom[1], ((samprate/2)-5), (samprate/2)) ## 0.063 just above 16 sec microseism, .28 just above 4 sec
        try:
            ir_filt_stn[0].remove_response(output='ACC',pre_filt=prefilt2) ## The units of data are now acceleration, m/s/s
        except:
            nummissed += .5
            #print('missed remove IR record for network ' + i_network + ', station ' + i_station + ' on channel HHN, event ' + np.str(i_eventnum))
            ## continue to next line of loop (i_line)
            continue
        
        
    ######################################################################################################################################################################################
        ## Try to download for the E channel - if it works, do same as above
#        print(f'searching for {i_network} {i_station} HNE event {i_eventnum} on rank {rank}')
        try:
            raw_ste += client.get_waveforms(i_network, i_station, "*", 'HHE', UTCDateTime(i_start), UTCDateTime(i_end), attach_response=True)
        except:
            nummissed += .5
            #print('missed DL record for network ' + i_network + ', station ' + i_station + ' on channel HHE, event ' + np.str(i_eventnum))
            continue
        ir_ste = raw_ste.copy()
        ir_filt_ste = raw_ste.copy()
        
        
        samprate = ir_ste[0].stats['sampling_rate']
        ## Make the prefilt for the instrument response - AT.SIT is @ 50 Hz so 25 is Nyquist
        prefilt1 = (resp_only_filtbottom[0], resp_only_filtbottom[1], ((samprate/2)-5), (samprate/2))  ## this is 20 to 25 at the end
        try:
            ir_ste[0].remove_response(output='ACC',pre_filt=prefilt1) ## The units of data are now acceleration, m/s/s
        except:
            nummissed += .5
            #print('missed remove IR record for network ' + i_network + ', station ' + i_station + ' on channel HHE, event ' + np.str(i_eventnum))
            ## continue to next line of loop (i_line)
            continue
        prefilt2 = (resp_filt_filtbottom[0], resp_filt_filtbottom[1], ((samprate/2)-5), (samprate/2)) ## 0.063 just above 16 sec microseism, .28 just above 4 sec
        try:
            ir_filt_ste[0].remove_response(output='ACC',pre_filt=prefilt2) ## The units of data are now acceleration, m/s/s
        except:
            nummissed += .5
            #print('missed remove IR record for network ' + i_network + ', station ' + i_station + ' on channel HHE, event ' + np.str(i_eventnum))
            ## continue to next line of loop (i_line)
            continue
        
        if len(raw_stn[0].data) != len(raw_ste[0].data):
            print('Length HHN != Length HHE')
            print(f'\t{i_network} {i_station}')
            print(f'\t{event}')
            f.write(f'{i_network} {i_station} {event}\n')

    ######################################################################################################################################################################################
        ## Calculate SNR and only save waveforms and plots if it's >= 5. 
        SNR_N = kutils.comp_SNR(ir_filt_stn, 10, 30, 15)
        SNR_E = kutils.comp_SNR(ir_filt_ste, 10, 30, 15)
        SNR_avg = (SNR_N + SNR_E)/2
        
#        print(f'SNR for {i_network} {i_station} {i_eventnum} is {SNR_avg} on rank {rank}') 

        if SNR_avg >=5:
            
            ## Make sure paths exist
            if not path.exists(data_dir1+event):
                makedirs(data_dir1+event)
            if not path.exists(data_dir2+event):
                makedirs(data_dir2+event)
            
            ### North component 
            ## make and save plot of unfiltered data
            # plt.figure(figsize=figure_size)
            # plt.plot(ir_stn[0].times(),ir_stn[0].data,'g')
            # ## Plot a vertical line for the p-wave arrival
            # plt.axvline(x=start_td)
            # ## Plot a vertical line for the s-wave arrival
            # plt.axvline(x=start_td-sp.seconds)
            # plt.xlabel('Time from ')
            # plt.ylabel('Acceleration (m/s/s)')
            # plt.title('Instrument Response Removed, Unfiltered, \n' + i_network + i_station )
            # plt.savefig(data_dir1+event+'/'+i_network+'_'+i_station+'_'+'HHN'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec +'.png')
            # plt.close('all')
            ir_stn[0].write(data_dir1+event+'/'+i_network+'_'+i_station+'_'+'HHN'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_'+ i_sec + '.sac',format='SAC')
            
            ## make and save plot of filtered data
            # plt.figure(figsize=figure_size)
            # plt.plot(ir_filt_stn[0].times(),ir_filt_stn[0].data,'g')
            # ## Plot a vertical line for the p-wave arrival
            # plt.axvline(x=start_td)
            # ## Plot a vertical line for the s-wave arrival
            # plt.axvline(x=start_td-sp.seconds)
            # plt.xlabel('Time from ')
            # plt.ylabel('Acceleration (m/s/s)')
            # plt.title('Instrument Response Removed, filtered, \n' + i_network +i_station )
            # plt.savefig(data_dir2+event+'/'+i_network+'_'+i_station+'_'+'HHN'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec +'.png')
            # plt.close('all')
            ir_filt_stn[0].write(data_dir2+event+'/'+i_network+'_'+i_station+'_'+'HHN'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec + '.sac',format='SAC')
            
            ### East component 
            ## make and save plot of unfiltered data
            # plt.figure(figsize=figure_size)
            # plt.plot(ir_ste[0].times(),ir_ste[0].data,'g')
            # ## Plot a vertical line for the p-wave arrival
            # plt.axvline(x=start_td)
            # ## Plot a vertical line for the s-wave arrival
            # plt.axvline(x=start_td-sp.seconds)
            # plt.xlabel('Time from ')
            # plt.ylabel('Acceleration (m/s/s)')
            # plt.title('Instrument Response Removed, Unfiltered, \n' + i_network + i_station )
            # plt.savefig(data_dir1+event+'/'+i_network+'_'+i_station+'_'+'HHE'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec +'.png')
            # plt.close('all')
            ir_ste[0].write(data_dir1+event+'/'+i_network+'_'+i_station+'_'+'HHE'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec +'.sac',format='SAC')
            
            ## make and save plot of filtered data
            # plt.figure(figsize=figure_size)
            # plt.plot(ir_filt_ste[0].times(),ir_filt_ste[0].data,'g')
            # ## Plot a vertical line for the p-wave arrival
            # plt.axvline(x=start_td)
            # ## Plot a vertical line for the s-wave arrival
            # plt.axvline(x=start_td-sp.seconds)
            # plt.xlabel('Time from ')
            # plt.ylabel('Acceleration (m/s/s)')
            # plt.title('Instrument Response Removed, filtered, \n' + i_network + i_station )
            # plt.savefig(data_dir2+event+'/'+i_network+'_'+i_station+'_'+'HHE'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec +'.png')
            # plt.close('all')
            ir_filt_ste[0].write(data_dir2+event+'/'+i_network+'_'+i_station+'_'+'HHE'+'_'+i_year+'_'+i_month+'_'+i_day+'_'+i_hr+'_'+i_min+ '_' + i_sec +'.sac',format='SAC')
            
            ## Add to arrays for new output file:
            out_network = np.append(out_network,i_network)
            out_station = np.append(out_station,i_station)
            out_stlat = np.append(out_stlat,i_stlat)
            out_stlon = np.append(out_stlon,i_stlon)
            out_stelv = np.append(out_stelv,i_stelv)
            out_quakenum = np.append(out_quakenum,i_eventnum)
            out_m = np.append(out_m,i_m)
            out_qlat = np.append(out_qlat,i_qlat)
            out_qlon = np.append(out_qlon,i_qlon)
            out_qdep = np.append(out_qdep,i_qdep)
            out_orgt = np.append(out_orgt,i_origintime)
            out_rhyp = np.append(out_rhyp,i_rhyp)
            out_parr = np.append(out_parr,i_parr)
            out_sarr = np.append(out_sarr,i_sarr)
    
    ## Make new dataframe and flatfile with output arrays:
    ## Dict:
    collected_dict = {'Network':out_network,'Name':out_station,'Slat':out_stlat,'Slon':out_stlon,
                      'Selv':out_stelv,'Quake#':out_quakenum,'Mag':out_m,'Qlat':out_qlat,
                      'Qlon':out_qlon,'Qdep':out_qdep,'OrgT':out_orgt,'rhyp':out_rhyp,
                      'Parr':out_parr,'Sarr':out_sarr}
    collected_df = pd.DataFrame(collected_dict)
    ## Write to file:
    collected_df.to_csv(output_directory + '/collected_flatfile_' + str(rank) + '.csv')
    
    f.close()
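parallel_waveformDL expects each MPI rank to read its own pre-split flatfile_{rank}.csv. A hedged driver sketch using mpi4py (all paths and numeric values below are illustrative, not taken from the source):

from mpi4py import MPI

comm = MPI.COMM_WORLD

parallel_waveformDL(
    mpi_flatfile_directory='/path/to/flatfiles',   # hypothetical
    eventdir_unfiltered='/path/to/unfiltered/',    # hypothetical
    eventdir_filtered='/path/to/filtered/',        # hypothetical
    client_name='NCEDC',
    resp_prefilt_bottom=[0.005, 0.006],
    respfilt_bottom=[0.063, 0.28],
    start_delta=30.0,
    end_delta=120.0,
    fig_x=12,
    fig_y=6,
    output_directory='/path/to/output',            # hypothetical
    rank=comm.Get_rank(),
    size=comm.Get_size())

Launched with e.g. mpirun -n 4 python driver.py, each rank then works through flatfile_0.csv ... flatfile_3.csv independently.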
Code example #20
plt.figure(figsize=(12, 6))
plt.plot(st2[0].times(), st2[0].data)
plt.xlabel('Time from ')
plt.ylabel('Velocity (m/s)')
plt.title('Instrument Response Removed, Unfiltered, \n AT station SIT')
#plt.savefig(data_dir + 'at.sit_unfilt.png')

print('Write unfiltered response removed')
## Write the data (instr resp removed) to a SAC file, m/s
#st2.write(data_dir+'at.sit_unfilt.sac',format='SAC')
#%%
print('Filter')
## Filter the data with a highpass filter around the 13s microseism:
filtfreq = 1 / 13.
## Make a copy of the stream object:
st2filt = st2.copy()
## Highpass filter using 2 corners, zerophase true so it filters forwards and back
##   (so 4 corners in the end):
st2filt[0].filter('highpass', freq=filtfreq, corners=2, zerophase=True)

print('Plot filtered')
## Plot:
plt.figure(figsize=(12, 6))
plt.plot(st2filt[0].times(), st2filt[0].data)
plt.xlabel('Time from Aug17_2015 UTC midnight (s)')
plt.ylabel('Velocity (m/s)')
plt.title(
    'Instrument Response Removed, Highpass filtered 13sec, \n AT station SIT')
plt.savefig(data_dir + 'at.sit_hpfilt_13s.png')

print('Write to file')
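The script announces the write but the call itself is not shown; presumably it mirrors the commented-out unfiltered write earlier in the script. A sketch:

## Write the filtered data (instrument response removed, 13 s highpass) to a SAC file, m/s
st2filt.write(data_dir + 'at.sit_hpfilt_13s.sac', format='SAC')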
Code example #21
File: rfpy_hk.py Project: shineusn/RfPy
def main():

    # Run Input Parser
    (opts, indb) = options.get_hk_options()

    # Load Database
    db = stdb.io.load_db(fname=indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(opts.stkeys) > 0:
        stkeys = []
        for skey in opts.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = 'DATA/' + stkey
        if not os.path.isdir(datapath):
            raise Exception('Path to ' + datapath +
                            " doesn't exist - aborting")

        # Get search start time
        if opts.startT is None:
            tstart = sta.startdate
        else:
            tstart = opts.startT

        # Get search end time
        if opts.endT is None:
            tend = sta.enddate
        else:
            tend = opts.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        for folder in os.listdir(datapath):

            date = folder.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                file = open(datapath + "/" + folder + "/RF_Data.pkl", "rb")
                rfdata = pickle.load(file)
                rfRstream.append(rfdata)
                file.close()

            else:
                continue

        # plotting.wiggle(rfRstream, sort='baz')

        # Try binning if specified
        if opts.nbin is not None:
            rf_tmp = binning.bin(rfRstream, typ='slow', nbin=opts.nbin + 1)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter
        if opts.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass',
                                  freqmin=opts.freqs_copy[0],
                                  freqmax=opts.freqs_copy[1],
                                  corners=2,
                                  zerophase=True)

        # Filter original stream
        rfRstream.filter('bandpass',
                         freqmin=opts.freqs[0],
                         freqmax=opts.freqs[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object
        try:
            hkstack = HkStack(rfRstream,
                              rfV2=rfRstream_copy,
                              strike=opts.strike,
                              dip=opts.dip,
                              vp=opts.vp)
        except:
            hkstack = HkStack(rfRstream,
                              strike=opts.strike,
                              dip=opts.dip,
                              vp=opts.vp)

        # Update attributes
        hkstack.hbound = opts.hbound
        hkstack.kbound = opts.kbound
        hkstack.dh = opts.dh
        hkstack.dk = opts.dk
        hkstack.weights = opts.weights

        # Stack with or without dip
        if opts.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=opts.typ)

        if opts.plot:
            hkstack.plot(opts.save_plot, opts.title, opts.form)

        if opts.save:
            filename = datapath + "/" + hkstack.hstream[0].stats.station + \
                ".hkstack.pkl"
            hkstack.save(file=filename)
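The bare try/except around HkStack silently falls back to the single-stream form whenever rfRstream_copy is undefined, i.e. whenever opts.copy is false. An explicit guard states that intent directly (a sketch, assuming HkStack accepts rfV2=None as "no second stream", which its keyword default suggests):

# Pass the filtered copy only when it was actually created
rfV2 = rfRstream_copy if opts.copy else None
hkstack = HkStack(rfRstream, rfV2=rfV2,
                  strike=opts.strike, dip=opts.dip, vp=opts.vp)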
Code example #22
if debug:
    print(files)

# Make a stream object to hold data
st = Stream()
for curfile in files:
    if debug:
        print('Here is our current file: ' + curfile)
    st += read(curfile, starttime=stime, endtime=etime)
st.merge()
if debug:
    print(st)
    st.plot()
    
# Here we can do a 10 volt test
st2 = st.copy()
nstime = UTCDateTime('2017-011T16:59:55.0')
netime = nstime + 20.
st2.trim(starttime=nstime, endtime=netime)
# st2 Now has data for our first 10 V test
# Now we convert st2 into Volts

# Make two empty lists to save our results
mminus = []
sminus = []

# This is the first voltage 
for tr in st2:
    tr.data = tr.data.astype(np.float64)
    # Here we are converting from counts to Volts
    tr.data = tr.data * (40. / (2.**26))
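The factor 40. / 2.**26 maps raw digitizer counts onto volts, consistent with a 40 V input span spread over a 26-bit count range (the digitizer specs are inferred from the constant, not stated in the source). As a reusable helper:

import numpy as np

def counts_to_volts(data, vrange=40.0, nbits=26):
    """Convert raw counts to volts for a vrange-volt span over 2**nbits counts."""
    return data.astype(np.float64) * (vrange / 2.0**nbits)

# Equivalent to the loop above:
# for tr in st2:
#     tr.data = counts_to_volts(tr.data)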
Code example #23
        st = Stream()
        std = Stream()
        st = read('/auto/proj/Cascadia/PermStatWF/*/*[HE]HZ.%s.%s' %
                  (yr, jday))

        for tr in st:
            num = tr.stats.npts
            samp = tr.stats.sampling_rate

            if num >= (samp * 86400) * .80:
                std.append(tr)

        if len(std) > 0:  # need wf in stream to cont.
            std.sort(['starttime'])
            std.merge()
            std1 = std.copy()

            # want full day of waveform - trim to start - end time
            starttime1 = dt
            if jday == 365:  #handles end of year
                endtime1 = UTCDateTime(year=yr + 1, julday=1, hour=0, minute=0)
            else:
                endtime1 = dt + 86400  #1 day
            std1.trim(starttime=starttime1, endtime=endtime1)

            #pre-process day waveform data -- needs to be same as processing for templates
            std_filter = pre_processing.dayproc(std1,
                                                lowcut=3,
                                                highcut=10,
                                                filt_order=4,
                                                samp_rate=25,
Code example #24
File: recordingloop2.py Project: taranye96/kappabay
    i_start = i_Parr - timedelta(seconds=td)
    i_end = i_Sarr + timedelta(seconds=90)
    i_network = str(i_network)
    i_station = str(i_station)
########################################################################################################################################
# Clear stream objects
    raw_stn = Stream()
    raw_ste = Stream()
# Data download and plot for North channel
    try:
        raw_stn += client.get_waveforms(i_network, i_station, "*", 'HHN', UTCDateTime(i_start), UTCDateTime(i_end), attach_response=True)
    except:
        nummissed += .5
        print('oops')
        continue
    i_stn = raw_stn.copy()
    j_stn = raw_stn.copy()
    
    
    samprate = i_stn[0].stats['sampling_rate']
    ## Make the prefilt for the instrument response - AT.SIT is @ 50 Hz so 25 is Nyquist
    prefilt1 = (0.005, 0.006, ((samprate/2)-5), (samprate/2))  ## this is 20 to 25 at the end
    i_stn[0].remove_response(output='VEL',pre_filt=prefilt1) ## The units of data are now Velocity, m/s
    prefilt2 = (0.063, 0.28, ((samprate/2)-5), (samprate/2)) ## 0.063 just above 16 sec microseism, .28 just above 4 sec
    j_stn[0].remove_response(output='VEL',pre_filt=prefilt2) ## The units of data are now Velocity, m/s
    
    ## make and save plot
    plt.figure(figsize=(12,6))
    plt.plot(i_stn[0].times(),i_stn[0].data,'g')
    plt.axvline(x=td)
    plt.axvline(x=td+sp.seconds)
Code example #25
File: test_trigger.py Project: kasra-hosseini/obspy
 def test_coincidenceTrigger(self):
     """
     Test network coincidence trigger.
     """
     st = Stream()
     files = ["BW.UH1._.SHZ.D.2010.147.cut.slist.gz",
              "BW.UH2._.SHZ.D.2010.147.cut.slist.gz",
              "BW.UH3._.SHZ.D.2010.147.cut.slist.gz",
              "BW.UH4._.EHZ.D.2010.147.cut.slist.gz"]
     for filename in files:
         filename = os.path.join(self.path, filename)
         st += read(filename)
     # some prefiltering used for UH network
     st.filter('bandpass', freqmin=10, freqmax=20)
     # 1. no weighting, no stations specified, good settings
     # => 3 events, no false triggers
     # for the first test we make some additional tests regarding types
     res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3, sta=0.5,
                              lta=10)
     self.assertTrue(isinstance(res, list))
     self.assertTrue(len(res) == 3)
     expected_keys = ['time', 'coincidence_sum', 'duration', 'stations',
                      'trace_ids']
     expected_types = [UTCDateTime, float, float, list, list]
     for item in res:
         self.assertTrue(isinstance(item, dict))
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 3)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 4)
     # 2. no weighting, station selection
     # => 2 events, no false triggers
     trace_ids = ['BW.UH1..SHZ', 'BW.UH3..SHZ', 'BW.UH4..EHZ']
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 3,
                                  trace_ids=trace_ids, sta=0.5, lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(4.2 < re[0]['duration'] < 4.8)
         self.assertTrue(re[0]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[0]['coincidence_sum'] == 3)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(4.2 < re[1]['duration'] < 4.4)
         self.assertTrue(re[1]['stations'] == ['UH3', 'UH1', 'UH4'])
         self.assertTrue(re[1]['coincidence_sum'] == 3)
     # 3. weighting, station selection
     # => 3 events, no false triggers
     trace_ids = {'BW.UH1..SHZ': 0.4, 'BW.UH2..SHZ': 0.35,
                  'BW.UH3..SHZ': 0.4, 'BW.UH4..EHZ': 0.25}
     res = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.0,
                              trace_ids=trace_ids, sta=0.5, lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', 'UH4'])
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     # 4. weighting, station selection, max_len
     # => 2 events, no false triggers, small event does not overlap anymore
     trace_ids = {'BW.UH1..SHZ': 0.6, 'BW.UH2..SHZ': 0.6}
     # ignore UserWarnings
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('ignore', UserWarning)
         re = coincidenceTrigger("recstalta", 3.5, 1, st.copy(), 1.2,
                                  trace_ids=trace_ids,
                                  max_trigger_length=0.13, sta=0.5, lta=10)
         self.assertTrue(len(re) == 2)
         self.assertTrue(re[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
         self.assertTrue(re[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
         self.assertTrue(0.2 < re[0]['duration'] < 0.3)
         self.assertTrue(re[0]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[0]['coincidence_sum'] == 1.2)
         self.assertTrue(re[1]['time'] > UTCDateTime("2010-05-27T16:27:27"))
         self.assertTrue(re[1]['time'] < UTCDateTime("2010-05-27T16:27:33"))
         self.assertTrue(0.18 < re[1]['duration'] < 0.2)
         self.assertTrue(re[1]['stations'] == ['UH2', 'UH1'])
         self.assertTrue(re[1]['coincidence_sum'] == 1.2)
     # 5. station selection, extremely sensitive settings
     # => 4 events, 1 false triggers
     res = coincidenceTrigger("recstalta", 2.5, 1, st.copy(), 2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3, lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 6. same as 5, gappy stream
     # => same as 5 (almost, duration of 1 event changes by 0.02s)
     st2 = st.copy()
     tr1 = st2.pop(0)
     t1 = tr1.stats.starttime
     t2 = tr1.stats.endtime
     td = t2 - t1
     tr1a = tr1.slice(starttime=t1, endtime=t1 + 0.45 * td)
     tr1b = tr1.slice(starttime=t1 + 0.6 * td, endtime=t1 + 0.94 * td)
     st2.insert(1, tr1a)
     st2.insert(3, tr1b)
     res = coincidenceTrigger("recstalta", 2.5, 1, st2, 2,
                              trace_ids=['BW.UH1..SHZ', 'BW.UH3..SHZ'],
                              sta=0.3, lta=5)
     self.assertTrue(len(res) == 5)
     self.assertTrue(res[3]['time'] > UTCDateTime("2010-05-27T16:27:01"))
     self.assertTrue(res[3]['time'] < UTCDateTime("2010-05-27T16:27:02"))
     self.assertTrue(1.5 < res[3]['duration'] < 1.7)
     self.assertTrue(res[3]['stations'] == ['UH3', 'UH1'])
     self.assertTrue(res[3]['coincidence_sum'] == 2.0)
     # 7. same as 3 but modify input trace ids and check output of trace_ids
     # and other additional information with ``details=True``
     st2 = st.copy()
     st2[0].stats.network = "XX"
     st2[1].stats.location = "99"
     st2[1].stats.network = ""
     st2[1].stats.location = "99"
     st2[1].stats.channel = ""
     st2[2].stats.channel = "EHN"
     st2[3].stats.network = ""
     st2[3].stats.channel = ""
     st2[3].stats.station = ""
     trace_ids = {'XX.UH1..SHZ': 0.4, '.UH2.99.': 0.35,
                  'BW.UH3..EHN': 0.4, '...': 0.25}
     res = coincidenceTrigger("recstalta", 3.5, 1, st2, 1.0,
                              trace_ids=trace_ids, details=True,
                              sta=0.5, lta=10)
     self.assertTrue(len(res) == 3)
     self.assertTrue(res[0]['time'] > UTCDateTime("2010-05-27T16:24:31"))
     self.assertTrue(res[0]['time'] < UTCDateTime("2010-05-27T16:24:35"))
     self.assertTrue(4.2 < res[0]['duration'] < 4.8)
     self.assertTrue(res[0]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[0]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[0]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[0]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[0]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[0]['coincidence_sum'] == 1.4)
     self.assertTrue(res[1]['time'] > UTCDateTime("2010-05-27T16:26:59"))
     self.assertTrue(res[1]['time'] < UTCDateTime("2010-05-27T16:27:03"))
     self.assertTrue(3.2 < res[1]['duration'] < 3.7)
     self.assertTrue(res[1]['stations'] == ['UH2', 'UH3', 'UH1'])
     self.assertTrue(res[1]['trace_ids'][0] == st2[1].id)
     self.assertTrue(res[1]['trace_ids'][1] == st2[2].id)
     self.assertTrue(res[1]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[1]['coincidence_sum'] == 1.15)
     self.assertTrue(res[2]['time'] > UTCDateTime("2010-05-27T16:27:27"))
     self.assertTrue(res[2]['time'] < UTCDateTime("2010-05-27T16:27:33"))
     self.assertTrue(4.2 < res[2]['duration'] < 4.4)
     self.assertTrue(res[2]['stations'] == ['UH3', 'UH2', 'UH1', ''])
     self.assertTrue(res[2]['trace_ids'][0] == st2[2].id)
     self.assertTrue(res[2]['trace_ids'][1] == st2[1].id)
     self.assertTrue(res[2]['trace_ids'][2] == st2[0].id)
     self.assertTrue(res[2]['trace_ids'][3] == st2[3].id)
     self.assertTrue(res[2]['coincidence_sum'] == 1.4)
     expected_keys = ['cft_peak_wmean', 'cft_std_wmean', 'cft_peaks',
                      'cft_stds']
     expected_types = [float, float, list, list]
     for item in res:
         for key, _type in zip(expected_keys, expected_types):
             self.assertTrue(key in item)
             self.assertTrue(isinstance(item[key], _type))
     # check some of the detailed info
     ev = res[-1]
     self.assertAlmostEqual(ev['cft_peak_wmean'], 18.097582068353855)
     self.assertAlmostEqual(ev['cft_std_wmean'], 4.7972436395074087)
     self.assertAlmostEqual(ev['cft_peaks'][0], 18.973097608513633)
     self.assertAlmostEqual(ev['cft_peaks'][1], 16.852175794415011)
     self.assertAlmostEqual(ev['cft_peaks'][2], 18.64005853900883)
     self.assertAlmostEqual(ev['cft_peaks'][3], 17.572363634564621)
     self.assertAlmostEqual(ev['cft_stds'][0], 4.8811165222946951)
     self.assertAlmostEqual(ev['cft_stds'][1], 4.4446373508521804)
     self.assertAlmostEqual(ev['cft_stds'][2], 5.3499401252675964)
     self.assertAlmostEqual(ev['cft_stds'][3], 4.2723814539487703)
Code example #26
File: stalta4kw_st.py Project: obspy/branches
    summary += exceptions
summary.append("#" * 79)

trig = []
mutt = []
if st:
    # preprocessing, backup original data for plotting at end
    st.merge(0)
    st.detrend("linear")
    for tr in st:
        tr.data = tr.data * cosTaper(len(tr), 0.01)
    #st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
    st.trim(T1, T2)
    st_trigger = st.copy()
    st.normalize(global_max=False)
    # do the triggering
    trig = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st_trigger,
            thr_coincidence_sum=PAR.MIN_STATIONS,
            max_trigger_length=PAR.MAXLEN, trigger_off_extension=PAR.ALLOWANCE,
            details=True, sta=PAR.STA, lta=PAR.LTA)

    for t in trig:
        info = "%s %ss %s %s" % (t['time'].strftime("%Y-%m-%dT%H:%M:%S"), ("%.1f" % t['duration']).rjust(4), ("%i" % t['cft_peak_wmean']).rjust(3), "-".join(t['stations']))
        summary.append(info)
        tmp = st.slice(t['time'] - 1, t['time'] + t['duration'])
        outfilename = "%s/%s_%.1f_%i_%s-%s_%s.png" % (PLOTDIR, t['time'].strftime("%Y-%m-%dT%H:%M:%S"), t['duration'], t['cft_peak_wmean'], len(t['stations']), num_stations, "-".join(t['stations']))
        tmp.plot(outfile=outfilename)
        mutt += ("-a", outfilename)
Code example #27
if debug:
    print(files)

# Make a stream object to hold data
st = Stream()
for curfile in files:
    if debug:
        print('Here is our current file: ' + curfile)
    st += read(curfile, starttime=stime, endtime=etime)
st.merge()
if debug:
    print(st)
    st.plot()

# Here we can do a 10 volt test
st2 = st.copy()
nstime = UTCDateTime('2017-017T22:19:36.0')
netime = nstime + 10.
st2.trim(starttime=nstime, endtime=netime)
# st2 Now has data for our first 10 V test
# Now we convert st2 into Volts

# Make two empty lists to save our results
mminus = []
sminus = []

# This is the first voltage
for tr in st2:
    tr.data = tr.data.astype(np.float64)
    # Here we are converting from counts to Volts
    tr.data = tr.data * (40. / (2.**26))