    def get_timeseries(self, starttime, endtime, observatory=None,
                       channels=None, type=None, interval=None):
        """Implements get_timeseries

        Notes: Calls IMFV283Factory.parse_string in place of
            IMFV283Factory.get_timeseries.
        """
        observatory = observatory or self.observatory
        channels = channels or self.channels
        self.criteria_file_name = observatory + '.sc'
        timeseries = Stream()
        output = self._retrieve_goes_messages(starttime, endtime, observatory)
        timeseries += self.parse_string(output)
        # merge channel traces for multiple days
        timeseries.merge()
        # trim to requested start/end time
        timeseries.trim(starttime, endtime)
        # output the number of points we read for logging
        if len(timeseries):
            print("Read %s points from %s" % (timeseries[0].stats.npts,
                observatory), file=sys.stderr)

        self._post_process(timeseries)
        if observatory is not None:
            timeseries = timeseries.select(station=observatory)

        return timeseries
    def get_timeseries(self,
                       starttime,
                       endtime,
                       observatory=None,
                       channels=None,
                       type=None,
                       interval=None):
        """Implements get_timeseries

        Notes: Calls IMFV283Factory.parse_string in place of
            IMFV283Factory.get_timeseries.
        """
        observatory = observatory or self.observatory
        channels = channels or self.channels
        self.criteria_file_name = observatory + '.sc'
        timeseries = Stream()
        output = self._retrieve_goes_messages(starttime, endtime, observatory)
        timeseries += self.parse_string(output)
        # merge channel traces for multiple days
        timeseries.merge()
        # trim to requested start/end time
        timeseries.trim(starttime, endtime)
        # output the number of points we read for logging
        if len(timeseries):
            print("Read %s points from %s" % (timeseries[0].stats.npts,
                observatory), file=sys.stderr)

        self._post_process(timeseries)
        if observatory is not None:
            timeseries = timeseries.select(station=observatory)

        return timeseries
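Both snippets above follow the same pattern: collect per-channel traces into a Stream, merge(), then trim() to the requested window. A minimal, self-contained sketch of that pattern using synthetic traces (the station and channel codes are illustrative, not taken from the snippets):

import numpy as np
from obspy import Stream, Trace, UTCDateTime

start = UTCDateTime("2020-01-01T00:00:00")
# two contiguous 60 s pieces of the same channel
tr1 = Trace(data=np.arange(60, dtype=np.float64),
            header={"station": "BOU", "channel": "H",
                    "sampling_rate": 1.0, "starttime": start})
tr2 = Trace(data=np.arange(60, dtype=np.float64),
            header={"station": "BOU", "channel": "H",
                    "sampling_rate": 1.0, "starttime": start + 60})

timeseries = Stream([tr1, tr2])
timeseries.merge()                        # join the pieces into one trace
timeseries.trim(start + 10, start + 100)  # cut to the requested window
print(timeseries[0].stats.npts)           # 91 samples at 1 Hz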
Example #3
def merge_single(nch,dstart,dend):
  '''Merges traces of one channel to larger traces. Used for cross-correlation'''

  # here you load all the functions you need to use
  from obspy.seg2.seg2 import readSEG2
  from obspy.core import Stream


  dataDir2 = "/import/neptun-radler/STEINACH_feb/"
  dataDir = "/import/three-data/hadzii/STEINACH/STEINACH_longtime/"
  outdir = "/home/jsalvermoser/Desktop/Processing/out_merged"

  tr = []

  for k in range(dstart, dend, 1):
    fname = '%d' %(k)
    fileName = fname + ".dat" 
    st = readSEG2(dataDir + fileName)
    #st.detrend('linear')
    tr.append(st[nch-1])

  new_stream = Stream(traces=tr)
  new_stream.merge(method=1, fill_value='interpolate')

  start = new_stream[0].stats.starttime
  end = new_stream[0].stats.endtime

  timeframe = str(nch)+ "_" + str(start.year) +'.'+ str(start.julday) +'.'+ str(start.hour) +'.'+ str(start.minute) +'.'+ str(start.second) \
      +'-'+ str(end.year) +'.'+ str(end.julday) +'.'+ str(end.hour) +'.'+ str(end.minute) +'.'+ str(end.second)

  new_stream.write(outdir + timeframe + ".mseed", format="MSEED")

  return new_stream[0]
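A hedged usage sketch for merge_single above; the channel index and day-file numbers are assumptions, and the dataDir/outdir paths are hard-coded inside the function, so they must match the original author's file layout (or be edited) before this runs:

# merge SEG2 day files 100.dat .. 109.dat of channel 4 into one trace
trace = merge_single(nch=4, dstart=100, dend=110)
print(trace.id, trace.stats.starttime, trace.stats.endtime)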
Example #4
def merge_single(nch,dstart,dend):
  '''Merges traces of one channel to larger traces. Used for cross-correlation'''

  # here you load all the functions you need to use
  from obspy.seg2.seg2 import readSEG2
  from obspy.core import Stream


  # directories:
  dataDir2 = "/import/neptun-radler/STEINACH_feb/"
  dataDir = "/import/three-data/hadzii/STEINACH/STEINACH_longtime/"


  tr = []

  for k in range(dstart, dend, 1):
    fname = '%d' %(k)
    fileName = fname + ".dat" 
    st = readSEG2(dataDir + fileName)
    tr.append(st[nch-1])

  new_stream = Stream(traces=tr)
  new_stream.merge(method=1, fill_value='interpolate')


  return new_stream
Example #5
File: psd.py Project: kasra-hosseini/obspy
    def add(self, stream, verbose=False):
        """
        Process all traces with compatible information and add their spectral
        estimates to the histogram containing the probabilistic psd.
        Also ensures that no piece of data is inserted twice.

        :type stream: :class:`~obspy.core.stream.Stream` or
                :class:`~obspy.core.trace.Trace`
        :param stream: Stream or trace with data that should be added to the
                probabilistic psd histogram.
        :returns: True if appropriate data were found and the ppsd statistics
                were changed, False otherwise.
        """
        # return later if any changes were applied to the ppsd statistics
        changed = False
        # prepare the list of traces to go through
        if isinstance(stream, Trace):
            stream = Stream([stream])
        # select appropriate traces
        stream = stream.select(id=self.id,
                               sampling_rate=self.sampling_rate)
        # save information on available data and gaps
        self.__insert_data_times(stream)
        self.__insert_gap_times(stream)
        # merge depending on skip_on_gaps set during __init__
        stream.merge(self.merge_method, fill_value=0)

        for tr in stream:
            # the following check should not be necessary due to the select()..
            if not self.__sanity_check(tr):
                msg = "Skipping incompatible trace."
                warnings.warn(msg)
                continue
            t1 = tr.stats.starttime
            t2 = tr.stats.endtime
            while t1 + PPSD_LENGTH <= t2:
                if self.__check_time_present(t1):
                    msg = "Already covered time spans detected (e.g. %s), " + \
                          "skipping these slices."
                    msg = msg % t1
                    warnings.warn(msg)
                else:
                    # throw warnings if trace length is different
                    # than one hour..!?!
                    slice = tr.slice(t1, t1 + PPSD_LENGTH)
                    # XXX not good, should be working in place somehow
                    # XXX how to do it with the padding, though?
                    success = self.__process(slice)
                    if success:
                        self.__insert_used_time(t1)
                        if verbose:
                            print(t1)
                        changed = True
                t1 += PPSD_STRIDE  # advance half an hour

            # enforce time limits, pad zeros if gaps
            #tr.trim(t, t+PPSD_LENGTH, pad=True)
        return changed
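For context, the add() method above is typically driven from user code roughly as in the following sketch. It assumes the older obspy.signal API in which PPSD takes a trace's stats plus a poles-and-zeros dictionary; the file path and the STS-2-style paz values are placeholders:

from obspy import read
from obspy.signal import PPSD

st = read("/path/to/day_long_record.mseed")  # illustrative path
tr = st[0]
paz = {"gain": 60077000.0,
       "sensitivity": 2516778400.0,
       "zeros": [0j, 0j],
       "poles": [-0.037004 + 0.037016j, -0.037004 - 0.037016j,
                 -251.33 + 0j, -131.04 - 467.29j, -131.04 + 467.29j]}

ppsd = PPSD(tr.stats, paz)  # older signature: (stats, paz_dict)
changed = ppsd.add(st)      # True if at least one new time slice was processed
print(changed)
ppsd.plot()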
Example #6
File: psd.py Project: msimon00/obspy
    def add(self, stream, verbose=False):
        """
        Process all traces with compatible information and add their spectral
        estimates to the histogram containing the probabilistic psd.
        Also ensures that no piece of data is inserted twice.

        :type stream: :class:`~obspy.core.stream.Stream` or
                :class:`~obspy.core.trace.Trace`
        :param stream: Stream or trace with data that should be added to the
                probabilistic psd histogram.
        :returns: True if appropriate data were found and the ppsd statistics
                were changed, False otherwise.
        """
        # return later if any changes were applied to the ppsd statistics
        changed = False
        # prepare the list of traces to go through
        if isinstance(stream, Trace):
            stream = Stream([stream])
        # select appropriate traces
        stream = stream.select(id=self.id, sampling_rate=self.sampling_rate)
        # save information on available data and gaps
        self.__insert_data_times(stream)
        self.__insert_gap_times(stream)
        # merge depending on skip_on_gaps set during __init__
        stream.merge(self.merge_method, fill_value=0)

        for tr in stream:
            # the following check should not be necessary due to the select()..
            if not self.__sanity_check(tr):
                msg = "Skipping incompatible trace."
                warnings.warn(msg)
                continue
            t1 = tr.stats.starttime
            t2 = tr.stats.endtime
            while t1 + PPSD_LENGTH <= t2:
                if self.__check_time_present(t1):
                    msg = "Already covered time spans detected (e.g. %s), " + \
                          "skipping these slices."
                    msg = msg % t1
                    warnings.warn(msg)
                else:
                    # throw warnings if trace length is different
                    # than one hour..!?!
                    slice = tr.slice(t1, t1 + PPSD_LENGTH)
                    # XXX not good, should be working in place somehow
                    # XXX how to do it with the padding, though?
                    success = self.__process(slice)
                    if success:
                        self.__insert_used_time(t1)
                        if verbose:
                            print(t1)
                        changed = True
                t1 += PPSD_STRIDE  # advance half an hour

            # enforce time limits, pad zeros if gaps
            #tr.trim(t, t+PPSD_LENGTH, pad=True)
        return changed
def find_seismogram(top_level_dir,
                    starttime,
                    endtime,
                    stations=['S12', 'S14', 'S15', 'S16'],
                    channels=['MH1', 'MH2', 'MHZ'],
                    dir_type='pdart_dir'):

    for station in stations:
        stream = Stream()
        channel = '*'
        if dir_type == 'processed_dir':
            dir = find_processed_dir(top_level_dir, starttime.year, station)
            filename = '*%s.%s.%s.%s.%s.%03d*.gz' % (
                'XA', station, '*', channel, str(
                    starttime.year), starttime.julday)
        else:
            dir = find_dir(top_level_dir, starttime.year, station, channel)
            filename = '%s.%s.%s.%s.%s.%03d.gz' % ('XA', station, '*', channel,
                                                   str(starttime.year),
                                                   starttime.julday)
        filename = os.path.join(dir, filename)
        try:
            stream += read(filename)
        except Exception as e:
            print(str(e))

        if starttime.julday != endtime.julday:
            if dir_type == 'processed_dir':
                dir = find_processed_dir(top_level_dir, endtime.year, station)
                filename = '*%s.%s.%s.%s.%s.%03d*.gz' % (
                    'XA', station, '*', channel, str(
                        endtime.year), endtime.julday)
            else:
                dir = find_dir(top_level_dir, endtime.year, station, channel)
                filename = '*%s.%s.%s.%s.%s.%03d*.gz' % (
                    'XA', station, '*', channel, str(
                        endtime.year), endtime.julday)
            filename = os.path.join(dir, filename)
            try:
                stream += read(filename)
            except Exception as e:
                print(str(e))

        # print('Before ', stream)
        stream = stream.trim(starttime=starttime, endtime=endtime)
        # print('After ', stream)

        if stream is not None and len(stream) > 0:
            for tr in stream:
                tr.stats.location = ''
                if tr.stats.channel not in channels:
                    stream.remove(tr)

            stream.merge()

    return stream
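A usage sketch for find_seismogram, assuming an Apollo/PDART-style archive laid out the way the find_dir / find_processed_dir helpers expect; the archive root below is illustrative:

from obspy import UTCDateTime

starttime = UTCDateTime("1971-02-07T00:45:00")
endtime = starttime + 3600
st = find_seismogram("/path/to/pdart_archive", starttime, endtime,
                     stations=["S12"], channels=["MHZ"])
print(st)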
Example #8
    def get_waveforms(self,
                      network,
                      station,
                      location,
                      channel,
                      starttime,
                      endtime,
                      automerge=False,
                      trace_count_threshold=200):

        starttime = UTCDateTime(starttime).timestamp
        endtime = UTCDateTime(endtime).timestamp

        query = "select * from wdb where net='%s' and sta='%s' and loc='%s' and cha='%s' " \
                %(network, station, location, channel) + \
                "and et>=%f and st<=%f" \
                 % (starttime, endtime)

        rows = self.conn.execute(query).fetchall()
        s = Stream()

        if (len(rows) > trace_count_threshold): return s

        #print 'rank: %d net: %s sta: %s loc:%s cha:%s ntraces: %d'%(self.rank,
        #                                                           network, station,
        #                                                           location, channel, len(rows))
        #return s

        for row in rows:
            ds_id, net, sta, loc, cha, st, et, tag = row
            station_data = self.asdf_datasets[ds_id].waveforms['%s.%s' %
                                                               (net, sta)]
            try:
                s += station_data[tag]
            except:
                pass
            # end try
        # end for

        if (automerge):
            try:
                s.merge(method=-1)
            except:
                pass
            # end try
        # end if

        # Trim traces
        for t in s:
            t.trim(starttime=UTCDateTime(starttime),
                   endtime=UTCDateTime(endtime))
        # end for

        return s
Example #9
def stream_seishub_read(host="localhost", port=8080, timeout=100,
                 start_time="2010-01-01 00:20:03", time_interval=30,
                 network_id="PF", station_id="", location_id="",
                 channel_id="HLE", get_paz=False, remove_mean=False,
                 remove_trend=False):
    """ Seishub server client.

    For a detailed description of how it works refer to ObsPy website
    (obspy.org)

    """

    client = Client_seis(base_url="http://" + host + ':' + str(port),
                         timeout=timeout)
    t = UTCDateTime(start_time)

    st = Stream()

    if station_id == "":
        st = client.waveform.getWaveform(network_id, str(station_id),
                                         location_id,
                                         channel_id, t, t + time_interval)
    else:
        for station in station_id:
            try:
                st += client.waveform.getWaveform(network_id, str(station),
                                                  location_id,
                                                  channel_id,
                                                  t,
                                                  t + time_interval)
            except:
                pass

    if len(st) > 0:
        if remove_trend:
            st = stream_detrend(st)

        if remove_mean:
            st = stream_demean(st)

        st.merge(method=1, fill_value=0, interpolation_samples=1)
        n_trace = len(st)
    else:
        n_trace = 0

    if get_paz:
        paz = client.station.getPAZ(network_id, station_id, t)
        return st, paz, n_trace
    else:
        return st, n_trace
Example #10
    def add(self, stream, verbose=True):
        """
        Process all traces with compatible information and add their spectral
        estimates to the histogram containing the probabilistic psd.
        Also ensures that no piece of data is inserted twice.
        """
        # return later if any changes were applied to the ppsd statistics
        changed = False
        # prepare the list of traces to go through
        if isinstance(stream, Trace):
            stream = Stream([stream])
        # select appropriate traces
        stream = stream.select(id=self.id,
                               sampling_rate=self.sampling_rate)
        # save information on available data and gaps
        self.__insert_data_times(stream)
        self.__insert_gap_times(stream)
        # merge depending on skip_on_gaps set during __init__
        stream.merge(self.merge_method, fill_value=0)

        for tr in stream:
            # the following check should not be necessary due to the select()..
            if not self.__sanity_check(tr):
                msg = "Skipping incompatible trace."
                warnings.warn(msg)
                continue
            t1 = tr.stats.starttime
            t2 = tr.stats.endtime
            while t1 + PPSD_LENGTH <= t2:
                if self.__check_time_present(t1):
                    msg = "Already computed time spans detected (e.g. %s), " + \
                          "skipping these slices."
                    msg = msg % t1
                    print(msg)
                else:
                    # throw warnings if trace length is different than one
                    # hour..!?!
                    slice = tr.slice(t1, t1 + PPSD_LENGTH)
                    success = self.__process(slice)
                    if success:
                        self.__insert_used_time(t1)
                        if verbose:
                            stdout.write("\r adding %s" % t1)
                            stdout.flush()
                        changed = True
                t1 += PPSD_STRIDE  # advance half an hour
        if verbose:
            stdout.write("\r")
            stdout.flush()
        return changed
Example #11
def get_streams_gema(networks,
                     stations,
                     starttime,
                     endtime,
                     only_vertical_channel=False,
                     local_dir_name=None):
    if not local_dir_name:
        local_dir_name = "%s/archive" % (os.getenv("HOME"))

    if only_vertical_channel:
        channels = "*Z"
    else:
        channels = "*"

    # READ ARCHIVE DATABASE
    st = Stream()
    this_day = UTCDateTime(starttime.strftime("%Y-%m-%d"))
    last_day = UTCDateTime(endtime.strftime("%Y-%m-%d"))
    while this_day <= last_day:
        for network, station in zip(networks, stations):
            pattern = '%s/%s/%s/%s/%s*' % (local_dir_name,
                                           this_day.strftime("%Y"), network,
                                           station, channels)
            paths_ch = sorted(glob.glob(pattern))
            for path in paths_ch:
                pattern = "%s/*%s" % (path, this_day.strftime("%Y.%03j"))
                msfile_list = glob.glob(pattern)
                if len(msfile_list) > 0:
                    for msfile in msfile_list:
                        st += read(msfile,
                                   starttime=starttime,
                                   endtime=endtime)

        this_day += 86400

    # PATCH PROBLEM DIFFERENT SAMPLING RATES IN LONQ STATION FROM SCREAM
    for tr in st.select(station="LONQ"):
        if tr.stats.sampling_rate != 50:
            st.remove(tr)

    # EXPORT GAPS AND MERGE STREAM
    gaps = st.get_gaps()
    if len(st) > 0:  # and len(gaps)>0
        st.trim(starttime, endtime)
        st.merge(method=1, interpolation_samples=-1, fill_value='interpolate')

    return st, gaps
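A usage sketch for get_streams_gema; the network code and archive root are assumptions (the LONQ station code is taken from the sampling-rate patch above):

from obspy import UTCDateTime

t0 = UTCDateTime("2021-06-01T00:00:00")
st, gaps = get_streams_gema(networks=["C"], stations=["LONQ"],
                            starttime=t0, endtime=t0 + 1800,
                            only_vertical_channel=True,
                            local_dir_name="/path/to/archive")
print(st)
print("number of gaps:", len(gaps))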
def get_arraywaveforms(session, Site, Wfdisc, array, t0=None, te=None, channel=None):
    ssSite = session.query(Site).filter(Site.refsta==array).all()
    wf = session.query(Wfdisc)
    aa = Stream()

    for ssi in ssSite:
        if t0 is None:
            wfT = wf.filter(Wfdisc.sta==ssi.sta).all()
            timeI = []
            timeF = []
            for wfTi in wfT:
                timeI.append(wfTi.time)
                timeF.append(wfTi.endtime)
            # compute the common time window after collecting all rows
            timeI = np.asanyarray(timeI)
            timeF = np.asanyarray(timeF)
            t0 = max(timeI)
            te = min(timeF)

        for t1 in range(5):
            try:
                aaT = request.get_waveforms(session, Wfdisc, station=ssi.sta, starttime=t0,
                                            endtime=te,channel=channel)
                break
            except:
                print('try get data:',t1)
                print('go to sleep for 5 seconds and try again')
                time.sleep(5)

            if t1 == 4:
                print('There is a problem connecting to the data waveforms')
                exit()

        if len(aaT) == 0:
            print('maybe this is a ref name, sta:',ssi.sta)
            continue

        aaT.merge(fill_value=0)
        #if not len(aaT) == 1:
        #    print('there is a problem with data retrieving; there is more than one trace for this station')
        #    sys.exit(0)

        aaT[0].stats.coordinates = AttribDict({'latitude': ssi.lat,'elevation': ssi.elev,'longitude': ssi.lon})
        aa = aa + aaT

    aa.merge(fill_value=0)

    return aa
Example #13
def comp_ppsd(sta, debug=True):
    net = "IU"
    for chan in chans:
        print('On:' + sta + ' ' + chan)
        stime = UTCDateTime('2016-280T00:00:00')
        etime = UTCDateTime('2019-280T00:00:00')
        ctime = stime
        if sta == 'QSPA':
            sen = 3.43 * 10**9
        else:
            sen = 41.943
        paz = {'gain': 1., 'poles': [], 'zeros': [], 'sensitivity': sen}
        while ctime < etime:
            if debug:
                print(ctime)
            try:
                #if True:
                st = Stream()
                nctime = ctime
                for addday in range(5):
                    nctime = ctime + addday * 24 * 60 * 60
                    st += read('/msd/' + net + '_' + sta + '/' +
                               str(nctime.year) + '/' +
                               str(nctime.julday).zfill(3) + '/*' + chan + '*')
                if debug:
                    print(st)
            except:
                ctime += 5 * 24 * 60 * 60
                continue
            st.merge(fill_value=0)
            if 'ppsd' not in vars():
                ppsd = PPSD(st[0].stats,
                            paz,
                            db_bins=(-100, 100, 1.),
                            ppsd_length=2**14,
                            period_smoothing_width_octaves=0.50,
                            special_handling="ringlaser")
            ppsd.add(st)
            ctime += 5 * 24 * 60 * 60
        ppsd.save_npz('PDF_DATA_' + net + '_' + sta + '_' + chan + ".npz")
        ppsd.plot(filename='PPSD_' + net + '_' + sta + '_' + chan + '.PNG',
                  show_noise_models=False,
                  period_lim=(2., 1000.))
        del ppsd
    return
Example #14
def merge_single(nch,dstart,dend,dataDir):
  '''Merges traces of one channel to larger traces. Used for cross-correlation'''

  # here you load all the functions you need to use
  from obspy.seg2.seg2 import readSEG2
  from obspy.core import Stream

  tr = []

  for k in range(dstart, dend, 1):
    fname = '%d' %(k)
    fileName = fname + ".dat" 
    st = readSEG2(dataDir + fileName)
    tr.append(st[nch-1])
 
  new_stream = Stream(traces=tr)
  new_stream.merge(method=1, fill_value='interpolate')
  return new_stream
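On recent ObsPy versions the obspy.seg2.seg2 import used above no longer exists; SEG2 files are normally read through the generic plugin reader instead. A minimal equivalent of the per-file read, assuming the same "<k>.dat" naming (the path is illustrative):

from obspy import read

st = read("/path/to/STEINACH/100.dat", format="SEG2")  # one SEG2 record
trace = st[3]  # channel 4, matching st[nch-1] above for nch=4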
Example #15
    def read_from_SDS(self, sds_root, net_name, sta_name, comp_name,
                      starttime=None, endtime=None, rmean=False, taper=False,
                      pad_value=None):
        """
        Read waveform data from an SDS structured archive.  Simple overlaps and
        adjacent traces are merged if possible.

        :param sds_root: root of the SDS archive
        :param net_name: network name
        :param sta_name: station name
        :param comp_name: component name
        :param starttime: Start time of data to be read.
        :param endtime: End time of data to be read.
        :param rmean: If ``True`` removes the mean from the data upon reading.
            If data are segmented, the mean will be removed from all segments
            individually.
        :param taper: If ``True`` applies a cosine taper to the data upon
            reading.  If data are segmented, tapers are applied to all segments
            individually.
        :param pad_value: If this parameter is set, points between
            ``starttime`` and the first point in the file, and points between
            the last point in the file and ``endtime``, will be set to
            ``pad_value``.  You may want to also use the ``rmean`` and
            ``taper`` parameters, depending on the nature of the data.

        :type sds_root: string
        :type net_name: string
        :type sta_name: string
        :type comp_name: string
        :type starttime: ``obspy.core.utcdatetime.UTCDateTime`` object,
            optional
        :type endtime: ``obspy.core.utcdatetime.UTCDateTime`` object, optional
        :type rmean: boolean, optional
        :type taper: boolean, optional
        :type pad_value: float, optional

        :raises UserWarning: If there are no data between ``starttime`` and
            ``endtime``

        """

        logging.info("Reading from SDS structure %s %s %s ..." %
                     (net_name, sta_name, comp_name))

        # Get the complete file list. If a directory, get all the filenames.
        filename = os.path.join(sds_root, net_name, sta_name,
                                "%s.D" % comp_name, "*")
        logging.debug("Reading %s between %s and %s" %
                      (filename, starttime.isoformat(), endtime.isoformat()))
        if os.path.isdir(glob.glob(filename)[0]):
            filename = os.path.join(filename, "*")
        file_glob = glob.glob(filename)

        # read header from all files to keep only those within the time limits
        fnames_within_times = []
        for fname in file_glob:
            st_head = stream.read(fname, headonly=True)
            # retrieve first_start and last_end time for the stream
            # without making any assumptions on order of traces
            first_start = st_head[0].stats.starttime
            last_end = st_head[0].stats.endtime
            # find earliest start time and latest end time in stream
            for tr in st_head:
                if tr.stats.starttime < first_start:
                    first_start = tr.stats.starttime
                if tr.stats.endtime > last_end:
                    last_end = tr.stats.endtime
            # add to list if start or end time are within our requested limits
            if (first_start < endtime and last_end > starttime):
                fnames_within_times.append(fname)

        logging.debug("Found %d files to read" % len(fnames_within_times))

        # now read the full data only for the relevant files
        st = Stream()
        for fname in fnames_within_times:
            st_tmp = read(fname, starttime=starttime, endtime=endtime)
            for tr in st_tmp:
                st.append(tr)
        # and merge nicely
        st.merge(method=-1)

        if st.count() > 1:  # There are gaps after sensible cleanup merging
            logging.info("File contains gaps:")
            st.printGaps()

        # apply rmean if requested
        if rmean:
            logging.info("Removing the mean from single traces.")
            st = stream_rmean(st)

        # apply taper if requested
        if taper:
            logging.info("Tapering single traces.")
            st = stream_taper(st)

        if not pad_value is None:
            try:
                first_tr = st.traces[0]
                # save delta (to save typing)
                delta = first_tr.stats.delta
                if (not starttime is None) and \
                   ((first_tr.stats.starttime - starttime) > delta):
                    logging.debug("Padding with value %f from %s to first\
                                   point in file at %s." %
                                  (pad_value,
                                   starttime.isoformat(),
                                   first_tr.stats.starttime.isoformat()))
                    # find the number of points from starttime to
                    # end of the first trace
                    npts_full_trace = \
                        int(np.floor((first_tr.stats.endtime -
                                      starttime) / delta))+1
                    # find the number of points of the padding section
                    n_pad = npts_full_trace-first_tr.stats.npts
                    # fill the full time range with pad value
                    tr_pad = np.zeros(npts_full_trace)+pad_value
                    # substitute in the data
                    tr_pad[n_pad:] = first_tr.data[:]
                    first_tr.data = tr_pad
                    first_tr.stats.starttime = starttime
                    first_tr.stats.npts = npts_full_trace
                    st.traces[0] = first_tr

                last_tr = st.traces[-1]
                # save delta (to save typing)
                delta = last_tr.stats.delta
                if (not endtime is None) and \
                   ((endtime - last_tr.stats.endtime) > delta):
                    logging.debug("Padding with value %f from last point\
                                   in file at %s to %s." %
                                  (pad_value,
                                   last_tr.stats.endtime.isoformat(),
                                   endtime.isoformat()))
                    # find the number of points from endtime to
                    # start of the last trace
                    npts_full_trace = \
                        int(np.floor((endtime -
                                      last_tr.stats.starttime) / delta))+1
                    # fill the full time range with pad value
                    tr_pad = np.zeros(npts_full_trace)+pad_value
                    # substitute in the data
                    tr_pad[0:last_tr.stats.npts] = last_tr.data[:]
                    last_tr.data = tr_pad
                    last_tr.stats.npts = npts_full_trace
                    st.traces[-1] = last_tr

            except IndexError:
                logging.warning('No data within time limits requested')
                raise UserWarning('No data within time limits requested.')

        try:
            self.stream = st
            self.trace = st.traces[0]
            self.proc = "None"
        except IndexError:
            raise UserWarning('No data within time limits requested.')
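The pad_value branch above fills the span between the requested starttime and the first recorded sample with a constant value. The same idea in isolation, as a small sketch on a synthetic trace (all values are illustrative):

import numpy as np
from obspy import Trace, UTCDateTime

requested_start = UTCDateTime("2020-01-01T00:00:00")
tr = Trace(data=np.ones(100),
           header={"sampling_rate": 1.0, "starttime": requested_start + 30})

pad_value = 0.0
n_pad = int(round((tr.stats.starttime - requested_start) / tr.stats.delta))
padded = np.full(n_pad + tr.stats.npts, pad_value)
padded[n_pad:] = tr.data
tr.data = padded                      # npts is updated automatically
tr.stats.starttime = requested_start
print(tr.stats.npts)                  # 130 samples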
Example #16
def spotlme(lat, lon, dep, loading, stime, etime, srate, debug=True):
    if loading:

        tides = ['k1', 'k2', 'm2', 'm4', 'mf', 'mm', 'n2', 'o1', 'p1', 'q1', 's2']

        for idx, tide in enumerate(tides):
            string = '../bin/nloadf TEMP ' + str(lat) + ' ' + str(lon) + ' ' + str(dep) + ' '
            string += tide + '.osu.tpxo72.2010 ' + 'green.gbavap.std l'
            if idx == 0:
                pipe = ' >'
            else: 
                pipe = ' >>'
            cmd = string + ' ' + pipe + ' LoadALL' 
            os.system(cmd)
            if debug:
                print(cmd)
        st = Stream()
        for comp in ['Z', 'N', 'E']:
            if debug:
                print('On component:' + comp)
            
            cmd = 'cat LoadALL | ../bin/harprp '
            if comp == 'Z':
                cmd += 'g '
            elif comp == 'N':
                cmd += 't 0 '
            else:
                cmd += 't 90 '
            cmd += '> tempLoad2'    
            if debug:
                print(cmd)
            os.system(cmd)

            ctime = stime
            #cmd = 'cat tempLoad2 | ../bin/loadcomb t'
            tstring = str(ctime.year) + ' ' + str(ctime.julday) + ' 0 0 0'
            vals = int((etime -stime)/(srate))
            cmd = 'cat tempLoad2 | ../bin/hartid ' + tstring + ' ' + str(vals) + ' ' + str(srate) + ' > temp' + comp
            if debug:
                print(cmd)
            os.system(cmd)

            f=open('temp' + comp,'r')
            data = []
            for line in f:
                if comp == 'Z':
                    data.append(-float(line))
                else:
                    data.append(-float(line))

            f.close()
            data = np.array(data)
            stats = {'network': 'XX', 'station': sta, 'location': '',
                'channel' : 'LH' + comp, 'npts': len(data), 'sampling_rate': 1./srate,
                'mseed' : {'dataquality': 'D'}}
            stats['starttime'] = ctime
            st += Stream([Trace(data=data, header = stats)])
            ctime += 24.*60.*60.
            os.remove('temp' + comp)
            os.remove('tempLoad2')
        oldfiles = glob.glob('tempLoad.*')
        for curfile in oldfiles:
            os.remove(curfile)
        st.merge()
    else:
        # Here we just do the tides with no loading
        pfile = open('para_file','w')
        pfile.write(str(stime.year) + ',' + str(stime.julday) + ',0\n')
        pfile.write(str(etime.year) + ',' + str(etime.julday) + ',0\n')
        # In terms of 1 hour
        pfile.write(str(srate/(60*60)) + '\n')
        pfile.write('t\n')
        pfile.write(str(lat) + '\n')
        pfile.write(str(lon) + '\n')
        # compute gravity
        pfile.write('1\n')
        # Compute two tilt tides
        pfile.write('2\n')
        # compute no strain
        pfile.write('0\n')
        pfile.write('0\n')
        pfile.write('90\n')
        pfile.write('tempZ\n')
        pfile.write('tempN\n')
        pfile.write('tempE\n')
        pfile.close()
        cmd =  '../bin/ertid < para_file'
        os.system(cmd)
        os.remove('para_file')
        st = Stream()
        for comp in ['Z', 'N', 'E']:
            f=open('temp' + comp,'r')
            data = []
            for line in f:
                if comp == 'Z':
                    data.append(-float(line))
                else:
                    data.append(-float(line))

            f.close()
            os.remove('temp' + comp)
            data = np.asarray(data)
            stats = {'network': 'YY', 'station': sta, 'location': '',
                    'channel' : 'UH' + comp, 'npts': len(data), 'sampling_rate': 1./srate,
                    'mseed' : {'dataquality': 'D'}}
            stats['starttime'] = stime 
            st += Stream([Trace(data=data, header = stats)])
    
    return st
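The function above builds Trace objects directly from raw sample arrays via a stats dictionary (note that sta is taken from an enclosing scope there, not passed as a parameter). The construction pattern in isolation, as a minimal sketch with made-up values:

import numpy as np
from obspy import Stream, Trace, UTCDateTime

data = np.zeros(3600)  # one hour of 1 Hz samples (placeholder)
stats = {"network": "XX", "station": "TEST", "location": "",
         "channel": "LHZ", "sampling_rate": 1.0,
         "starttime": UTCDateTime("2020-01-01")}
st = Stream([Trace(data=data, header=stats)])
st.write("/tmp/tides_example.mseed", format="MSEED")  # path is illustrative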
Example #17
def preprocess(db, stations, comps, goal_day, params, responses=None):

    datafiles = {}
    output = Stream()
    for station in stations:
        datafiles[station] = {}
        net, sta = station.split('.')
        gd = datetime.datetime.strptime(goal_day, '%Y-%m-%d')
        files = get_data_availability(
            db, net=net, sta=sta, starttime=gd, endtime=gd)
        for comp in comps:
            datafiles[station][comp] = []
        for file in files:
            if file.comp[-1] not in comps:
                continue
            fullpath = os.path.join(file.path, file.file)
            datafiles[station][file.comp[-1]].append(fullpath)

    for istation, station in enumerate(stations):
        net, sta = station.split(".")
        for comp in comps:
            files = eval("datafiles['%s']['%s']" % (station, comp))
            if len(files) != 0:
                logging.debug("%s.%s Reading %i Files" %
                              (station, comp, len(files)))
                stream = Stream()
                for file in sorted(files):
                    st = read(file, dtype=np.float,
                              starttime=UTCDateTime(gd),
                              endtime=UTCDateTime(gd)+86400)
                    tmp = st.select(network=net, station=sta, component=comp)
                    if not len(tmp):
                        for tr in st:
                            tr.stats.network = net
                        st = st.select(network=net, station=sta, component=comp)
                    else:
                        st = tmp
                    for tr in st:
                        tr.data = tr.data.astype(np.float)
                    stream += st
                    del st
                stream.sort()
                stream.merge(method=1, interpolation_samples=3, fill_value=None)
                stream = stream.split()

                logging.debug("Checking sample alignment")
                for i, trace in enumerate(stream):
                    stream[i] = check_and_phase_shift(trace)

                logging.debug("Checking Gaps")
                if len(getGaps(stream)) > 0:
                    max_gap = 10
                    only_too_long = False
                    while getGaps(stream) and not only_too_long:
                        too_long = 0
                        gaps = getGaps(stream)
                        for gap in gaps:
                            if int(gap[-1]) <= max_gap:
                                stream[gap[0]] = stream[gap[0]].__add__(stream[gap[1]], method=1,
                                                                        fill_value="interpolate")
                                stream.remove(stream[gap[1]])
                                break
                            else:
                                too_long += 1
                        if too_long == len(gaps):
                            only_too_long = True
                stream = stream.split()
                taper_length = 20.0  # seconds
                for trace in stream:
                    if trace.stats.npts < 4 * taper_length * trace.stats.sampling_rate:
                        stream.remove(trace)
                    else:
                        trace.detrend(type="demean")
                        trace.detrend(type="linear")
                        trace.taper(max_percentage=None, max_length=1.0)

                if not len(stream):
                    logging.debug(" has only too small traces, skipping...")
                    continue

                for trace in stream:
                    logging.debug(
                        "%s.%s Highpass at %.2f Hz" % (station, comp, params.preprocess_highpass))
                    trace.filter("highpass", freq=params.preprocess_highpass, zerophase=True)

                    if trace.stats.sampling_rate != params.goal_sampling_rate:
                        logging.debug(
                            "%s.%s Lowpass at %.2f Hz" % (station, comp, params.preprocess_lowpass))
                        trace.filter("lowpass", freq=params.preprocess_lowpass, zerophase=True, corners=8)

                        if params.resampling_method == "Resample":
                            logging.debug("%s.%s Downsample to %.1f Hz" %
                                          (station, comp, params.goal_sampling_rate))
                            trace.data = resample(
                                trace.data, params.goal_sampling_rate / trace.stats.sampling_rate, 'sinc_fastest')

                        elif params.resampling_method == "Decimate":
                            decimation_factor = trace.stats.sampling_rate / params.goal_sampling_rate
                            if not int(decimation_factor) == decimation_factor:
                                logging.warning("%s.%s CANNOT be decimated by an integer factor, consider using Resample or Lanczos methods"
                                                " Trace sampling rate = %i ; Desired CC sampling rate = %i" %
                                                (station, comp, trace.stats.sampling_rate, params.goal_sampling_rate))
                                sys.stdout.flush()
                                sys.exit()
                            logging.debug("%s.%s Decimate by a factor of %i" %
                                          (station, comp, decimation_factor))
                            trace.data = trace.data[::int(decimation_factor)]

                        elif params.resampling_method == "Lanczos":
                            logging.debug("%s.%s Downsample to %.1f Hz" %
                                          (station, comp, params.goal_sampling_rate))
                            trace.data = np.array(trace.data)
                            trace.interpolate(method="lanczos", sampling_rate=params.goal_sampling_rate, a=1.0)

                        trace.stats.sampling_rate = params.goal_sampling_rate

                if get_config(db, 'remove_response', isbool=True):
                    logging.debug('%s Removing instrument response'%stream[0].id)
                    response_prefilt = eval(get_config(db, 'response_prefilt'))

                    response = responses[responses["channel_id"] == stream[0].id]
                    if len(response) > 1:
                        response = response[response["start_date"]<UTCDateTime(gd)]
                        response = response[response["end_date"]>UTCDateTime(gd)]
                    elif len(response) == 0:
                        logging.info("No instrument response information "
                                     "for %s, exiting" % stream[0].id)
                        sys.exit()
                    datalesspz = response["paz"].values[0]
                    stream.simulate(paz_remove=datalesspz,
                                    remove_sensitivity=True,
                                    pre_filt=response_prefilt,
                                    paz_simulate=None, )
                for tr in stream:
                    tr.data = tr.data.astype(np.float32)
                output += stream
                del stream
            del files
    clean_scipy_cache()
    return 0, output
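The resampling branch above offers three strategies (Resample, Decimate, Lanczos). A condensed sketch of the Lanczos path only, on a synthetic 100 Hz trace with a 20 Hz target; the 8 Hz lowpass corner stands in for params.preprocess_lowpass and is an assumption:

import numpy as np
from obspy import Trace

tr = Trace(data=np.random.randn(6000), header={"sampling_rate": 100.0})
goal_sampling_rate = 20.0

if tr.stats.sampling_rate != goal_sampling_rate:
    # anti-alias lowpass before downsampling, as in the code above
    tr.filter("lowpass", freq=8.0, zerophase=True, corners=8)
    tr.interpolate(method="lanczos", sampling_rate=goal_sampling_rate, a=1.0)

print(tr.stats.sampling_rate, tr.stats.npts)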
Example #18
    def computeStepCal(self):
        # cal duration needs to be divided by 10000 for step cals only.  This
        # only applies for when you are reading the cal duration from the
        # database.
        if (self.dbconn is not None):
            # divide by 10000 when getting the cal_duration from the database
            duration = self.cal_duration / 10000.0
        else:
            duration = self.cal_duration

        # Determine the type of sensor from the metadata
        sensor = self._determineSensorType()

        # ignores every location except for Z for triaxial STS-2s
        if ((self.dbconn is not None) and ("Z" not in self.outChannel) and
            (sensor == "STS-2HG" or sensor == "STS-4B" or sensor == "STS-2")):
            print("Skipped " + str(self.outChannel) + ' ' + sensor)

        # get the poles values for the sensor type
        pz = self._pzvals(sensor)

        # read data for the calibration
        try:
            stOUT = Stream()
            stime = UTCDateTime(self.startdate) - 5 * 60
            stOUT = read(self.dataOutLoc,
                         starttime=stime,
                         endtime=stime + duration + 5 * 60 + 900)
            stOUT.merge()
            stIN = read(self.dataInLoc,
                        starttime=stime,
                        endtime=stime + duration + 5 * 60 + 900)
            stIN.merge()
            trIN = stIN[0]
            trOUT = stOUT[0]
            trOUT.filter('lowpass', freq=.1)
            trIN.filter('lowpass', freq=.1)
            trIN.detrend('constant')
            trIN.normalize()
            trOUT.detrend('constant')
            trOUT.normalize()
            temp = trOUT.copy()
            temp.trim(endtime=stime + int(duration / 2.))
            if temp.max() < 0.0:
                trOUT.data = -trOUT.data
        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to read data for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to read data
                                          for manual input file ''' +
                                          str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # compute corner (cutoff) frequency
            f = 1. / (2 * math.pi / abs(pz['poles'][0]))
            # compute damping ratio
            h = abs(pz['poles'][0].real) / abs(pz['poles'][0])
            sen = 10.0

            print('Using: h=' + str(h) + ' f=' + str(f) + ' sen = ' + str(sen))

            x = numpy.array([f, h, sen])
            try:
                # compute best fit
                bf = fmin(self._resi,
                          x,
                          args=(trIN, trOUT),
                          xtol=10**-8,
                          ftol=10**-3,
                          disp=False)
            except:
                bf = x

        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to calculate {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                          perform corner freq, damping ratio,
                                          and best fit calculations for input
                                          file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            pazNOM = cornFreq2Paz(f, h)
            pazNOM['zeros'] = [0. + 0.j]

            pazPERT = cornFreq2Paz(bf[0], bf[1])
            pazPERT['zeros'] = [0]

            trOUTsimPert = trOUT.copy()
            trOUTsimPert.simulate(paz_remove=pazPERT)
            trOUTsimPert.trim(trOUTsimPert.stats.starttime + 50,
                              trOUTsimPert.stats.endtime - 50)
            trOUTsimPert.detrend('constant')
            trOUTsimPert.normalize()

            trOUTsim = trOUT.copy()

            trOUTsim.simulate(paz_remove=pazNOM)
            trOUTsim.trim(trOUTsim.stats.starttime + 50,
                          trOUTsim.stats.endtime - 50)
            trOUTsim.detrend('constant')
            trOUTsim.normalize()

            trIN.trim(trIN.stats.starttime + 50, trIN.stats.endtime - 50)
            trIN.detrend('constant')
            trIN.normalize()

            compOUT = sum((trOUTsim.data - trIN.data)**2)
            compOUTPERT = sum((trOUTsimPert.data - trIN.data)**2)
        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to do calculation for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                         perform poles calculation for input
                                         file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # create a plot for the step calibration and save it to the ./temp
            # directory.  This directory will be deleted when the program is
            # finished running.
            plt.clf()
            t = numpy.arange(
                0, trOUTsim.stats.npts / trOUTsim.stats.sampling_rate,
                trOUTsim.stats.delta)
            plt.plot(t, trIN.data, 'b', label='input')
            plt.plot(t,
                     trOUTsim.data,
                     'k',
                     label='h=' + str(round(h, 6)) + ' f=' + str(round(f, 6)) +
                     ' resi=' + str(round(compOUT, 6)))
            plt.plot(t,
                     trOUTsimPert.data,
                     'g',
                     label='h=' + str(round(bf[1], 6)) + ' f=' +
                     str(round(bf[0], 6)) + ' resi=' +
                     str(round(compOUTPERT, 6)))
            plt.xlabel('Time (s)')
            plt.ylabel('Cnts normalized')
            plt.title('Step Calibration ' + trOUT.stats.station + ' ' +
                      str(trOUT.stats.starttime.year) + ' ' +
                      str(trOUT.stats.starttime.julday).zfill(3))
            plt.legend(prop={'size': 6})
            plt.savefig('temp/' + str(trOUT.stats.station) +
                        str(self.outChannel) + str(self.location) +
                        str(self.startdate.year) + str(self.julianday) +
                        'step.png',
                        format="png",
                        dpi=400)
        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to plot {' + 'network = ' +
                                          self.network + ', station = ' +
                                          self.station + ', sensor = ' +
                                          str(sensor) + ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error(
                    '(Manual Override) Unable to make plot for input file ' +
                    str(self.dataInLoc) + ' and output file ' +
                    str(self.dataOutLoc))
        if (self.dbconn is not None):
            try:
                plt.close()
                # insert results into the database
                fin = open(
                    'temp/' + str(trOUT.stats.station) + str(self.outChannel) +
                    str(self.location) + str(self.startdate.year) +
                    str(self.julianday) + 'step.png', 'rb')
                imgdata = fin.read()
                cur = self.dbconn.cursor()
                cur.execute(
                    '''INSERT INTO tbl_300calresults (fk_calibrationid,
                              nominal_cornerfreq, nominal_dampingratio, nominal_resi,
                              fitted_cornerfreq, fitted_dampingratio, fitted_resi,
                              outchannel, stepcal_img)
                              VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)''', [
                        self.cal_id,
                        round(f, 6),
                        round(h, 6),
                        round(compOUT, 6),
                        round(bf[0], 6),
                        round(bf[1], 6),
                        round(compOUTPERT, 6),
                        str(self.outChannel),
                        psycopg2.Binary(imgdata)
                    ])
                self.dbconn.commit()
            except:
                self.stepcal_logger.error(
                    'Unable to insert into database for {' + 'network = ' +
                    self.network + ', station = ' + self.station +
                    ', sensor = ' + str(sensor) + ', location = ' +
                    str(self.location) + ', channel = ' +
                    str(self.outChannel) + '}')

        else:
            try:
                print('nominal corner freq = ' + str(round(f, 6)) +
                      ', nominal damping ratio = ' + str(round(h, 6)) +
                      ', nominal best fit = ' + str(round(compOUT, 6)) +
                      ', fitted corner freq = ' + str(round(bf[0], 6)) +
                      ', fitted damping ratio = ' + str(round(bf[1], 6)) +
                      ', pert best fit ' + str(round(compOUTPERT, 6)))
                plt.show()
                plt.close()
            except:
                print(
                    '(Manual Override) Error displaying calculation results.')
Example #19
File: waveform.py Project: obspy/branches
    def __plotStraight(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Just plots the data samples in the self.stream. Useful for smaller
        datasets up to around 1000000 samples (depending on the machine it is
        being run on).

        Slow and high memory consumption for large datasets.
        """
        # Copy to avoid any changes to original data.
        trace = deepcopy(trace)
        if len(trace) > 1:
            stream = Stream(traces=trace)
            # Merge with 'interpolation'. In case of overlaps this method will
            # always use the longest available trace.
            if hasattr(trace[0].stats, 'preview') and trace[0].stats.preview:
                stream = Stream(traces=stream)
                stream = mergePreviews(stream)
            else:
                stream.merge(method=1)
            trace = stream[0]
        else:
            trace = trace[0]
        # Check if it is a preview file and adjust accordingly.
        # XXX: Will look weird if the preview file is too small.
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            # Mask the gaps.
            trace.data = np.ma.masked_array(trace.data)
            trace.data[trace.data == -1] = np.ma.masked
            # Recreate the min_max scene.
            dtype = trace.data.dtype
            old_time_range = trace.stats.endtime - trace.stats.starttime
            data = np.empty(2 * trace.stats.npts, dtype=dtype)
            data[0::2] = trace.data / 2.0
            data[1::2] = -trace.data / 2.0
            trace.data = data
            # The times are not supposed to change.
            trace.stats.delta = old_time_range / float(trace.stats.npts - 1)
        # Write to self.stats.
        calib = trace.stats.calib
        max = trace.data.max()
        min = trace.data.min()
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            tr_id = trace.id + ' [preview]'
        else:
            tr_id = trace.id
        self.stats.append([tr_id, calib * trace.data.mean(),
                           calib * min, calib * max])
        # Pad the beginning and the end with masked values if necessary. Might
        # seem like overkill but it works really fast and is a clean solution
        # to gaps at the beginning/end.
        concat = [trace]
        if self.starttime != trace.stats.starttime:
            samples = (trace.stats.starttime - self.starttime) * \
                trace.stats.sampling_rate
            temp = [np.ma.masked_all(int(samples))]
            temp.extend(concat)
            concat = temp
        if self.endtime != trace.stats.endtime:
            samples = (self.endtime - trace.stats.endtime) * \
                      trace.stats.sampling_rate
            concat.append(np.ma.masked_all(int(samples)))
        if len(concat) > 1:
            # Use the masked array concatenate, otherwise it will result in a
            # not masked array.
            trace.data = np.ma.concatenate(concat)
            # set starttime and calculate endtime
            trace.stats.starttime = self.starttime
        trace.data *= calib
        ax.plot(trace.data, color=self.color)
        # Set the x limit for the graph to also show the masked values at the
        # beginning/end.
        ax.set_xlim(0, len(trace.data) - 1)
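The padding block near the end of __plotStraight relies on masked arrays so that samples outside the trace show up as gaps in the plot rather than as artificial values. The same mechanism in isolation, as a short sketch:

import numpy as np

trace_data = np.arange(10, dtype=np.float64)
n_pad = 5

# np.ma.masked_all creates an all-masked block; np.ma.concatenate keeps the
# mask, whereas plain np.concatenate would silently drop it.
padded = np.ma.concatenate([np.ma.masked_all(n_pad), trace_data])
print(padded.mask[:n_pad].all(), padded[n_pad:].sum())  # True 45.0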
Example #20
    def getWaveform(self,
                    startTime,
                    endTime,
                    scnl):
        ''' Get the waveform data for the specified parameters.

        Parameters
        ----------
        startTime : UTCDateTime
            The begin datetime of the data to fetch.

        endTime : UTCDateTime
            The end datetime of the data to fetch.

        scnl : List of tuples
            The SCNL codes of the data to request.


        Returns
        -------
        stream : :class:`obspy.core.Stream`
            The requested waveform data. All traces are packed into one stream.
        '''
        from obspy.core import Stream

        self.logger.debug("Querying...")
        self.logger.debug('startTime: %s', startTime)
        self.logger.debug('endTime: %s', endTime)
        self.logger.debug("%s", scnl)

        stream = Stream()
        for curScnl in scnl:
            curStation = curScnl[0]
            curChannel = curScnl[1]
            curNetwork = curScnl[2]
            curLocation = curScnl[3]

            stock_stream = self.get_from_stock(station = curStation,
                                               channel = curChannel,
                                               network = curNetwork,
                                               location = curLocation,
                                               start_time = startTime,
                                               end_time = endTime)

            if len(stock_stream) > 0:
                cur_trace = stock_stream.traces[0]
                cur_start_time = cur_trace.stats.starttime
                cur_end_time = cur_trace.stats.starttime + old_div(cur_trace.stats.npts, cur_trace.stats.sampling_rate)

                stream += stock_stream

                if startTime < cur_start_time:
                    curStream = self.request_from_server(station = curStation,
                                                         channel = curChannel,
                                                         network = curNetwork,
                                                         location = curLocation,
                                                         start_time = startTime,
                                                         end_time = cur_start_time)
                    stream += curStream

                if cur_end_time < endTime:
                    curStream = self.request_from_server(station = curStation,
                                                         channel = curChannel,
                                                         network = curNetwork,
                                                         location = curLocation,
                                                         start_time = cur_end_time,
                                                         end_time = endTime)
                    stream += curStream

            else:
                curStream = self.request_from_server(station = curStation,
                                                     channel = curChannel,
                                                     network = curNetwork,
                                                     location = curLocation,
                                                     start_time = startTime,
                                                     end_time = endTime)
                stream += curStream

            stream.merge()

        self.add_to_stock(stream)

        return stream
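A minimal usage sketch (the client instance, SCNL tuple and times below are hypothetical):

    scnl = [('STA01', 'HHZ', 'XX', '00')]    # (station, channel, network, location)
    st = client.getWaveform(startTime=UTCDateTime(2019, 8, 16, 12, 59, 0),
                            endTime=UTCDateTime(2019, 8, 16, 13, 1, 0),
                            scnl=scnl)
    print(st)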
Example #21
       comps = ['Z']
       tramef_Z = np.zeros((len(stations), len(TimeVec)))
 
   
   j = 0
   for istation, station in enumerate(stations):
       for comp in comps:
           files = eval("datafiles%s['%s']"%(comp,station))
           if len(files) != 0:
               logging.debug("%s.%s Reading %i Files" % (station, comp, len(files)))
               stream = Stream()
               for file in sorted(files):
                   st = read(file,format="MSEED")
                   stream += st
                   del st
               stream.merge()
               stream = stream.split()
               for trace in stream:
                   data = trace.data
                   if len(data) > 2:
                        tp = cosTaper(len(data), 0.01)
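                        # 1% cosine taper: demeaning and tapering the trace
                        # ends suppresses edge discontinuities before the gaps
                        # are later filled with zeros in the merge below.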
                       data -= np.mean(data)
                       data *= tp
                       trace.data = data
                   else:
                       trace.data *= 0
                   del data
               logging.debug("%s.%s Merging Stream" % (station, comp))
               stream.merge(fill_value=0) #fills gaps with 0s and gives only one 'Trace'
               logging.debug("%s.%s Slicing Stream to %s:%s" % (station, comp,utcdatetime.UTCDateTime(goal_day.replace('-','')),utcdatetime.UTCDateTime(goal_day.replace('-',''))+goal_duration-stream[0].stats.delta))
               
Example #22
                                     tlen_bef)
        # print("ori == ", ori)

        if Flag_Read_Stats == 1 and ch_id != "None":
            ori = ori - time_shift

        # get channel id
        chan = tt.stats.channel
        netwk = tt.stats.network
        idchan = netwk + "." + sstat + ".." + chan
        idchan_dic = netwk + "." + sstat + "." + chan
        # print("idchan_dic == ", idchan_dic)

        if st_cont.select(id=idchan):
            st1_cont = st_cont.select(id=idchan)
            st1_cont.merge()
            # print(st1_cont.__str__(extended=True))
            tc = st1_cont[0]
            # print("Trace Id.i= ", st1_cont[0])
            tc.trim(
                starttime=UTCDateTime(detection_otime),
                endtime=UTCDateTime(detection_otime) + 2 * det_dur,
                pad=True,
                nearest_sample=True,
                fill_value=0,
            )
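            # pad=True together with fill_value=0 guarantees a fixed-length
            # window of 2 * det_dur even if the continuous data stop short of
            # the requested end time.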
            # print("tc.stats.starttime == ", tc.stats.starttime)
            # print("detection_otime == ", UTCDateTime(detection_otime))

            # compute maximum amplitude in the continuous waveforms
            # for magnitude estimation
Example #23
    def getWaveform(self, startTime, endTime, scnl):
        ''' Get the waveform data for the specified parameters.

        Parameters
        ----------
        startTime : UTCDateTime
            The begin datetime of the data to fetch.

        endTime : UTCDateTime
            The end datetime of the data to fetch.

        scnl : List of Tuples (STATION, CHANNEL, NETWORK, LOCATION)
            The channels for which to get the waveform data.

        Returns
        -------
        stream : :class:`obspy.core.Stream`
            The requested waveform data. All traces are packed into one stream.
        '''
        self.logger.debug("Getting the waveform for SCNL: %s from %s to %s...", scnl, startTime.isoformat(), endTime.isoformat())

        stream = Stream()

        # Trim the stock stream to new limits.
        self.trim_stock(start_time = startTime, end_time = endTime)

        # Filter the SCNL selections.
        if scnl:
            for stat, chan, net, loc in scnl:
                stock_stream = self.get_from_stock(station = stat,
                                                   channel = chan,
                                                   network = net,
                                                   location = loc,
                                                   start_time = startTime,
                                                   end_time = endTime)

                if len(stock_stream) > 0:
                    self.logger.debug('Found data in stock....\n%s', stock_stream)
                    stock_stream.merge()
                    cur_trace = stock_stream.traces[0]
                    cur_start_time = cur_trace.stats.starttime
                    cur_end_time = cur_trace.stats.starttime + old_div(cur_trace.stats.npts, cur_trace.stats.sampling_rate)

                    stream += stock_stream.split()

                    if (cur_start_time - startTime) > old_div(1,cur_trace.stats.sampling_rate):
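                        # The stock stream starts more than one sample period
                        # after the requested start time, so load the missing
                        # leading data from file (and likewise for the end
                        # below).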
                        self.logger.debug('Get missing data in front...')
                        self.logger.debug('Loading data from %s to %s.', startTime, cur_start_time)
                        curStream = self.load_from_file(station = stat,
                                                        channel = chan,
                                                        network = net,
                                                        location = loc,
                                                        start_time = startTime,
                                                        end_time = cur_start_time)
                        stream += curStream

                    if (endTime - cur_end_time) > old_div(1,cur_trace.stats.sampling_rate):
                        self.logger.debug('Get missing data in back...')
                        self.logger.debug('Loading data from %s to %s.', cur_end_time, endTime)
                        curStream = self.load_from_file(station = stat,
                                                        channel = chan,
                                                        network = net,
                                                        location = loc,
                                                        start_time = cur_end_time,
                                                        end_time = endTime)
                        stream += curStream

                    if isinstance(stock_stream.traces[0].data, np.ma.masked_array):
                        # Try to fill the data gaps.
                        stock_stream = stock_stream.split()
                        gaps = stock_stream.get_gaps()
                        if len(gaps) > 0:
                            self.logger.debug('There are gaps in the stock stream. Try to fill them...')
                        for cur_gap in gaps:
                            if cur_gap in self.stock_data_gaps:
                                self.logger.debug("The gap %s is part of a miniseed file. Don't reload the data.", cur_gap)
                            else:
                                self.logger.debug('Loading data for gap %s.', cur_gap)
                                curStream = self.load_from_file(station = stat,
                                                                channel = chan,
                                                                network = net,
                                                                location = loc,
                                                                start_time = cur_gap[4],
                                                                end_time = cur_gap[5])
                                stream += curStream


                else:
                    self.logger.debug('No stock data available...')
                    self.logger.debug('Loading data from %s to %s.', startTime, endTime)
                    curStream = self.load_from_file(station = stat,
                                                    channel = chan,
                                                    network = net,
                                                    location = loc,
                                                    start_time = startTime,
                                                    end_time = endTime)

                    stream += curStream

                stream.merge()

        # Trim the stream to the requested time span using only the samples
        # inside the time span.
        stream = stream.trim(starttime = startTime,
                             endtime = endTime,
                             nearest_sample = False)

        self.logger.debug("....finished getting the waveform.")

        return stream
Example #24
    def computeStepCal(self):
        # cal duration needs to be divided by 10000 for step cals only.  This
        # only applies for when you are reading the cal duration from the
        # database.
        if(self.dbconn is not None):
            # divide by 10000 when getting the cal_duration from the database
            duration = self.cal_duration / 10000.0
        else:
            duration = self.cal_duration

        # Determine the type of sensor from the metadata
        sensor = self._determineSensorType()

        # ignores every location except for Z for triaxial STS-2s
        if((self.dbconn is not None) and ("Z" not in self.outChannel) and
           (sensor == "STS-2HG" or sensor == "STS-4B" or sensor == "STS-2")):
            print("Skipped " + str(self.outChannel) + ' ' + sensor)

        # get the poles values for the sensor type
        pz = self._pzvals(sensor)

        # read data for the calibration
        try:
            stOUT = Stream()
            stime = UTCDateTime(self.startdate) - 5 * 60
            stOUT = read(
                self.dataOutLoc, starttime=stime,
                endtime=stime + duration + 5 * 60 + 900
            )
            stOUT.merge()
            stIN = read(
                self.dataInLoc, starttime=stime,
                endtime=stime + duration + 5 * 60 + 900
            )
            stIN.merge()
            trIN = stIN[0]
            trOUT = stOUT[0]
            trOUT.filter('lowpass', freq=.1)
            trIN.filter('lowpass', freq=.1)
            trIN.detrend('constant')
            trIN.normalize()
            trOUT.detrend('constant')
            trOUT.normalize()
            temp = trOUT.copy()
            temp.trim(endtime=stime + int(duration / 2.))
            if temp.max() < 0.0:
                trOUT.data = -trOUT.data
        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to read data for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable read data
                                          for manual input file ''' +
                                          str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # compute corner (cutoff) frequency
            f = 1. / (2 * math.pi / abs(pz['poles'][0]))
            # compute damping ratio
            h = abs(pz['poles'][0].real) / abs(pz['poles'][0])
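            # For a second-order system with a complex pole p, the corner
            # frequency is f = |p| / (2*pi) and the damping ratio is
            # h = |Re(p)| / |p|; the two lines above compute these from the
            # first pole of the nominal response.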
            sen = 10.0

            print('Using: h=' + str(h) + ' f=' + str(f) + ' sen = ' + str(sen))

            x = numpy.array([f, h, sen])
            try:
                # compute best fit
                bf = fmin(self._resi, x, args=(trIN, trOUT),
                          xtol=10 ** -8, ftol=10 ** -3, disp=False)
            except:
                bf = x

        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to calculate {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                          perform corner freq, damping ratio,
                                          and best fit calculations for input
                                          file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            pazNOM = cornFreq2Paz(f, h)
            pazNOM['zeros'] = [0. + 0.j]

            pazPERT = cornFreq2Paz(bf[0], bf[1])
            pazPERT['zeros'] = [0]

            trOUTsimPert = trOUT.copy()
            trOUTsimPert.simulate(paz_remove=pazPERT)
            trOUTsimPert.trim(
                trOUTsimPert.stats.starttime + 50, trOUTsimPert.stats.endtime - 50)
            trOUTsimPert.detrend('constant')
            trOUTsimPert.normalize()

            trOUTsim = trOUT.copy()

            trOUTsim.simulate(paz_remove=pazNOM)
            trOUTsim.trim(
                trOUTsim.stats.starttime + 50, trOUTsim.stats.endtime - 50)
            trOUTsim.detrend('constant')
            trOUTsim.normalize()

            trIN.trim(trIN.stats.starttime + 50, trIN.stats.endtime - 50)
            trIN.detrend('constant')
            trIN.normalize()

            compOUT = sum((trOUTsim.data - trIN.data) ** 2)
            compOUTPERT = sum((trOUTsimPert.data - trIN.data) ** 2)
        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to do calculation for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                         perform poles calculation or input
                                         file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # create a plot for the step calibration and save it to the ./temp
            # directory.  This directory will be deleted when the program is
            # finished running.
            plt.clf()
            t = numpy.arange(
                0, trOUTsim.stats.npts / trOUTsim.stats.sampling_rate, trOUTsim.stats.delta)
            plt.plot(t, trIN.data, 'b', label='input')
            plt.plot(t, trOUTsim.data, 'k', label='h=' + str(round(h, 6)) +
                     ' f=' + str(round(f, 6)) + ' resi=' + str(round(compOUT, 6)))
            plt.plot(t, trOUTsimPert.data, 'g', label='h=' + str(round(bf[1], 6)) + ' f=' + str(
                round(bf[0], 6)) + ' resi=' + str(round(compOUTPERT, 6)))
            plt.xlabel('Time (s)')
            plt.ylabel('Cnts normalized')
            plt.title('Step Calibration ' + trOUT.stats.station + ' ' + str(
                trOUT.stats.starttime.year) + ' ' + str(trOUT.stats.starttime.julday).zfill(3))
            plt.legend(prop={'size': 6})
            plt.savefig('temp/' + str(trOUT.stats.station) + str(self.outChannel) + str(self.location) +
                        str(self.startdate.year) + str(self.julianday) + 'step.png', format="png", dpi=400)
        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to plot {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('(Manual Override) Unable to make plot for input file ' + str(
                    self.dataInLoc) + ' and output file ' + str(self.dataOutLoc))
        if(self.dbconn is not None):
            try:
                plt.close()
                # insert results into the database
                fin = open('temp/' + str(trOUT.stats.station) + str(self.outChannel) + str(
                    self.location) + str(self.startdate.year) + str(self.julianday) + 'step.png', 'rb')
                imgdata = fin.read()
                cur = self.dbconn.cursor()
                cur.execute('''INSERT INTO tbl_300calresults (fk_calibrationid,
                              nominal_cornerfreq, nominal_dampingratio, nominal_resi,
                              fitted_cornerfreq, fitted_dampingratio, fitted_resi,
                              outchannel, stepcal_img)
                              VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)''',
                            [self.cal_id, round(f, 6), round(h, 6),
                             round(compOUT, 6), round(bf[0], 6),
                             round(bf[1], 6), round(compOUTPERT, 6),
                             str(self.outChannel), psycopg2.Binary(imgdata)])
                self.dbconn.commit()
            except:
                self.stepcal_logger.error('Unable to insert into database for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')

        else:
            try:
                print('nominal corner freq = ' + str(round(f, 6)) +
                      ', nominal damping ratio = ' + str(round(h, 6)) +
                      ', nominal best fit = ' + str(round(compOUT, 6)) +
                      ', fitted corner freq = ' + str(round(bf[0], 6)) +
                      ', fitted damping ratio = ' + str(round(bf[1], 6)) +
                      ', pert best fit ' + str(round(compOUTPERT, 6)))
                plt.show()
                plt.close()
            except:
                print(
                    '(Manual Override) Error displaying calculation results.')
Example #25
class Seedlink_plotter(SLClient):
    """
    This module plots realtime seismic data from a Seedlink server
    """

    def __init__(self, figure, canvas, interval, backtrace, args):

        # Set the log level to display minimal info
        super(Seedlink_plotter, self).__init__(loglevel='CRITICAL')
#         super(Seedlink_plotter, self).__init__()
        self.figure = figure
        self.stream = Stream()
        self.interval = interval
        self.backtrace = backtrace
        self.canvas = canvas
        self.flip = 0
        self.scale = args.scale
        self.args = args
        self.initial_update_rate = 800
        self.update_rate = 2
        # Plot after getting the penultimate line of data
        self.print_percentage = (
            self.backtrace-60.0*self.interval)/self.backtrace
        self.print_max = (self.backtrace-60.0*self.interval)
        widgets = [FormatLabel('Receiving Data: - '), BouncingBar(
            marker=RotatingMarker())]
        self.pbar = ProgressBar(maxval=self.print_max, widgets=widgets).start()
#         print "max "+ str(self.print_max)

        # converter for the colors gradient
    def rgb_to_hex(self, r, g, b):
        return '#%02X%02X%02X' % (r, g, b)

        # Rainbow color generator
    def rainbow_color_generator(self, max_color):
        color_list = []
        frequency = 0.3
        for compteur_lignes in xrange(max_color):
            red = sin(frequency*compteur_lignes*2 + 0)*127+128
            green = sin(frequency*compteur_lignes*2 + 2)*127+128
            blue = sin(frequency*compteur_lignes*2 + 4)*127+128
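            # Three sine waves with the same frequency, phase-shifted by two
            # radians each and mapped into the 0-255 range, cycle the red,
            # green and blue channels out of phase and so produce a
            # rainbow-like gradient.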

            color_list.append(self.rgb_to_hex(red, green, blue))

        return tuple(color_list)

    def plot_graph(self):

        #######################################################################
        # filter section
        #######################################################################
        self.local_stream = self.stream.copy()
        # Filter example
#         self.local_stream.filter('bandpass', freqmin=0.001, freqmax=0.5,corners=2, zerophase=True)
        #######################################################################

        # With this upscale factor the graph looks nice.
        upscale_factor = 30

        if args.rainbow:
            # Rainbow colors !
            self.color = self.rainbow_color_generator(
                int(args.nb_rainbow_colors))
        else:
            # Regular colors
            self.color = ('#000000', '#ff0000', '#0000ff', '#56a83c')

        self.local_stream.plot(
            fig=self.figure, type='dayplot', interval=self.interval,
            number_of_ticks=13, tick_format='%d/%m %Hh',
            size=(args.x_size * upscale_factor, args.y_size * upscale_factor),
            x_labels_size=8,
            y_labels_size=8, title=self.title, title_size=14, linewidth=0.5, right_vertical_labels=False,
            vertical_scaling_range=self.scale,
            subplots_adjust_left=0.03, subplots_adjust_right=0.99,
            subplots_adjust_top=0.95, subplots_adjust_bottom=0.1,
            one_tick_per_line=True,
            # black, red, blue, green
            color = self.color,
            show_y_UTC_label=False)

    def packetHandler(self, count, slpack):
        """
        Processes each packet received from the SeedLinkConnection.
        :type count: int
        :param count:  Packet counter.
        :type slpack: :class:`~obspy.seedlink.SLPacket`
        :param slpack: packet to process.
        :return: Boolean true if connection to SeedLink server should be
            closed and session terminated, false otherwise.
        """

        # check if not a complete packet
        if slpack is None or (slpack == SLPacket.SLNOPACKET) or \
                (slpack == SLPacket.SLERROR):
            return False

        # get basic packet info
        type = slpack.getType()

        # process INFO packets here
        if (type == SLPacket.TYPE_SLINF):
            return False
        if (type == SLPacket.TYPE_SLINFT):
#             print "Complete INFO:\n" + self.slconn.getInfoString()
            if self.infolevel is not None:
                return True
            else:
                return False

        # process packet data
        trace = slpack.getTrace()
        if trace is None:
            print(self.__class__.__name__ + ": blockette contains no trace")
            return False

        # new samples add to the main stream
        self.stream += trace
        self.stream.merge()

        now = UTCDateTime()

        # Stop time will be the next round date
        stop_time = UTCDateTime(
            now.year, now.month, now.day, now.hour, 0, 0)+3600
        start_time = stop_time-self.backtrace

        # Limit the stream size
        self.stream = self.stream.slice(start_time, stop_time)
        self.stream.trim(start_time, stop_time)

        self.title = self.stream.traces[0].stats.station+" "+self.stream.traces[0].stats.network+" "+self.stream.traces[
            0].stats.location+" "+self.stream.traces[0].stats.channel+' scale: '+str(self.scale) + " - non filtre"

        stream_time_length = self.stream.traces[
            0].stats.endtime - self.stream.traces[0].stats.starttime

        ### Until print_percentage of the data to plot has been received, we redraw after every initial_update_rate packets.
#         if (stream_time_length < (self.backtrace*self.print_percentage)):
#        if ((stream_time_length))<(self.backtrace-60.0*self.interval):
#         print str(stream_time_length)+"/"+str(self.print_max)
        if stream_time_length <= self.print_max:

            self.flip += 1

#             if ((stream_time_length))<(self.backtrace-60.0*self.interval):
            self.pbar.update(stream_time_length+1)
#             print str(stream_time_length)+"/"+str(self.print_max)
            if (self.flip > self.initial_update_rate):
                self.flip = 0
                self.figure.clear()
                self.plot_graph()

 #             self.pbar.finish()

        # Real time plotting
        # We plot each update_rate packet we received
        # if (stream_time_length >= (self.backtrace*self.print_percentage)):
        if stream_time_length > self.print_max:
#             print str(stream_time_length)+"/"+str(self.print_max)

            self.flip += 1
            if (self.flip > self.update_rate):
                self.figure.clear()
                self.plot_graph()
                self.flip = 0

        return False
Example #26
         comps = ['Z']
         tramef_Z = np.zeros((len(stations), len(TimeVec)))
 
     j = 0
     for istation, station in enumerate(stations):
         for comp in comps:
             files = eval("datafiles%s['%s']" % (comp, station))
             if len(files) != 0:
                 logging.debug("%s.%s Reading %i Files" %
                               (station, comp, len(files)))
                 stream = Stream()
                 for file in sorted(files):
                     st = read(file, format="MSEED")
                     stream += st
                     del st
                 stream.merge()
                 stream = stream.split()
                 for trace in stream:
                     data = trace.data
                     if len(data) > 2:
                         trace.detrend("demean")
                         trace.taper(0.01)
                     else:
                         trace.data *= 0
                     del data
                 logging.debug("%s.%s Merging Stream" % (station, comp))
                 #fills gaps with 0s and gives only one 'Trace'
                 stream.merge(fill_value=0)
                 logging.debug("%s.%s Slicing Stream to %s:%s" % (station, comp, utcdatetime.UTCDateTime(
                     goal_day.replace('-', '')), utcdatetime.UTCDateTime(goal_day.replace('-', '')) + goal_duration - stream[0].stats.delta))
 
Example #27
    path = '/tr1/telemetry_days/' + net2 + '_' + sta2 + '/' + str(
        ctime.year) + '/' + str(ctime.year) + '_' + str(
            ctime.julday).zfill(3) + '/' + loc2 + '_' + chan2 + '*'
    pathws = '/tr1/telemetry_days/' + net + '_' + sta + '/' + str(
        ctime.year) + '/' + str(ctime.year) + '_' + str(
            ctime.julday).zfill(3) + '/50_LWS*'
    st += read(path)
    stws += read(pathws)
    ctime += 24. * 60. * 60.

if debug:
    print(pathws)
    print(st)

# Merge traces, filling any data gaps with zeros, then trim the streams to the requested start and end times
st.merge(fill_value=0.)
stws.merge(fill_value=0.)
st.trim(stime, etime)
stws.trim(stime, etime)

# Remove the wind-speed response: convert the data to float64 and scale by 0.1
for tr in stws:
    tr.data = tr.data.astype(np.float64)
    tr.data *= 0.1

# Creating spectrogram
specgram, freq, time = mlab.specgram(st[0].data,
                                     Fs=st[0].stats.sampling_rate,
                                     NFFT=nfft,
                                     pad_to=nfft * 2,
                                     noverlap=int(0.25 * nfft),
Example #28
	debug = False

#Get the response
if debug:
	print 'Here is the sensor:' + parserval.sensorType
pazval = getpaz(parserval.sensorType)

#Read in the data
try:	
	st = Stream()
	for dataString in parserval.data:
		if debug:
			print 'Reading in the data trace:' + dataString
		st += read(dataString)

	st.merge()
except:
	print('Unable to read the data')
	sys.exit()

if debug:
	for tr in st:
		print 'Here is the data stream: ' + str(tr)		
		
	print 'Here is the window length of your PSDs: ' + str(parserval.len)
	print 'Here is the overlap: ' + str(parserval.overlap)


#Make the PDF
ppsd = PPSD(st[0].stats,paz=pazval,ppsd_length=parserval.len,overlap=parserval.overlap)
for tr in st:
Example #29
class sdschunk(chunk):
	"""
	This class walks an SDS archive by incremental time and
	handles the synchronization of gaps between the supplied traces.

	One instance handles one stream, but a list of streams can be passed
	while the get() method is called, which allows fetching a synchronized
	list of traces and streams.

	The get() method returns True when data was added to others and
	False when no data was added to others.
	"""
	def __init__(self, path, N, S, L, C, verbose = False):
		super().__init__(N, S, L, C, verbose)
		
		if not os.path.isdir(path):
			log(" Bad path to SDS", level=2, verbose=verbose)
		
		self.path = path
		self.N = N
		self.S = S
		self.L = L
		self.C = C
		self._verbose = verbose
		
		self._S = Stream()
		self._last_time = None
		self._visited = []

		log(f" I:> New SDSCHUNK {N}.{S}.{L}.{C} @ {self.path}", level=0,
					verbose=verbose)

	def _make_path(self, d):
		path = "%s/%04d/%s/%s/%s.D/%s.%s.%s.%s.D.%04d.%03d" % (self.path,
					d.year, self.N, self.S, self.C,self.N, self.S, self.L,
					self.C, d.year, d.julday)
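		# SDS layout: <root>/<year>/<net>/<sta>/<chan>.D/<net>.<sta>.<loc>.<chan>.D.<year>.<julday>,
		# e.g. <root>/2019/XX/STA01/HHZ.D/XX.STA01..HHZ.D.2019.228 (hypothetical values).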

		if path in self._visited: return None
		self._visited.append(path)

		return path

	def _update(self, s, e):
		s2 = (UTCDateTime(s.date) - 86400 / 2) if UTCDateTime(s).hour == 0 and\
					UTCDateTime(s).minute < 30 else UTCDateTime(s.date)
		e2 = (UTCDateTime(e.date) + 2 * 86400) if UTCDateTime(e).hour == 23 and\
					UTCDateTime(e).minute > 30 else (UTCDateTime(e.date) + 1*86400)

		while s2 < e2:
			self._extend(self._make_path(s2))
			s2 += 86400

		self._clean(s)

		return self._last_time

	def _extend(self, filename):
		if filename is None or not os.path.isfile(filename): return False

		self._S.extend(read(filename))
		self._S.merge(method = -1)
		self._S.sort()

		log(f" W:> READ: {filename}", level=0, verbose=self._verbose)

		self._last_time = self._S[-1].stats.endtime

		return True
Example #30
def main():
    db = connect()

    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s [%(levelname)s] %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')

    logging.info('*** Starting: Compute SARA_RATIO ***')

    while is_next_job(db, jobtype='SARA_RATIO'):
        t0 = time.time()
        jobs = get_next_job(db, jobtype='SARA_RATIO')
        stations = []
        pairs = []
        refs = []

        for job in jobs:
            refs.append(job.ref)
            pairs.append(job.pair)
            netsta1, netsta2 = job.pair.split(':')
            stations.append(netsta1)
            stations.append(netsta2)
            goal_day = job.day

        stations = np.unique(stations)

        logging.info("New SARA Job: %s (%i pairs with %i stations)" %
                     (goal_day, len(pairs), len(stations)))

        logging.debug(
            "Preloading all envelopes and applying site and sensitivity")
        all = {}
        for station in stations:
            tmp = get_sara_param(db, station)
            sensitivity = tmp.sensitivity
            site_effect = tmp.site_effect
            try:
                tmp = read(
                    os.path.join("SARA", "ENV", station,
                                 "%s.MSEED" % goal_day))
            except:
                logging.debug("Error reading %s:%s" % (station, goal_day))
                continue
            for trace in tmp:
                trace.data /= (sensitivity * site_effect)
            all[station] = tmp

        logging.debug("Computing all pairs")
        for job in jobs:
            netsta1, netsta2 = job.pair.split(':')
            net1, sta1 = netsta1.split(".")
            net2, sta2 = netsta2.split(".")
            trace = Trace()
            if netsta1 not in all or netsta2 not in all:
                update_job(db,
                           job.day,
                           job.pair,
                           'SARA_RATIO',
                           'D',
                           ref=job.ref)
                continue
            tmp = Stream()
            for tr in all[netsta1]:
                tmp += tr
            for tr in all[netsta2]:
                tmp += tr
            # tmp = Stream(traces=[all[netsta1], all[netsta2]])
            # print(tmp)
            tmp.merge()
            tmp = make_same_length(tmp)
            tmp.merge(fill_value=np.nan)
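            # make_same_length (as its name suggests) brings both envelopes to
            # a common length, and filling gaps with NaN keeps the samples
            # aligned, so the element-wise ratio below divides values taken at
            # the same time.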
            if len(tmp) > 1:
                trace.data = tmp.select(network=net1, station=sta1)[0].data / \
                             tmp.select(network=net2, station=sta2)[0].data
                trace.stats.starttime = tmp[0].stats.starttime
                trace.stats.delta = tmp[0].stats.delta

                env_output_dir = os.path.join('SARA', 'RATIO',
                                              job.pair.replace(":", "_"))
                if not os.path.isdir(env_output_dir):
                    os.makedirs(env_output_dir)
                trace.write(os.path.join(env_output_dir, goal_day + '.MSEED'),
                            format="MSEED",
                            encoding="FLOAT32")

            update_job(db, job.day, job.pair, 'SARA_RATIO', 'D', ref=job.ref)
            del tmp
        logging.info("Done. It took %.2f seconds" % (time.time() - t0))
Example #31
    def getWaveform(self, startTime, endTime, scnl):
        ''' Get the waveform data for the specified parameters.

        Parameters
        ----------
        startTime : UTCDateTime
            The begin datetime of the data to fetch.

        endTime : UTCDateTime
            The end datetime of the data to fetch.

        scnl : List of tuples
            The SCNL codes of the data to request.


        Returns
        -------
        stream : :class:`obspy.core.Stream`
            The requested waveform data. All traces are packed into one stream.
        '''
        from obspy.core import Stream

        #self.logger.debug("Querying...")
        #self.logger.debug('startTime: %s', startTime)
        #self.logger.debug('endTime: %s', endTime)
        #self.logger.debug("%s", scnl)
        self.logger.debug("Getting the waveform for SCNL: %s from %s to %s...", scnl, startTime.isoformat(), endTime.isoformat())

        stream = Stream()

        # Trim the stock stream to new limits.
        self.trim_stock(start_time = startTime, end_time = endTime)

        for curScnl in scnl:
            curStation = curScnl[0]
            curChannel = curScnl[1]
            curNetwork = curScnl[2]
            curLocation = curScnl[3]


            stock_stream = self.get_from_stock(station = curStation,
                                               channel = curChannel,
                                               network = curNetwork,
                                               location = curLocation,
                                               start_time = startTime,
                                               end_time = endTime)

            if len(stock_stream) > 0:
                cur_trace = stock_stream.traces[0]
                cur_start_time = cur_trace.stats.starttime
                cur_end_time = cur_trace.stats.starttime + old_div(cur_trace.stats.npts, cur_trace.stats.sampling_rate)

                stream += stock_stream.split()

                if (cur_start_time - startTime) > old_div(1,cur_trace.stats.sampling_rate):
                    curStream = self.request_from_server(station = curStation,
                                                         channel = curChannel,
                                                         network = curNetwork,
                                                         location = curLocation,
                                                         start_time = startTime,
                                                         end_time = cur_start_time)
                    stream += curStream

                if (endTime - cur_end_time) > old_div(1,cur_trace.stats.sampling_rate):
                    curStream = self.request_from_server(station = curStation,
                                                         channel = curChannel,
                                                         network = curNetwork,
                                                         location = curLocation,
                                                         start_time = cur_end_time,
                                                         end_time = endTime)
                    stream += curStream

            else:
                curStream = self.request_from_server(station = curStation,
                                                     channel = curChannel,
                                                     network = curNetwork,
                                                     location = curLocation,
                                                     start_time = startTime,
                                                     end_time = endTime)
                stream += curStream

            self.logger.debug('Merging stream.')
            stream.merge()

        # Trim the stream to the requested time span using only the samples
        # inside the time span.
        stream = stream.trim(starttime = startTime,
                             endtime = endTime,
                             nearest_sample = False)

        return stream
Example #32
# Read windspeed data
st_WS = read(windsdatafile).select(channel='VWS')

# (st_SP + st_WS).plot(equal_scale=False, method='full')

# Create a stream just with winds that overlap SP data
st_windoverlap = Stream()
for tr in st_SP:
    stime = tr.stats[
        'starttime'] + 2.  # strip out first two seconds with transient
    etime = tr.stats['endtime']
    st_temp = read(windsdatafile, starttime=stime,
                   endtime=etime).select(channel='VWS')
    st_windoverlap += st_temp

st_windoverlap = st_windoverlap.merge().split()
# (st_windoverlap+st_SP).plot(method='full', equal_scale=False)
# One more plot with original
# (st_windoverlap+st_SP_IR).plot(method='full', equal_scale=False)
st_windoverlap.decimate(15)  #reduce sampling to 30 seconds
# (st_windoverlap+st_SP).plot(method='full', equal_scale=False)
print(st_windoverlap)

# Test comparison
# Initiate figure instance and clear csv file for output
fig = plt.figure()
csvfile = 'WS_SPviking_rms.csv'
with open(csvfile, 'w') as f:  # This should empty the file and add headers
    writer = csv.writer(f)
    writer.writerow(["RMS wind speed", "RMS SP Viking output"])
Example #33
class WaveClient(object):
    '''The WaveClient class.


    Attributes
    ----------

    '''

    def __init__(self, name, description = '', stock_window = 3600):
        '''The constructor.

        Create an instance of the WaveClient class.

        Parameters
        ----------
        name : String
            The name of the waveclient.

        description : String
            The description of the waveclient.

        stock_window : float
            The time-window in seconds of the stock stream kept before and
            after the currently displayed time period.
        '''
        # The logger.
        loggerName = __name__ + "." + self.__class__.__name__
        self.logger = logging.getLogger(loggerName)

        # The name of the waveclient.
        self.name = name

        # The description of the waveclient.
        self.description = description

        # The available data of the waveclient. This includes the
        # currently displayed time period and the preloaded data in
        # front and behind the time period.
        self.stock = Stream()


        # The trace data gaps present in data files.
        self.stock_data_gaps = []


        # The threading lock object for the stock stream.
        self.stock_lock = threading.Lock()

        # The threads used for preloading data.
        self.preload_threads = []

        # The time-window in seconds of the stock stream before and after the currently
        # displayed time-period. 
        self.stock_window = stock_window

    @property
    def mode(self):
        ''' The mode of the waveclient.

        '''
        return self.__class__.__name__

    @property
    def pickle_attributes(self):
        ''' The attributes which can be pickled.
        '''
        d = {}
        d['stock_window'] = self.stock_window
        return d


    def get_from_stock(self, network, station, location, channel, start_time, end_time):
        ''' Get the data of the specified scnl from the stock data.

        Parameters
        ----------
        network : String
            The network name.

        station : String
            The station name.

        location : String
            The location specifier.

        channel : String
            The channel name.

        start_time : UTCDateTime
            The begin datetime of the data to fetch.

        end_time : UTCDateTime
            The end datetime of the data to fetch.

        Returns
        -------
        stream : :class:`obspy.core.Stream`
            The requested waveform data. All traces are packed into one stream.

        '''
        if location == '--':
            location = None
        self.stock_lock.acquire()
        curStream = self.stock.select(station = station,
                                      channel = channel,
                                      network = network,
                                      location = location)
        curStream = curStream.copy()
        self.stock_lock.release()
        self.logger.debug('Selected stream from stock: %s', curStream)
        curStream.trim(starttime = start_time,
                       endtime = end_time)
        self.logger.debug('Trimmed stream to: %s', curStream)

        return curStream


    def add_to_stock(self, stream):
        ''' Add the passed stream to the stock data.

        '''
        # Merge and split the stream to handle overlapping data.
        try:
            stream.merge()
        except Exception:
            self.logger.exception("Error when merging the loaded stream: \n%s", stream)
            return

        stream = stream.split()
        self.stock_data_gaps.extend(stream.get_gaps())

        self.stock_lock.acquire()
        self.logger.debug("stockstream: %s", self.stock)
        self.logger.debug("add stream: %s", stream)
        self.stock = self.stock + stream.copy()
        try:
            self.stock.merge()
        except Exception:
            self.logger.exception("Error when merging the stock stream: \n%s", self.stock)
        self.logger.debug("stockstream: %s", self.stock)
        self.stock_lock.release()


    def trim_stock(self, start_time, end_time):
        ''' Trim the stock streams.

        '''
        self.stock_lock.acquire()
        self.stock.trim(starttime = start_time - self.stock_window, endtime = end_time + self.stock_window)
        self.stock_lock.release()
        remove_gaps = [x for x in self.stock_data_gaps if (x[4] > end_time + self.stock_window) or (x[5] < start_time - self.stock_window)]
        for cur_gap in remove_gaps:
            self.stock_data_gaps.remove(cur_gap)
        self.logger.debug('Removed gaps: %s', remove_gaps)
        self.logger.debug('Trimmed stock stream to %s - %s.', start_time - self.stock_window, end_time + self.stock_window)
        self.logger.debug('stock: %s', self.stock)


    def getWaveform(self,
                    startTime,
                    endTime,
                    network = None,
                    station = None,
                    location = None,
                    channel = None):
        ''' Get the waveform data for the specified parameters.

        Parameters
        ----------
        network : String
            The network name.

        station : String
            The station name.

        location : String
            The location specifier.

        channel : String
            The channel name.

        startTime : UTCDateTime
            The begin datetime of the data to fetch.

        endTime : UTCDateTime
            The end datetime of the data to fetch.

        Returns
        -------
        stream : :class:`obspy.core.Stream`
            The requested waveform data. All traces are packed into one stream.
        '''
        assert False, 'getWaveform must be defined'
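A minimal sketch of how a concrete client might override getWaveform; the load_data helper used below is hypothetical, real clients implement the actual I/O (compare the getWaveform implementations in the earlier examples):

class FileWaveClient(WaveClient):
    ''' A minimal sketch of a concrete waveclient. '''

    def getWaveform(self, startTime, endTime, network=None, station=None,
                    location=None, channel=None):
        # load_data is a hypothetical helper doing the actual file I/O and
        # returning an obspy Stream for the requested channel.
        stream = self.load_data(network, station, location, channel,
                                startTime, endTime)
        # Reuse the stock bookkeeping provided by the base class.
        self.add_to_stock(stream)
        stream.trim(starttime=startTime, endtime=endTime)
        return stream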
Example #34
   
    if st.count() > 0: # need waveforms to continue
        std = Stream()
        for tr in st:
            num = tr.stats.npts
            samp = tr.stats.sampling_rate             
            if num >= (samp*86400)*.8:
                std.append(tr)
        
        print('number of good waveforms ', std.count())
        if std.count() < 3: # want 3 or more waveforms for templates
            print('skipping event not enough good waveforms')
            
        else:         
            std.sort(['starttime'])
            std.merge(fill_value="interpolate")
            st1=std.copy()
            
            start = UTCDateTime(year = yr, julday = days)
            end = start + 86400
            st_filter=st1.trim(starttime=start, endtime=end)
        
#            print('GENERATING TEMPLATE FOR ' + str(start) +   ' SAVING AS MINISEED FILES & PLOTS ARE SAVED TO FOLDER.')
            # template matching
            template = template_gen.from_meta_file(meta_file = new_catalog, st = st_filter,
                                                   lowcut = 3, highcut = 10, filt_order = 4, samp_rate = 25,
                                                   prepick = 0.15, length = 4.6, swin = 'P',
                                                   parallel = True)
            
            if len(template[0]) < 3:
                print('Skipping template -- %i picks & %i WF in template' % (n_picks[i], len(template[0])))
Example #35
def merge(nch,dstart,dend):
  
  ''' Reads the files 'fileName' from directory 'dataDir' into a stream
      called 'st'.

      The i- and m-loops run over the channels/geophones, so the merged data
      of all geophones is plotted at the end.
      The k-loop runs over the data files, so traces from files dstart to
      dend-1 (e.g. 1 to 113, which equals one hour of measurements) are
      merged together.

      For each channel the corresponding trace is picked from all files and
      written into a new stream 'new_stream'. In this stream all traces
      (consecutive in time) are then merged into one long trace using merge
      method 0 with fill_value='interpolate'. As new_stream then consists of
      only one trace, the index '[0]' can always be used.

      't' is an array built to provide a combined time axis for the merged
      trace. The new_stream data is finally plotted against this time axis,
      offset vertically per channel, so all channels appear in one plot. '''

  # here you load all the functions you need to use
  from obspy.seg2.seg2 import readSEG2
  from obspy.core import Stream
  import matplotlib.pyplot as plt
  import numpy as np
  from numpy import matrix
  import time

  time_start = time.clock()

  dataDir = "/home/johannes/~home/Steinach/DEC_JAN/DEC_night/"
  outdir = "/home/johannes/~home/Steinach/outmseed/"

  ax = plt.subplot(111)

  TR = []

  for a in range(0,nch):
    TR.append([])

  for k in range(dstart, dend, 1):
    fname = '%d' %(k)
    fileName = fname + ".dat" 
    st = readSEG2(dataDir + fileName)
    for i in range(0,nch):
      TR[i].append(st[i])
      
      
  for m in range(0,nch):
    new_stream = Stream(traces=TR[m])
    new_stream.merge(method=0, fill_value='interpolate')

    start = new_stream[0].stats.starttime
    end = new_stream[0].stats.endtime

    timeframe = str(m+1)+ "_" + str(start.year) +'.'+ str(start.julday) +'.'+ str(start.hour) +'.'+ str(start.minute) +'.'+ str(start.second) \
      +'-'+ str(end.year) +'.'+ str(end.julday) +'.'+ str(end.hour) +'.'+ str(end.minute) +'.'+ str(end.second)

    new_stream.write(outdir + "CH" + str(m+1) + "/" + timeframe + ".mseed", format="MSEED")

    new_stream[0].normalize()
    dt = new_stream[0].stats.starttime.timestamp


    
    t = np.linspace(new_stream[0].stats.starttime.timestamp - dt,
     new_stream[0].stats.endtime.timestamp -dt, new_stream[0].stats.npts)

    ax.plot(t, new_stream[0].data + 1.5* m)

  time_elapsed = (time.clock() - time_start)
  print(time_elapsed)
  plt.show()
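A minimal usage sketch (the channel count 24 is a hypothetical value; per the docstring, files 1.dat through 113.dat make up roughly one hour of data):

  merge(24, 1, 114)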
Example #36
def readANTELOPE(database,
                 station=None,
                 channel=None,
                 starttime=None,
                 endtime=None):
    '''
    Reads a portion of a Antelope wfdisc table to a Stream.
    
    Attempts to return one Trace per line of the 'wfdisc' view passed.    
    Additionally, will filter and cut with respect to any of the fields
    in the primary key IF specified. (sta chan time::endtime)
    
    NOTE: Currently MUST have both times (start/end) or neither.
    The returned Traces will have a new attribute, 'db'.

    :type database: string or antelope.datascope.Dbptr
    :param database: Antelope database name or pointer
    :type station: string
    :param station: Station expression to subset
    :type channel: string
    :param channel: Channel expression to subset
    :type starttime: :class: `~obspy.core.utcdatetime.UTCDateTime`
    :param starttime: Desired start time
    :type endtime: :class: `~obspy.core.utcdatetime.UTCDateTime`
    :param endtime: Desired end time
        
    :rtype: :class: `~obspy.core.stream.Stream`
    :return: Stream with one Trace for each row of the database view
    
    .. rubric:: Example
    
    >>> st = readANTELOPE('/Volumes/colza_HD/dbs/land', station='TOL0', channel='LH.',
                        starttime=UTCDateTime(2008,6,13), endtime=UTCDateTime(2008,6,14))
    >>> print(st)
    6 Trace(s) in Stream:
    XA.TOL0..LHE | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:11.640000Z | 1.0 Hz, 253 samples
    XA.TOL0..LHE | 2008-06-13T00:04:12.640000Z - 2008-06-13T23:59:59.640000Z | 1.0 Hz, 86148 samples
    XA.TOL0..LHN | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:11.640000Z | 1.0 Hz, 253 samples
    XA.TOL0..LHN | 2008-06-13T00:04:12.640000Z - 2008-06-13T23:59:59.640000Z | 1.0 Hz, 86148 samples
    XA.TOL0..LHZ | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:21.640000Z | 1.0 Hz, 263 samples
    XA.TOL0..LHZ | 2008-06-13T00:04:22.640000Z - 2008-06-13T23:59:59.640000Z | 1.0 Hz, 86138 samples
    
    Also adds a Dbrecord as an attribute of the Trace
    
    >>> st[0].db
    Dbrecord('View43' -> TOL0 LHE 1213229044.64::1213315451.64)
 
    '''
    if isinstance(database, Dbptr):
        db = Dbptr(database)
    elif isinstance(database, str):
        db = dbopen(database, 'r')
        #        db = dblookup(db,table='wfdisc')
        db = db.lookup(table='wfdisc')
    else:
        raise TypeError("Must input a string or pointer to a valid database")

    if station is not None:
        db = db.subset('sta=~/{0}/'.format(station))
    if channel is not None:
        db = db.subset('chan=~/{0}/'.format(channel))
    if starttime is not None and endtime is not None:
        ts = starttime.timestamp
        te = endtime.timestamp
        db = db.subset('endtime > {0} && time < {1}'.format(ts, te))
    else:
        ts = starttime
        te = endtime
    assert db.query(
        dbRECORD_COUNT) != 0, "No records for given time period"

    st = Stream()
    for db.record in range(db.query(dbRECORD_COUNT)):
        fname = db.filename()
        dbr = Dbrecord(db)
        t0 = UTCDateTime(dbr.time)
        t1 = UTCDateTime(dbr.endtime)
        if dbr.time < ts:
            t0 = starttime
        if dbr.endtime > te:
            t1 = endtime
#        print("Loading file", fname, type(fname))
# db.filename returns a tuple, but obspy.read requires a string
# [1, filename]
# Just use the filename at position 1. This may cause problems when more than
# one miniSEED file is required for a given time range.
        fname = fname[1]
        #        print("Loading file", fname, type(fname))
        _st = read(fname, starttime=t0, endtime=t1)  # add format?
        _st = _st.select(station=dbr.sta,
                         channel=dbr.chan)  #not location aware
        _st[0].db = dbr
        st += _st
    # Close what we opened, BUT garbage collection may take care of this:
    # if you have an open pointer but pass db name as a string, global
    # use of your pointer won't work if this is uncommented:
    #
    #if isinstance(database,str):
    #    db.close()

# A time range covering two or more files leads to separate traces for each
# file. Merge them together with st.merge() without gap interpolation. See
# obspy merge() for details and the different merge methods.
    st.merge()
    return st
Example #37
stime = UTCDateTime('2019-08-16 12:59:10')
etime = stime + 120

client = Client()
inv = client.get_stations(network=net,
                          station=sta,
                          starttime=stime,
                          endtime=etime,
                          channel=chan,
                          level="response")
st = Stream()
st += client.get_waveforms(net, sta, loc, chan, stime, etime)

st.detrend('constant')
st.merge(fill_value=0)
st.attach_response(inv)
st.remove_response(output="DISP")
#st.rotate(method="->ZNE",inventory=inv)
st.filter("bandpass", freqmin=.5, freqmax=5)
tr = st[0]
t = np.linspace(0, (tr.stats.npts - 1) / tr.stats.sampling_rate,
                num=tr.stats.npts)

fig = plt.figure(1, figsize=(12, 12))
plt.ylabel('Displacement (mm)', fontsize=14)
plt.xlim([0, 120])
plt.ylim([-.02, .02])
plt.xlabel('seconds after origin', fontsize=14)
plt.title('%s-%s-%s-%s, 2019-08-16, Hutchinson, KS Earthquake' %
          (net, sta, loc, chan),
Example #38
def preprocess(db, stations, comps, goal_day, params, responses=None):

    datafiles = {}
    output = Stream()
    for station in stations:
        datafiles[station] = {}
        net, sta = station.split('.')
        gd = datetime.datetime.strptime(goal_day, '%Y-%m-%d')
        files = get_data_availability(db,
                                      net=net,
                                      sta=sta,
                                      starttime=gd,
                                      endtime=gd)
        for comp in comps:
            datafiles[station][comp] = []
        for file in files:
            if file.comp[-1] not in comps:
                continue
            fullpath = os.path.join(file.path, file.file)
            datafiles[station][file.comp[-1]].append(fullpath)

    for istation, station in enumerate(stations):
        net, sta = station.split(".")
        for comp in comps:
            files = datafiles[station][comp]
            if len(files) != 0:
                logging.debug("%s.%s Reading %i Files" %
                              (station, comp, len(files)))
                stream = Stream()
                for file in sorted(files):
                    st = read(file,
                              dtype=np.float,
                              starttime=UTCDateTime(gd),
                              endtime=UTCDateTime(gd) + 86400)
                    tmp = st.select(network=net, station=sta, component=comp)
                    if not len(tmp):
                        for tr in st:
                            tr.stats.network = net
                        st = st.select(network=net,
                                       station=sta,
                                       component=comp)
                    else:
                        st = tmp
                    for tr in st:
                        tr.data = tr.data.astype(np.float)
                    stream += st
                    del st
                stream.sort()
                stream.merge(method=1,
                             interpolation_samples=3,
                             fill_value=None)
                stream = stream.split()

                logging.debug("Checking sample alignment")
                for i, trace in enumerate(stream):
                    stream[i] = check_and_phase_shift(trace)

                logging.debug("Checking Gaps")
                if len(getGaps(stream)) > 0:
                    max_gap = 10
                    only_too_long = False
                    while getGaps(stream) and not only_too_long:
                        too_long = 0
                        gaps = getGaps(stream)
                        for gap in gaps:
                            if int(gap[-1]) <= max_gap:
                                stream[gap[0]] = stream[gap[0]].__add__(
                                    stream[gap[1]],
                                    method=1,
                                    fill_value="interpolate")
                                stream.remove(stream[gap[1]])
                                break
                            else:
                                too_long += 1
                        if too_long == len(gaps):
                            only_too_long = True
                stream = stream.split()
                taper_length = 20.0  # seconds
                for trace in stream:
                    if trace.stats.npts < 4 * taper_length * trace.stats.sampling_rate:
                        stream.remove(trace)
                    else:
                        trace.detrend(type="demean")
                        trace.detrend(type="linear")
                        trace.taper(max_percentage=None, max_length=1.0)

                if not len(stream):
                    logging.debug(" has only too small traces, skipping...")
                    continue

                for trace in stream:
                    logging.debug("%s.%s Highpass at %.2f Hz" %
                                  (station, comp, params.preprocess_highpass))
                    trace.filter("highpass",
                                 freq=params.preprocess_highpass,
                                 zerophase=True)

                    if trace.stats.sampling_rate != params.goal_sampling_rate:
                        logging.debug(
                            "%s.%s Lowpass at %.2f Hz" %
                            (station, comp, params.preprocess_lowpass))
                        trace.filter("lowpass",
                                     freq=params.preprocess_lowpass,
                                     zerophase=True,
                                     corners=8)

                        if params.resampling_method == "Resample":
                            logging.debug(
                                "%s.%s Downsample to %.1f Hz" %
                                (station, comp, params.goal_sampling_rate))
                            trace.data = resample(
                                trace.data, params.goal_sampling_rate /
                                trace.stats.sampling_rate, 'sinc_fastest')

                        elif params.resampling_method == "Decimate":
                            decimation_factor = trace.stats.sampling_rate / params.goal_sampling_rate
                            if not int(decimation_factor) == decimation_factor:
                                logging.warning(
                                    "%s.%s CANNOT be decimated by an integer factor, consider using Resample or Lanczos methods"
                                    " Trace sampling rate = %i ; Desired CC sampling rate = %i"
                                    %
                                    (station, comp, trace.stats.sampling_rate,
                                     params.goal_sampling_rate))
                                sys.stdout.flush()
                                sys.exit()
                            logging.debug("%s.%s Decimate by a factor of %i" %
                                          (station, comp, decimation_factor))
                            trace.data = trace.data[::int(decimation_factor)]

                        elif params.resampling_method == "Lanczos":
                            logging.debug(
                                "%s.%s Downsample to %.1f Hz" %
                                (station, comp, params.goal_sampling_rate))
                            trace.data = np.array(trace.data)
                            trace.interpolate(
                                method="lanczos",
                                sampling_rate=params.goal_sampling_rate,
                                a=1.0)

                        trace.stats.sampling_rate = params.goal_sampling_rate

                if get_config(db, 'remove_response', isbool=True):
                    logging.debug('%s Removing instrument response' %
                                  stream[0].id)
                    response_prefilt = eval(get_config(db, 'response_prefilt'))

                    response = responses[responses["channel_id"] ==
                                         stream[0].id]
                    if len(response) > 1:
                        response = response[
                            response["start_date"] < UTCDateTime(gd)]
                        response = response[
                            response["end_date"] > UTCDateTime(gd)]
                    elif len(response) == 0:
                        logging.info("No instrument response information "
                                     "for %s, exiting" % stream[0].id)
                        sys.exit()
                    datalesspz = response["paz"].values[0]
                    stream.simulate(
                        paz_remove=datalesspz,
                        remove_sensitivity=True,
                        pre_filt=response_prefilt,
                        paz_simulate=None,
                    )
                for tr in stream:
                    tr.data = tr.data.astype(np.float32)
                output += stream
                del stream
            del files
    clean_scipy_cache()
    return 0, output
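
# The gap handling above relies on a getGaps(stream) helper that is not shown
# in this excerpt. A minimal sketch of one possible implementation (an
# assumption, not the original project's code): for each pair of consecutive
# traces with the same id it returns the two trace indices and the gap length
# in samples, matching the gap[0], gap[1] and gap[-1] accesses above.
def getGaps(stream):
    gaps = []
    for i in range(len(stream) - 1):
        left, right = stream[i], stream[i + 1]
        if left.id != right.id:
            continue
        gap_seconds = right.stats.starttime - left.stats.endtime
        gap_samples = gap_seconds * left.stats.sampling_rate
        if gap_samples > 0:
            gaps.append([i, i + 1, gap_samples])
    return gaps
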
Example #39
0
    estime = ctime + 24 * 60 * 60.
    #st += read('/tr1/telemetry_days/II_BFO/2019/2019_' + str(day).zfill(3) + '/*LH*')
    st += client.get_waveforms("II",
                               "XBFO",
                               "*",
                               "LH*",
                               ctime,
                               estime,
                               attach_response=False)
#st.trim(endtime = UTCDateTime('2019-009T18:59:59.0'))
for tr in st:
    if tr.stats.channel == 'LH1':
        tr.stats.channel = 'LHN'
    if tr.stats.channel == 'LH2':
        tr.stats.channel = 'LHE'
st.merge()
st.sort(reverse=True)
# Data is in ZNE format
if debug:
    print(st)

comp = 'Z'
length = 400000
#length = 2**10
overlap = 0.5
#Treat data1 as the reference and rotate 2 and three to match 1
#angVec = [0., 0., 1.]

# For ENZ data
azis = [0., 0., 90.]
dips = [-90., 0., 0.]
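
# The azimuth/dip lists above describe per-component sensor orientations in
# the form expected by obspy.signal.rotate.rotate2zne. A short sketch with
# synthetic data (which physical component corresponds to which entry is an
# assumption of this illustration, not taken from the original script):
import numpy as np
from obspy.signal.rotate import rotate2zne

d1 = np.random.randn(100)  # component with azimuth azis[0], dip dips[0]
d2 = np.random.randn(100)  # component with azimuth azis[1], dip dips[1]
d3 = np.random.randn(100)  # component with azimuth azis[2], dip dips[2]
z, n, e = rotate2zne(d1, azis[0], dips[0],
                     d2, azis[1], dips[1],
                     d3, azis[2], dips[2])
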
Example #40
0
File: waveform.py  Project: obspy/branches
class WaveformPlotting(object):
    """
    Class that provides several solutions for plotting large and small waveform
    data sets.

    .. warning::
        This class should NOT be used directly, instead use the
        :meth:`~obspy.core.stream.Stream.plot` method of the
        ObsPy :class:`~obspy.core.stream.Stream` or
        :class:`~obspy.core.trace.Trace` objects.

    It uses matplotlib to plot the waveforms.
    """

    def __init__(self, **kwargs):
        """
        Checks some variables and maps the kwargs to class variables.
        """
        self.stream = kwargs.get('stream')
        # Check if it is a Stream or a Trace object.
        if isinstance(self.stream, Trace):
            self.stream = Stream([self.stream])
        elif not isinstance(self.stream, Stream):
            msg = 'Plotting is only supported for Stream or Trace objects.'
            raise TypeError(msg)
        # Stream object should contain at least one Trace
        if len(self.stream) < 1:
            msg = "Empty object"
            raise IndexError(msg)
        # Type of the plot.
        self.type = kwargs.get('type', 'normal')
        # Start- and endtimes of the plots.
        self.starttime = kwargs.get('starttime', None)
        self.endtime = kwargs.get('endtime', None)
        self.fig_obj = kwargs.get('fig', None)
        # If no times are given take the min/max values from the stream object.
        if not self.starttime:
            self.starttime = min([trace.stats.starttime for \
                             trace in self.stream])
        if not self.endtime:
            self.endtime = max([trace.stats.endtime for \
                           trace in self.stream])
        # Map stream object and slice just in case.
        self.stream = self.stream.slice(self.starttime, self.endtime)
        # normalize times
        if self.type == 'relative':
            dt = self.starttime
            # fix plotting boundaries
            self.endtime = UTCDateTime(self.endtime - self.starttime)
            self.starttime = UTCDateTime(0)
            # fix stream times
            for tr in self.stream:
                tr.stats.starttime = UTCDateTime(tr.stats.starttime - dt)
        # Whether to use straight plotting or the fast minmax method.
        self.plotting_method = kwargs.get('method', 'fast')
        # Below that value the data points will be plotted normally. Above it
        # the data will be plotted using a different approach (details see
        # below). Can be overwritten by the above self.plotting_method kwarg.
        self.max_npts = 400000
        # If automerge is enabled. Merge traces with the same id for the plot.
        self.automerge = kwargs.get('automerge', True)
        # Set default values.
        # The default value for the size is determined dynamically because
        # there might be more than one channel to plot.
        self.size = kwargs.get('size', None)
        # Values that will be used to calculate the size of the plot.
        self.default_width = 800
        self.default_height_per_channel = 250
        if not self.size:
            self.width = 800
            # Check the kind of plot.
            if self.type == 'dayplot':
                self.height = 600
            else:
                # One plot for each trace.
                if self.automerge:
                    count = []
                    for tr in self.stream:
                        if hasattr(tr.stats, 'preview') and tr.stats.preview:
                            tr_id = tr.id + 'preview'
                        else:
                            tr_id = tr.id
                        if not tr_id in count:
                            count.append(tr_id)
                    count = len(count)
                else:
                    count = len(self.stream)
                self.height = count * 250
        else:
            self.width, self.height = self.size
        # Interval length in minutes for dayplot.
        self.interval = 60 * kwargs.get('interval', 15)
        # Scaling.
        self.vertical_scaling_range = kwargs.get('vertical_scaling_range',
                                                 None)
        # Dots per inch of the plot. Might be useful for printing plots.
        self.dpi = kwargs.get('dpi', 100)
        # Color of the graph.
        if self.type == 'dayplot':
            self.color = kwargs.get('color', ('#000000','#B2000F', '#004C12',
                                              '#0E01FF'))
            if isinstance(self.color, basestring):
                self.color = (self.color,)
            self.number_of_ticks = kwargs.get('number_of_ticks', None)
        else:
            self.color = kwargs.get('color', 'k')
            self.number_of_ticks = kwargs.get('number_of_ticks', 5)
        # Background and face color.
        self.background_color = kwargs.get('bgcolor', 'w')
        self.face_color = kwargs.get('face_color', 'w')
        # Transparency. Overwrites background and facecolor settings.
        self.transparent = kwargs.get('transparent', False)
        if self.transparent:
            self.background_color = None
        # Ticks.
        self.tick_format = kwargs.get('tick_format', '%H:%M:%S')
        self.tick_rotation = kwargs.get('tick_rotation', 0)
        # Whether or not to save a file.
        self.outfile = kwargs.get('outfile')
        self.handle = kwargs.get('handle')
        # File format of the resulting file. Usually defaults to PNG but might
        # be dependent on your matplotlib backend.
        self.format = kwargs.get('format')

    def plotWaveform(self, *args, **kwargs):
        """
        Creates a graph of any given ObsPy Stream object. It either saves the
        image directly to the file system or returns a binary image string.

        For all color values you can use legit HTML names, HTML hex strings
        (e.g. '#eeefff') or you can pass an R , G , B tuple, where each of
        R , G , B are in the range [0, 1]. You can also use single letters for
        basic built-in colors ('b' = blue, 'g' = green, 'r' = red, 'c' = cyan,
        'm' = magenta, 'y' = yellow, 'k' = black, 'w' = white) and gray shades
        can be given as a string encoding a float in the 0-1 range.
        """
        # Setup the figure if not passed explicitly.
        if not self.fig_obj:
            self.__setupFigure()
        else:
            self.fig = self.fig_obj
        # Determine kind of plot and do the actual plotting.
        if self.type == 'dayplot':
            self.plotDay(*args, **kwargs)
        else:
            self.plot(*args, **kwargs)
        # Adjust the subplot so there is always a margin of 80 px on every
        # side except for plots with just a single trace.
        if self.type != 'dayplot':
            if self.height >= 400:
                fract_y = 80.0 / self.height
            else:
                fract_y = 25.0 / self.height
            fract_x = 80.0 / self.width
            self.fig.subplots_adjust(top=1.0 - fract_y, bottom=fract_y,
                                     left=fract_x, right=1 - fract_x)
        self.fig.canvas.draw()
        # The following just serves as a unified way of saving and displaying
        # the plots.
        if not self.transparent:
            extra_args = {'dpi': self.dpi,
                          'facecolor': self.face_color,
                          'edgecolor': self.face_color}
        else:
            extra_args = {'dpi': self.dpi,
                          'transparent': self.transparent}
        if self.outfile:
            # If format is set use it.
            if self.format:
                self.fig.savefig(self.outfile, format=self.format,
                                 **extra_args)
            # Otherwise use format from self.outfile or default to PNG.
            else:
                self.fig.savefig(self.outfile, **extra_args)
        else:
            # Return a binary image string if self.outfile is not set but self.format is.
            if self.format:
                imgdata = StringIO.StringIO()
                self.fig.savefig(imgdata, format=self.format,
                                 **extra_args)
                imgdata.seek(0)
                return imgdata.read()
            elif self.handle:
                return self.fig
            else:
                if not self.fig_obj:
                    plt.show()

    def plot(self, *args, **kwargs):
        """
        Plot the Traces showing one graph per Trace.

        Plots the whole time series for self.max_npts points and less. For more
        points it plots minmax values.
        """
        stream_new = []
        # Just remove empty traces.
        if not self.automerge:
            for tr in self.stream:
                stream_new.append([])
                if len(tr.data):
                    stream_new[-1].append(tr)
        else:
            # Generate sorted list of traces (no copy)
            # Sort order, id, starttime, endtime
            ids = []
            for tr in self.stream:
                if hasattr(tr.stats, 'preview') and tr.stats.preview:
                    id = tr.id + 'preview'
                else:
                    id = tr.id
                if not id in ids:
                    ids.append(id)
            for id in ids:
                stream_new.append([])
                for tr in self.stream:
                    if hasattr(tr.stats, 'preview') and tr.stats.preview:
                        tr_id = tr.id + 'preview'
                    else:
                        tr_id = tr.id
                    if tr_id == id:
                        # does not copy the elements of the data array
                        tr_ref = copy(tr)
                        # Trim does nothing if times are outside
                        if self.starttime >= tr_ref.stats.endtime or \
                                self.endtime <= tr_ref.stats.starttime:
                            continue
                        if tr_ref.data.size:
                            stream_new[-1].append(tr_ref)
                # delete if empty list
                if not len(stream_new[-1]):
                    stream_new.pop()
                    continue
                stream_new[-1].sort(key=lambda x: x.stats.endtime)
                stream_new[-1].sort(key=lambda x: x.stats.starttime)
        # If everything is lost in the process raise an Exception.
        if not len(stream_new):
            raise Exception("Nothing to plot")
        # Create helper variable to track ids and min/max/mean values.
        self.stats = []
        # Loop over each Trace and call the appropriate plotting method.
        self.axis = []
        for _i, tr in enumerate(stream_new):
            # Each trace needs to have the same sampling rate.
            sampling_rates = set([_tr.stats.sampling_rate for _tr in tr])
            if len(sampling_rates) > 1:
                msg = "All traces with the same id need to have the same " + \
                      "sampling rate."
                raise Exception(msg)
            sampling_rate = sampling_rates.pop()
            if self.background_color:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1,
                                          axisbg=self.background_color)
            else:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1)
            self.axis.append(ax)
            # XXX: Also enable the minmax plotting for previews.
            if self.plotting_method != 'full' and \
                ((self.endtime - self.starttime) * sampling_rate > \
                 self.max_npts):
                self.__plotMinMax(stream_new[_i], ax, *args, **kwargs)
            else:
                self.__plotStraight(stream_new[_i], ax, *args, **kwargs)
        # Set ticks.
        self.__plotSetXTicks()
        self.__plotSetYTicks()

    def plotDay(self, *args, **kwargs):
        """
        Extend the seismogram.
        """
        # Create a copy of the stream because it might be operated on.
        self.stream = self.stream.copy()
        # Merge and trim to pad.
        self.stream.merge()
        if len(self.stream) != 1:
            msg = "All traces need to be of the same id for a dayplot"
            raise ValueError(msg)
        self.stream.trim(self.starttime, self.endtime, pad=True)
        # Get minmax array.
        self.__dayplotGetMinMaxValues(self, *args, **kwargs)
        # Normalize array
        self.__dayplotNormalizeValues(self, *args, **kwargs)
        # Get timezone information. If none is  given, use local time.
        self.time_offset = kwargs.get('time_offset',
                           round((UTCDateTime(datetime.now()) - \
                           UTCDateTime()) / 3600.0, 2))
        self.timezone = kwargs.get('timezone', 'local time')
        # Try to guess how many steps are needed to advance one full time unit.
        self.repeat = None
        intervals = self.extreme_values.shape[0]
        if self.interval < 60 and 60 % self.interval == 0:
            self.repeat = 60 / self.interval
        elif self.interval < 1800 and 3600 % self.interval == 0:
            self.repeat = 3600 / self.interval
        # Otherwise use a maximum value of 10.
        else:
            if intervals >= 10:
                self.repeat = 10
            else:
                self.repeat = intervals
        # Create axis to plot on.
        if self.background_color:
            ax = self.fig.add_subplot(1, 1, 1, axisbg=self.background_color)
        else:
            ax = self.fig.add_subplot(1, 1, 1)
        # Adjust the subplots to be symmetrical. Also make some more room
        # at the top.
        self.fig.subplots_adjust(left=0.12, right=0.88, top=0.88)
        # Create x_value_array.
        aranged_array = np.arange(self.width)
        x_values = np.empty(2 * self.width)
        x_values[0::2] = aranged_array
        x_values[1::2] = aranged_array
        intervals = self.extreme_values.shape[0]
        # Loop over each step.
        for _i in xrange(intervals):
            # Create offset array.
            y_values = np.ma.empty(self.width * 2)
            y_values.fill(intervals - (_i + 1))
            # Add min and max values.
            y_values[0::2] += self.extreme_values[_i, :, 0]
            y_values[1::2] += self.extreme_values[_i, :, 1]
            # Plot the values.
            ax.plot(x_values, y_values,
                    color=self.color[_i % len(self.color)])
        # Set ranges.
        ax.set_xlim(0, self.width - 1)
        ax.set_ylim(-0.3, intervals + 0.3)
        self.axis = [ax]
        # Set ticks.
        self.__dayplotSetYTicks()
        self.__dayplotSetXTicks()
        # Choose to show grid but only on the x axis.
        self.fig.axes[0].grid()
        self.fig.axes[0].yaxis.grid(False)
        # Set the title of the plot.
        #suptitle = '%s %s'%(self.stream[0].id,self.starttime.strftime('%Y-%m-%d'))
        #self.fig.suptitle(suptitle, fontsize='small')

    def __plotStraight(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Just plots the data samples in the self.stream. Useful for smaller
        datasets up to around 1,000,000 samples (depending on the machine it's
        being run on).

        Slow and high memory consumption for large datasets.
        """
        # Copy to avoid any changes to original data.
        trace = deepcopy(trace)
        if len(trace) > 1:
            stream = Stream(traces=trace)
            # Merge with 'interpolation'. In case of overlaps this method will
            # always use the longest available trace.
            if hasattr(trace[0].stats, 'preview') and trace[0].stats.preview:
                stream = Stream(traces=stream)
                stream = mergePreviews(stream)
            else:
                stream.merge(method=1)
            trace = stream[0]
        else:
            trace = trace[0]
        # Check if it is a preview file and adjust accordingly.
        # XXX: Will look weird if the preview file is too small.
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            # Mask the gaps.
            trace.data = np.ma.masked_array(trace.data)
            trace.data[trace.data == -1] = np.ma.masked
            # Recreate the min_max scene.
            dtype = trace.data.dtype
            old_time_range = trace.stats.endtime - trace.stats.starttime
            data = np.empty(2 * trace.stats.npts, dtype=dtype)
            data[0::2] = trace.data / 2.0
            data[1::2] = -trace.data / 2.0
            trace.data = data
            # The times are not supposed to change.
            trace.stats.delta = old_time_range / float(trace.stats.npts - 1)
        # Write to self.stats.
        calib = trace.stats.calib
        max = trace.data.max()
        min = trace.data.min()
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            tr_id = trace.id + ' [preview]'
        else:
            tr_id = trace.id
        self.stats.append([tr_id, calib * trace.data.mean(),
                           calib * min, calib * max])
        # Pad the beginning and the end with masked values if necessary. Might
        # seem like overkill but it works really fast and is a clean solution
        # to gaps at the beginning/end.
        concat = [trace]
        if self.starttime != trace.stats.starttime:
            samples = (trace.stats.starttime - self.starttime) * \
                trace.stats.sampling_rate
            temp = [np.ma.masked_all(int(samples))]
            temp.extend(concat)
            concat = temp
        if self.endtime != trace.stats.endtime:
            samples = (self.endtime - trace.stats.endtime) * \
                      trace.stats.sampling_rate
            concat.append(np.ma.masked_all(int(samples)))
        if len(concat) > 1:
            # Use the masked array concatenate, otherwise it will result in a
            # not masked array.
            trace.data = np.ma.concatenate(concat)
            # set starttime and calculate endtime
            trace.stats.starttime = self.starttime
        trace.data *= calib
        ax.plot(trace.data, color=self.color)
        # Set the x limit for the graph to also show the masked values at the
        # beginning/end.
        ax.set_xlim(0, len(trace.data) - 1)

    def __plotMinMax(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Plots the data using a min/max approach that calculates the minimum and
        maximum values of each "pixel" and then plots only these values. Works
        much faster with large data sets.
        """
        # Some variables to help calculate the values.
        starttime = self.starttime.timestamp
        endtime = self.endtime.timestamp
        # The same trace will always have the same sampling_rate.
        sampling_rate = trace[0].stats.sampling_rate
        # The samples per resulting pixel.
        pixel_length = int((endtime - starttime) / self.width *
                           sampling_rate)
        # Loop over all the traces. Do not merge them as there are many samples
        # and therefore merging would be slow.
        for _i, _t in enumerate(trace):
            # Get the start of the next pixel in case the starttime of the
            # trace does not match the starttime of the plot.
            ts = _t.stats.starttime
            if ts > self.starttime:
                start = int(ceil(((ts - self.starttime) * \
                        sampling_rate) / pixel_length))
                # Samples before start.
                prestart = int(((self.starttime + start * pixel_length /
                           sampling_rate) - ts) * sampling_rate)
            else:
                start = 0
                prestart = 0
            # Figure out the number of pixels in the current trace.
            length = len(_t.data) - prestart
            pixel_count = int(length // pixel_length)
            rest = int(length % pixel_length)
            # Reference to new data array which does not copy data but is
            # reshapeable.
            data = _t.data[prestart: prestart + pixel_count * pixel_length]
            data = data.reshape(pixel_count, pixel_length)
            # Calculate extreme_values and put them into new array.
            extreme_values = np.ma.masked_all((self.width, 2), dtype=np.float)
            min = data.min(axis=1) * _t.stats.calib
            max = data.max(axis=1) * _t.stats.calib
            extreme_values[start: start + pixel_count, 0] = min
            extreme_values[start: start + pixel_count, 1] = max
            # First and last pixel need separate treatment.
            if start and prestart:
                extreme_values[start - 1, 0] = \
                    _t.data[:prestart].min() * _t.stats.calib
                extreme_values[start - 1, 1] = \
                    _t.data[:prestart].max() * _t.stats.calib
            if rest:
                if start + pixel_count == self.width:
                    index = self.width - 1
                else:
                    index = start + pixel_count
                extreme_values[index, 0] = \
                    _t.data[-rest:].min() * _t.stats.calib
                extreme_values[index, 1] = \
                    _t.data[-rest:].max() * _t.stats.calib
            # Use the first array as a reference and merge all following
            # extreme_values into it.
            if _i == 0:
                minmax = extreme_values
            else:
                # Merge minmax and extreme_values.
                min = np.ma.empty((self.width, 2))
                max = np.ma.empty((self.width, 2))
                # Fill both with the values.
                min[:, 0] = minmax[:, 0]
                min[:, 1] = extreme_values[:, 0]
                max[:, 0] = minmax[:, 1]
                max[:, 1] = extreme_values[:, 1]
                # Find the minimum and maximum values.
                min = min.min(axis=1)
                max = max.max(axis=1)
                # Write again to minmax.
                minmax[:, 0] = min
                minmax[:, 1] = max
        # Write to self.stats.
        self.stats.append([trace[0].id, minmax.mean(),
                           minmax[:, 0].min(),
                           minmax[:, 1].max()])
        # Finally plot the data.
        x_values = np.empty(2 * self.width)
        aranged = np.arange(self.width)
        x_values[0::2] = aranged
        x_values[1::2] = aranged
        # Initialize completely masked array. This version is a little bit
        # slower than first creating an empty array and then setting the mask
        # to True. But on NumPy 1.1 this results in a 0-D array which can not
        # be indexed.
        y_values = np.ma.masked_all(2 * self.width)
        y_values[0::2] = minmax[:, 0]
        y_values[1::2] = minmax[:, 1]
        ax.plot(x_values, y_values, color=self.color)
        # Set the x-limit to avoid clipping of masked values.
        ax.set_xlim(0, self.width - 1)

    def __plotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot and sets time ticks on the x axis.
        """
        # Loop over all axes.
        for ax in self.axis:
            # Get the xlimits.
            start, end = ax.get_xlim()
            # Set the location of the ticks.
            ax.set_xticks(np.linspace(start, end, self.number_of_ticks))
            # Figure out times.
            interval = float(self.endtime - self.starttime) / \
                       (self.number_of_ticks - 1)
            # Set the actual labels.
            if self.type == 'relative':
                labels = ['%.2f' % (self.starttime + _i * interval).timestamp \
                          for _i in range(self.number_of_ticks)]
            else:
                labels = [(self.starttime + _i * \
                          interval).strftime(self.tick_format) for _i in \
                          range(self.number_of_ticks)]

            ax.set_xticklabels(labels, fontsize='small',
                               rotation=self.tick_rotation)

    def __plotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot, reads self.stats and sets all ticks on
        the y axis.

        This method also adjusts the y limits so that the mean value is always
        in the middle of the graph and all graphs are equally scaled.
        """
        # Figure out the maximum distance from the mean value to either end.
        # Add 10 percent for better looking graphs.
        max_distance = max([max(trace[1] - trace[2], trace[3] - trace[1])
                            for trace in self.stats]) * 1.1
        # Loop over all axes.
        for _i, ax in enumerate(self.axis):
            mean = self.stats[_i][1]
            # Set the ylimit.
            min_range = mean - max_distance
            max_range = mean + max_distance
            # Set the location of the ticks.
            ticks = [mean - 0.75 * max_distance,
                     mean - 0.5 * max_distance,
                     mean - 0.25 * max_distance,
                     mean,
                     mean + 0.25 * max_distance,
                     mean + 0.5 * max_distance,
                     mean + 0.75 * max_distance]
            ax.set_yticks(ticks)
            # Setup format of the major ticks
            if max(ticks) - min(ticks) > 10:
                fmt = '%d'
            else:
                fmt = '%.2g'
            ax.set_yticklabels([fmt % t for t in ax.get_yticks()],
                               fontsize='small')
            # Set the title of each plot.
            ax.set_title(self.stats[_i][0], horizontalalignment='left',
                      fontsize='small', verticalalignment='center')
            ax.set_ylim(min_range, max_range)

    def __dayplotGetMinMaxValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Takes a Stream object and calculates the min and max values for each
        pixel in the dayplot.

        Writes a three dimensional array. The first axis is the step, i.e. the
        interval index, the second is the pixel in that step and the third
        contains the minimum and maximum value of the pixel.
        """
        # Helper variables for easier access.
        trace = self.stream[0]
        trace_length = len(trace.data)

        # Samples per interval.
        spi = int(self.interval * trace.stats.sampling_rate)
        # Check the approximate number of samples per pixel and raise an
        # error if there are too few.
        spp = float(spi) / self.width
        if spp < 1.0:
            msg = """
            Too few samples to use dayplot with the given arguments.
            Adjust your arguments or use a different plotting method.
            """
            msg = " ".join(msg.strip().split())
            raise ValueError(msg)
        # Number of intervals plotted.
        noi = float(trace_length) / spi
        inoi = int(round(noi))
        # Plot an extra interval if at least 2 percent of the last interval
        # will actually contain data. Do it this way to lessen floating point
        # inaccuracies.
        if abs(noi - inoi) > 2E-2:
            noi = inoi + 1
        else:
            noi = inoi

        # Adjust data. Fill with masked values in case it is necessary.
        number_of_samples = noi * spi
        delta = number_of_samples - trace_length
        if delta < 0:
            trace.data = trace.data[:number_of_samples]
        elif delta > 0:
            trace.data = np.ma.concatenate([trace.data,
                            createEmptyDataChunk(delta, trace.data.dtype)])

        # Create array for min/max values. Use masked arrays to handle gaps.
        extreme_values = np.ma.empty((noi, self.width, 2))
        trace.data.shape = (noi, spi)

        ispp = int(spp)
        fspp = spp % 1.0
        if fspp == 0.0:
            delta = None
        else:
            delta = spi - ispp * self.width

        # Loop over each interval to avoid larger errors towards the end.
        for _i in range(noi):
            if delta:
                cur_interval = trace.data[_i][:-delta]
                rest = trace.data[_i][-delta:]
            else:
                cur_interval = trace.data[_i]
            cur_interval.shape = (self.width, ispp)
            extreme_values[_i, :, 0] = cur_interval.min(axis=1)
            extreme_values[_i, :, 1] = cur_interval.max(axis=1)
            # Add the rest.
            if delta:
                extreme_values[_i, -1, 0] = min(extreme_values[_i, -1, 0],
                                                rest.min())
                extreme_values[_i, -1, 1] = max(extreme_values[_i, -1, 1],
                                                rest.max())
        # Set class variable.
        self.extreme_values = extreme_values

    def __dayplotNormalizeValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Normalizes all values in the 3 dimensional array, so that the minimum
        value will be 0 and the maximum value will be 1.

        It will also convert all values to floats.
        """
        # Convert to native floats.
        self.extreme_values = self.extreme_values.astype(np.float) * \
                              self.stream[0].stats.calib
        # Make sure that the mean value is at 0
        self.extreme_values -= self.extreme_values.mean()

        # Scale so that 99.5 % of the data will fit the given range.
        if self.vertical_scaling_range is None:
            percentile_delta = 0.005
            max_values = self.extreme_values[:, :, 1].compressed()
            min_values = self.extreme_values[:, :, 0].compressed()
            # Remove masked values.
            max_values.sort()
            min_values.sort()
            length = len(max_values)
            index = int((1.0 - percentile_delta) * length)
            max_val = max_values[index]
            index = int(percentile_delta * length)
            min_val = min_values[index]
        # Exact fit.
        elif float(self.vertical_scaling_range) == 0.0:
            max_val = self.extreme_values[:, :, 1].max()
            min_val = self.extreme_values[:, :, 0].min()
        # Fit with custom range.
        else:
            max_val = min_val = abs(self.vertical_scaling_range) / 2.0

        # Scale from 0 to 1.
        self.extreme_values = self.extreme_values / (max(abs(max_val),
                                                         abs(min_val)) * 2)
        self.extreme_values += 0.5

    def __dayplotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the xticks for the dayplot.
        """
        max_value = self.width - 1
        # Check whether the interval is in seconds, minutes or hours and pick a display unit.
        if self.interval < 240:
            time_type = 'seconds'
            time_value = self.interval
        elif self.interval < 24000:
            time_type = 'minutes'
            time_value = self.interval / 60
        else:
            time_type = 'hours'
            time_value = self.interval / 3600
        count = None
        # Hardcode some common values. The plus one is intentional. It has
        # hardly any performance impact and enhances readability.
        if self.interval == 15 * 60:
            count = 15 + 1
        elif self.interval == 20 * 60:
            count = 4 + 1
        elif self.interval == 30 * 60:
            count = 6 + 1
        elif self.interval == 60 * 60:
            count = 4 + 1
        elif self.interval == 90 * 60:
            count = 6 + 1
        elif self.interval == 120 * 60:
            count = 4 + 1
        elif self.interval == 180 * 60:
            count = 6 + 1
        elif self.interval == 240 * 60:
            count = 6 + 1
        elif self.interval == 300 * 60:
            count = 6 + 1
        elif self.interval == 360 * 60:
            count = 12 + 1
        elif self.interval == 720 * 60:
            count = 12 + 1
        # Otherwise run some kind of autodetection routine.
        if not count:
            # Up to 15 time units and if it's a whole number, show every unit.
            if time_value <= 15 and time_value % 1 == 0:
                count = time_value
            # Otherwise check whether time_value is divisible by a number up
            # to 15. If it is not divisible, just show 10 units.
            else:
                count = 10
                for _i in xrange(15, 1, -1):
                    if time_value % _i == 0:
                        count = _i
                        break
            # Show at least 5 ticks.
            if count < 5:
                count = 5
        # Everything can be overwritten by user specified number of ticks.
        if self.number_of_ticks:
            count = self.number_of_ticks
        # Calculate and set ticks.
        ticks = np.linspace(0.0, max_value, count)
        ticklabels = ['%i' % _i for _i in np.linspace(0.0,
                                    time_value, count)]
        self.axis[0].set_xticks(ticks)
        self.axis[0].set_xticklabels(ticklabels, rotation=self.tick_rotation)
        self.axis[0].set_xlabel('time in %s' % time_type)

    def __dayplotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the yticks for the dayplot.
        """
        intervals = self.extreme_values.shape[0]
        # Do not display all ticks unless there are five or fewer steps.
        if intervals <= 5:
            tick_steps = range(0, intervals)
            ticks = np.arange(intervals, 0, -1, dtype=np.float)
            ticks -= 0.5
        else:
            tick_steps = range(0, intervals, self.repeat)
            ticks = np.arange(intervals, 0, -1 * self.repeat, dtype=np.float)
            ticks -= 0.5
        ticklabels = [(self.starttime + (_i + 1) * self.interval + \
                      self.time_offset * 3600).strftime('%H:%M') \
                      for _i in tick_steps]

        self.axis[0].set_yticks(ticks)
        self.axis[0].set_yticklabels(ticklabels)
        # self.axis[0].set_ylabel('UTC')
        # Save range.
        yrange = self.axis[0].get_ylim()
        # Create twin axis.
        #XXX
        self.twin = self.axis[0].twinx()
        self.twin.set_ylim(yrange)
        self.twin.set_yticks(ticks)
        ticklabels = [(self.starttime + _i * self.interval).strftime('%H:%M') \
                      for _i in tick_steps]
        self.twin.set_yticklabels(ticklabels)
        # Complicated way to calculate the label of the y-Axis showing the
        # second time zone.
        sign = '%+i' % self.time_offset
        sign = sign[0]
        # time_label = self.timezone.strip() + ' (UTC%s%02i:%02i)' % \
        time_label = 'Berlin' + ' (UTC%s%02i:%02i)' % \
                     (sign, abs(self.time_offset), (self.time_offset % 1 * 60))
        self.axis[0].set_ylabel(time_label)
        self.twin.set_ylabel('UTC')

    def __setupFigure(self):
        """
        The design and look of the whole plot to be produced.
        """
        # Setup figure and axes
        self.fig = plt.figure(num=None, dpi=self.dpi,
                              figsize=(float(self.width) / self.dpi,
                                       float(self.height) / self.dpi))
        # XXX: Figure out why this is needed sometimes.
        # Set size and dpi.
        self.fig.set_dpi(self.dpi)
        self.fig.set_figwidth(float(self.width) / self.dpi)
        self.fig.set_figheight(float(self.height) / self.dpi)
        # hide time information if set as option
        if self.type == 'relative':
            return
        if self.type == 'dayplot':
            suptitle = '%s %s'%(self.stream[0].id,self.starttime.strftime('%Y-%m-%d'))
            self.fig.suptitle(suptitle, y=0.94, fontsize='small')
        else:
            pattern = '%Y-%m-%dT%H:%M:%SZ'
            suptitle = '%s  -  %s' % (self.starttime.strftime(pattern),
                                      self.endtime.strftime(pattern))
            self.fig.suptitle(suptitle, x=0.02, y=0.96, fontsize='small',
                              horizontalalignment='left')
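
# The class above is normally driven through the public plot() method of
# Stream/Trace objects rather than instantiated directly. A short usage
# sketch of that public entry point (standard ObsPy calls; the output file
# name is a placeholder):
from obspy import read

st = read()  # ObsPy's bundled example stream
# One subplot per trace id, written to disk:
st.plot(outfile="waveform.png", size=(800, 500))
# Dayplot of a single trace in 15 minute intervals:
st.select(component="Z").plot(type="dayplot", interval=15)
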
Example #41
0
st = Stream()
num_stations = 0
exceptions = []
for station in STATIONS:
    try:
        # we request 60s more at start and end and cut them off later to avoid
        # a false trigger due to the tapering during instrument correction
        tmp = client.waveform.getWaveform(NET, station, "", CHANNEL, T1 - 180,
                                          T2 + 180, getPAZ=True,
                                          getCoordinates=True)
    except Exception as e:
        exceptions.append("%s: %s" % (e.__class__.__name__, e))
        continue
    st.extend(tmp)
    num_stations += 1
st.merge(-1)
st.sort()

summary = []
summary.append("#" * 79)
summary.append("######## %s  ---  %s ########" % (T1, T2))
summary.append("#" * 79)
summary.append(st.__str__(extended=True))
if exceptions:
    summary.append("#" * 33 + " Exceptions  " + "#" * 33)
    summary += exceptions
summary.append("#" * 79)

trig = []
mutt = []
if st:
Example #42
0
def computePSD(sp,net,sta,loc,chan,dateval):
	#Here we compute the PSD
	lenfft = 5000
	lenol = 2500
	#Here are the different period bands. These could be done as a dictionary;
	#if Adam B. wants to clean this up using a dictionary, that is fine with me.
	permin1 = 4
	permax1 = 8
	permin2 = 18
	permax2 = 22
	permin3 = 90
	permax3 = 110
	permin4 = 200
	permax4 = 500
	
	try:

		#Get the response and compute amplitude response	
		paz=getPAZ2(sp,net,sta,loc,chan,dateval)
		respval = pazToFreqResp(paz['poles'],paz['zeros'],paz['sensitivity']*paz['gain'], \
			t_samp = 1, nfft = lenfft,freq=False)[1:]
		respval = numpy.absolute(respval*numpy.conjugate(respval))

		#Get the data to compute the PSD
		if net in set(['IW','NE','US']):
			locStr = '/xs1'
		else:
			locStr = '/xs0'
		readDataString = locStr + '/seed/' + net + '_' + sta + '/' + str(dateval.year) + \
			'/' + str(dateval.year) + '_' + str(dateval.julday).zfill(3) + '_' + net + \
			'_' + sta + '/' + loc + '_' + chan + '*.seed'
		datafiles = glob.glob(readDataString)
		st = Stream()
		for datafile in datafiles:
			st += read(datafile)
		st.merge(method=-1)

		#Compute the PSD
		cpval,fre = psd(st[0].data,NFFT=lenfft,Fs=1,noverlap=lenol,scale_by_freq=True)
		per = 1/fre[1:]
		cpval = 10*numpy.log10(((2*pi*fre[1:])**2)*cpval[1:]/respval)
		perminind1 = numpy.abs(per-permin1).argmin()
		permaxind1 = numpy.abs(per-permax1).argmin()
		perminind2 = numpy.abs(per-permin2).argmin()
		permaxind2 = numpy.abs(per-permax2).argmin()
		perminind3 = numpy.abs(per-permin3).argmin()
		permaxind3 = numpy.abs(per-permax3).argmin()
		perminind4 = numpy.abs(per-permin4).argmin()
		permaxind4 = numpy.abs(per-permax4).argmin()
		perNLNM,NLNM = get_NLNM()
		perNLNMminind1 = numpy.abs(perNLNM-permin1).argmin()
		perNLNMmaxind1 = numpy.abs(perNLNM-permax1).argmin()
		perNLNMminind2 = numpy.abs(perNLNM-permin2).argmin()
		perNLNMmaxind2 = numpy.abs(perNLNM-permax2).argmin()
		perNLNMminind3 = numpy.abs(perNLNM-permin3).argmin()
		perNLNMmaxind3 = numpy.abs(perNLNM-permax3).argmin()
		perNLNMminind4 = numpy.abs(perNLNM-permin4).argmin()
		perNLNMmaxind4 = numpy.abs(perNLNM-permax4).argmin()

		cpval1 = round(numpy.average(cpval[permaxind1:perminind1]) - \
			numpy.average(NLNM[perNLNMmaxind1:perNLNMminind1]),2)
		cpval2 = round(numpy.average(cpval[permaxind2:perminind2]) - \
			numpy.average(NLNM[perNLNMmaxind2:perNLNMminind2]),2)
		cpval3 = round(numpy.average(cpval[permaxind3:perminind3]) - \
			numpy.average(NLNM[perNLNMmaxind3:perNLNMminind3]),2)
		cpval4 = round(numpy.average(cpval[permaxind4:perminind4]) - \
			numpy.average(NLNM[perNLNMmaxind4:perNLNMminind4]),2)
	except:
		cpval1 = 0
		cpval2 = 0
		cpval3 = 0
		cpval4 = 0
	return cpval1, cpval2, cpval3,cpval4
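
# The four values returned above are average PSD deviations (in dB) from the
# NLNM over fixed period bands. A small self-contained illustration of that
# band-averaging step with synthetic curves (all numbers below are made up):
import numpy as np

per = np.linspace(500, 1, 2499)            # PSD periods, descending like 1/fre
cpval_demo = -140 + 3 * np.log10(per)      # synthetic PSD (dB)
perNLNM = np.linspace(500, 1, 300)         # synthetic NLNM period samples
NLNM_demo = -160 + 4 * np.log10(perNLNM)   # synthetic NLNM (dB)

permin1, permax1 = 4, 8                    # the 4-8 s band used above
perminind1 = np.abs(per - permin1).argmin()
permaxind1 = np.abs(per - permax1).argmin()
perNLNMminind1 = np.abs(perNLNM - permin1).argmin()
perNLNMmaxind1 = np.abs(perNLNM - permax1).argmin()
# Because the period axes are descending, the max-period index is the smaller
# one, so the slices run from permaxind to perminind, as in the function above.
cpval1_demo = round(np.average(cpval_demo[permaxind1:perminind1]) -
                    np.average(NLNM_demo[perNLNMmaxind1:perNLNMminind1]), 2)
print(cpval1_demo)
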
Example #43
0
    def get_waveforms(self,
                      network,
                      station,
                      location,
                      channel,
                      starttime,
                      endtime,
                      automerge=False,
                      trace_count_threshold=200):

        starttime = UTCDateTime(starttime).timestamp
        endtime = UTCDateTime(endtime).timestamp
        # create a list of asdf datasets that may contain queried data
        dslistIndices = []

        for i, (stime, etime) in enumerate(
                zip(self.waveform_start_time_list,
                    self.waveform_end_time_list)):
            if (stime is None or etime is None): continue
            if (FederatedASDFDataSetMemVariant.hasOverlap(
                    starttime, endtime, stime, etime)):
                dslistIndices.append(i)
            # end for
        # end for

        s = Stream()
        for i in dslistIndices:
            #print 'Accessing file: %s'%(self.asdf_file_names[i])

            ds = self.asdf_datasets[i]

            cs = Stream()
            if (self.tree_list[i]):
                val = self.tree_list[i][network][station][location][channel]
                if (type(val) == index.Index):
                    tag_indices = list(
                        val.intersection((starttime, 1, endtime, 1)))

                    if (len(tag_indices) > trace_count_threshold): return cs

                    station_data = ds.waveforms['%s.%s' % (network, station)]
                    for ti in tag_indices:
                        try:
                            cs += station_data[self.asdf_tags_list[i][ti]]
                        except:
                            pass
                        # end try
                    # end for
                # end if

                if (automerge):
                    try:
                        cs.merge(method=-1)
                    except:
                        pass
                    # end try
                # end if
            else:
                pass
                # disable accessing waveforms through the ASDFDataset interface
                # for the time being
                #cs = ds.get_waveforms(network, station, location, channel, starttime,
                #                      endtime, 'raw_recording', automerge)
            # end if

            # Trim traces
            for t in cs:
                t.trim(starttime=UTCDateTime(starttime),
                       endtime=UTCDateTime(endtime))
                s += t
            # end for
        # end for

        return s
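
# A hedged usage sketch (not from the original source): get_waveforms above is
# a method of a federated ASDF dataset class, so it needs an instance. The
# constructor arguments, network/station codes and dates below are all
# placeholders and assumptions.
#
# fds = FederatedASDFDataSetMemVariant(asdf_source)   # assumed constructor
# st = fds.get_waveforms('AU', 'ARMA', '', 'BHZ',
#                        '2011-03-11T00:00:00', '2011-03-11T01:00:00',
#                        automerge=True)
# print(st)
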
                                               STA,
                                               loc,
                                               CHAN,
                                               STARTTIME -
                                               2 * config.taper_val,
                                               ENDTIME + 2 * config.taper_val,
                                               cleanup=True)
                    st += tr
                except Exception:
                    continue

            if not st:
                logging.error(f"No data retrieved for {STA}")
                continue

            st.merge(fill_value='latest')
            st.trim(STARTTIME - 2 * config.taper_val,
                    ENDTIME + 2 * config.taper_val,
                    pad=True,
                    fill_value=0)
            st.sort()
            logging.info(st)

            # print('Removing sensitivity...')
            # st.remove_sensitivity()

            stf = st.copy()
            stf.detrend('demean')
            stf.taper(max_percentage=None, max_length=config.taper_val)
            stf.filter("bandpass",
                       freqmin=FMIN,
Example #46
0
class Window(QtGui.QMainWindow):
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        # Injected by the compile_and_import_ui_files() function.
        self.ui = asdf_sextant_window.Ui_MainWindow()  # NOQA
        self.ui.setupUi(self)

        self.provenance_list_model = QtGui.QStandardItemModel(
            self.ui.provenance_list_view)
        self.ui.provenance_list_view.setModel(self.provenance_list_model)

        # Station view.
        map_file = os.path.abspath(os.path.join(
            os.path.dirname(__file__), "resources/index.html"))
        self.ui.web_view.load(QtCore.QUrl.fromLocalFile(map_file))
        # Enable debugging of the web view.
        self.ui.web_view.settings().setAttribute(
            QtWebKit.QWebSettings.DeveloperExtrasEnabled, True)

        # Event view.
        map_file = os.path.abspath(os.path.join(
            os.path.dirname(__file__), "resources/index_event.html"))
        self.ui.events_web_view.load(QtCore.QUrl.fromLocalFile(map_file))
        # Enable debugging of the web view.
        self.ui.events_web_view.settings().setAttribute(
            QtWebKit.QWebSettings.DeveloperExtrasEnabled, True)

        self._state = {}

        self.ui.openASDF.triggered.connect(self.open_asdf_file)

        # Add right clickability to station view
        self.ui.station_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.ui.station_view.customContextMenuRequested.connect(self.station_view_rightClicked)

        # Add right clickability to event view
        self.ui.event_tree_widget.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.ui.event_tree_widget.customContextMenuRequested.connect(self.event_tree_widget_rightClicked)

        QtGui.QApplication.instance().focusChanged.connect(self.changed_widget_focus)

        tmp = tempfile.mkstemp("asdf_sextant")
        os.close(tmp[0])
        try:
            os.remove(tmp[1])
        except:
            pass
        self._tempfile = tmp[1] + ".svg"

    def __del__(self):
        try:
            os.remove(self._tempfile)
        except:
            pass

    def __connect_signal_and_slots(self):
        """
        Connect special signals and slots not covered by the named signals and
        slots from pyuic4.
        """
        self.ui.station_view.itemEntered.connect(
            self.on_station_view_itemEntered)
        self.ui.station_view.itemExited.connect(
            self.on_station_view_itemExited)

    def changed_widget_focus(self):
        if QtGui.QApplication.focusWidget() == self.ui.graph:
            # Access the state dictionary and iterate through all stations in graph then highlight stations on web view
            for station_id in self._state["station_id"]:
                sta = station_id.split('.')[0] + '.' + station_id.split('.')[1]
                # Run Java Script to highlight all selected stations in station view
                js_call = "highlightStation('{station}')".format(station=sta)
                self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)

    def build_event_tree_view(self):
        if not hasattr(self, "ds") or not self.ds:
            return
        self.events = self.ds.events
        self.ui.event_tree_widget.clear()

        items = []
        self._state["quake_ids"] = {}

        for event in self.events:
            if event.origins:
                org = event.preferred_origin() or event.origins[0]

                js_call = "addEvent('{event_id}', {latitude}, {longitude});"\
                    .format(event_id=event.resource_id.id,
                            latitude=org.latitude,
                            longitude=org.longitude)
                self.ui.events_web_view.page().mainFrame().evaluateJavaScript(
                    js_call)

            event_item = QtGui.QTreeWidgetItem(
                [event.resource_id.id],
                type=EVENT_VIEW_ITEM_TYPES["EVENT"])
            self._state["quake_ids"][event.resource_id.id] = event_item

            origin_item = QtGui.QTreeWidgetItem(["Origins"], type=-1)
            magnitude_item = QtGui.QTreeWidgetItem(["Magnitudes"], type=-1)
            focmec_item = QtGui.QTreeWidgetItem(["Focal Mechanisms"], type=-1)

            org_items = []
            for origin in event.origins:
                org_items.append(
                    QtGui.QTreeWidgetItem(
                        [origin.resource_id.id],
                        type=EVENT_VIEW_ITEM_TYPES["ORIGIN"]))
                self._state["quake_ids"][origin.resource_id.id] = org_items[-1]
            origin_item.addChildren(org_items)

            mag_items = []
            for magnitude in event.magnitudes:
                mag_items.append(
                    QtGui.QTreeWidgetItem(
                        [magnitude.resource_id.id],
                        type=EVENT_VIEW_ITEM_TYPES["MAGNITUDE"]))
                self._state["quake_ids"][magnitude.resource_id.id] = \
                    mag_items[-1]
            magnitude_item.addChildren(mag_items)

            focmec_items = []
            for focmec in event.focal_mechanisms:
                focmec_items.append(
                    QtGui.QTreeWidgetItem(
                        [focmec.resource_id.id],
                        type=EVENT_VIEW_ITEM_TYPES["FOCMEC"]))
                self._state["quake_ids"][focmec.resource_id.id] = \
                    focmec_items[-1]
            focmec_item.addChildren(focmec_items)

            event_item.addChildren([origin_item, magnitude_item, focmec_item])
            items.append(event_item)

        self.ui.event_tree_widget.insertTopLevelItems(0, items)

    def build_station_view_list(self):
        if not hasattr(self, "ds") or not self.ds:
            return
        self.ui.station_view.clear()

        items = []

        if self.ui.group_by_network_check_box.isChecked():
            for key, group in itertools.groupby(
                    self.ds.waveforms,
                    key=lambda x: x._station_name.split(".")[0]):
                network_item = QtGui.QTreeWidgetItem(
                    [key],
                    type=STATION_VIEW_ITEM_TYPES["NETWORK"])
                group = sorted(group, key=lambda x: x._station_name)
                for station in group:
                    station_item = QtGui.QTreeWidgetItem([
                        station._station_name.split(".")[-1]],
                        type=STATION_VIEW_ITEM_TYPES["STATION"])

                    # Add children.
                    children = []
                    if "StationXML" in station.list():
                        children.append(
                            QtGui.QTreeWidgetItem(
                                ["StationXML"],
                                type=STATION_VIEW_ITEM_TYPES["STATIONXML"]))
                    for waveform in station.get_waveform_tags():
                        children.append(
                            QtGui.QTreeWidgetItem(
                                [waveform],
                                type=STATION_VIEW_ITEM_TYPES["WAVEFORM"]))
                    station_item.addChildren(children)

                    network_item.addChild(station_item)
                items.append(network_item)

        else:
            # Add all the waveforms and stations.
            for station in self.ds.waveforms:
                item = QtGui.QTreeWidgetItem(
                    [station._station_name],
                    type=STATION_VIEW_ITEM_TYPES["STATION"])

                # Add children.
                children = []
                if "StationXML" in station.list():
                    children.append(
                        QtGui.QTreeWidgetItem(
                            ["StationXML"],
                            type=STATION_VIEW_ITEM_TYPES["STATIONXML"]))
                for waveform in station.get_waveform_tags():
                    children.append(
                        QtGui.QTreeWidgetItem(
                            [waveform],
                            type=STATION_VIEW_ITEM_TYPES["WAVEFORM"]))
                item.addChildren(children)

                items.append(item)

        self.ui.station_view.insertTopLevelItems(0, items)

    def on_initial_view_push_button_released(self):
        self.reset_view()

    def show_provenance_for_id(self, prov_id):
        try:
            info = \
                self.ds.provenance.get_provenance_document_for_id(prov_id)
        except ASDFValueError as e:
            msg_box = QtGui.QMessageBox()
            msg_box.setText(e.args[0])
            msg_box.exec_()
            return

        # Find the item.
        item = self.provenance_list_model.findItems(info["name"])[0]
        index = self.provenance_list_model.indexFromItem(item)
        self.ui.provenance_list_view.setCurrentIndex(index)
        self.show_provenance_document(info["name"])
        self.ui.central_tab.setCurrentWidget(self.ui.provenance_tab)

    def show_referenced_object(self, object_type, object_id):
        if object_type.lower() == "provenance":
            self.show_provenance_for_id(object_id)
        else:
            self.show_event(attribute=object_type.lower(), object_id=object_id)

    def show_event(self, attribute, object_id):
        item = self._state["quake_ids"][object_id]
        self.ui.event_tree_widget.collapseAll()
        self.ui.event_tree_widget.setCurrentItem(item)

        self.on_event_tree_widget_itemClicked(item, 0)

        self.ui.central_tab.setCurrentWidget(self.ui.event_tab)

    def on_show_auxiliary_provenance_button_released(self):
        if "current_auxiliary_data_provenance_id" not in self._state or \
                not self._state["current_auxiliary_data_provenance_id"]:
            return
        self.show_provenance_for_id(
            self._state["current_auxiliary_data_provenance_id"])

    def on_references_push_button_released(self):
        if "current_station_object" not in self._state:
            return
        obj = self._state["current_station_object"]

        popup = QtGui.QMenu()

        for waveform in obj.list():
            if not waveform.endswith(
                    "__" + self._state["current_waveform_tag"]):
                continue
            menu = popup.addMenu(waveform)
            attributes = dict(
                self.ds._waveform_group[obj._station_name][waveform].attrs)

            for key, value in sorted(attributes.items()):
                if not key.endswith("_id"):
                    continue
                key = key[:-3].capitalize()

                try:
                    value = value.decode()
                except:
                    pass

                def get_action_fct():
                    _key = key
                    _value = value

                    def _action(check):
                        self.show_referenced_object(_key, _value)

                    return _action

                # Bind with a closure.
                menu.addAction("%s: %s" % (key, value)).triggered.connect(
                    get_action_fct())

        popup.exec_(self.ui.references_push_button.parentWidget().mapToGlobal(
                    self.ui.references_push_button.pos()))

    def create_asdf_sql(self, sta):
        # Helper to split the ASDF waveform name into its separate fields
        def waveform_sep(ws):
            a = ws.split('__')
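            # e.g. ws = 'BW.RJOB..EHZ__2009-08-24T00:20:03__2009-08-24T00:20:32__raw_recording',
            # which splits into [station_id, starttime, endtime, tag]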
            starttime = int(UTCDateTime(a[1].encode('ascii')).timestamp)
            endtime = int(UTCDateTime(a[2].encode('ascii')).timestamp)

            # Returns: (full_id, station_id, starttime, endtime, tag)
            return (ws.encode('ascii'), a[0].encode('ascii'),
                    starttime, endtime, a[3].encode('ascii'))

        # Get the SQL file for the station
        SQL_filename = os.path.join(os.path.dirname(self.filename),
                                    str(sta.split('.')[1]) + '.db')

        check_SQL = exists(SQL_filename)

        if check_SQL:
            return
        else:
            # Need to create the SQL database
            # Initialize (open/create) the sqlalchemy sqlite engine
            engine = create_engine('sqlite:///' + SQL_filename)
            Session = sessionmaker()

            # Get list of all waveforms for station
            waveforms_list = self.ds.waveforms[str(sta)].list()
            # Remove the StationXML entry so only waveforms remain
            if 'StationXML' in waveforms_list:
                waveforms_list.remove('StationXML')

            # Create all tables in the engine
            Base.metadata.create_all(engine)

            # Initiate a session with the SQL database so that we can add data to it
            Session.configure(bind=engine)
            session = Session()

            progressDialog = QtGui.QProgressDialog("Building SQL Library for Station {0}".format(str(sta)),
                                                   "Cancel", 0, len(waveforms_list))

            # go through the waveforms (ignore stationxml file)
            for _i, sta_wave in enumerate(waveforms_list):
                progressDialog.setValue(_i)

                # The ASDF formatted waveform name for SQL [full_id, station_id, starttime, endtime, tag]
                waveform_info = waveform_sep(sta_wave)

                # create new SQL entry
                new_wave_SQL = Waveforms(full_id=waveform_info[0], station_id=waveform_info[1],
                                         starttime=waveform_info[2],
                                         endtime=waveform_info[3], tag=waveform_info[4])

                # Add the waveform info to the session
                session.add(new_wave_SQL)
                session.commit()
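
The create_asdf_sql method above writes rows through a ``Waveforms`` declarative model and a shared ``Base`` that are defined elsewhere in the application (query_sql_db further down filters on the same columns). A minimal sketch of what that model could look like, inferred from the keyword arguments and the text() queries used here; the table name and column types are assumptions, not taken from the original source:

# Hypothetical sketch of the SQLAlchemy model assumed by create_asdf_sql()
# and query_sql_db(); column types are guesses.
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Waveforms(Base):
    __tablename__ = 'Waveforms'

    full_id = Column(String, primary_key=True)  # full ASDF waveform name
    station_id = Column(String)                 # NET.STA.LOC.CHA part
    starttime = Column(Integer)                 # POSIX timestamp (s)
    endtime = Column(Integer)                   # POSIX timestamp (s)
    tag = Column(String)                        # ASDF waveform tag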

    def open_asdf_file(self):
        """
        Fill the station tree widget upon opening a new file.
        """
        self.filename = str(QtGui.QFileDialog.getOpenFileName(
            parent=self, caption="Choose File",
            directory=os.path.expanduser("~"),
            filter="ASDF files (*.h5)"))
        if not self.filename:
            return

        self.ds = pyasdf.ASDFDataSet(self.filename)

        for station_id, coordinates in self.ds.get_all_coordinates().items():
            if not coordinates:
                continue
            js_call = "addStation('{station_id}', {latitude}, {longitude})"
            self.ui.web_view.page().mainFrame().evaluateJavaScript(
                js_call.format(station_id=station_id,
                               latitude=coordinates["latitude"],
                               longitude=coordinates["longitude"]))

        self.build_station_view_list()
        self.build_event_tree_view()

        # Add all the provenance items
        self.provenance_list_model.clear()
        for provenance in self.ds.provenance.list():
            item = QtGui.QStandardItem(provenance)
            self.provenance_list_model.appendRow(item)

        # Also add the auxiliary data.

        def recursive_tree(name, item):
            if isinstance(item, pyasdf.utils.AuxiliaryDataAccessor):
                data_type_item = QtGui.QTreeWidgetItem(
                    [name],
                    type=AUX_DATA_ITEM_TYPES["DATA_TYPE"])
                children = []
                for sub_item in item.list():
                    children.append(recursive_tree(sub_item, item[sub_item]))
                data_type_item.addChildren(children)
            elif isinstance(item, pyasdf.utils.AuxiliaryDataContainer):
                data_type_item = QtGui.QTreeWidgetItem(
                    [name],
                    type=AUX_DATA_ITEM_TYPES["DATA_ITEM"])
            else:
                raise NotImplementedError
            return data_type_item

        items = []
        for data_type in self.ds.auxiliary_data.list():
            items.append(recursive_tree(data_type,
                                        self.ds.auxiliary_data[data_type]))
        self.ui.auxiliary_data_tree_view.insertTopLevelItems(0, items)

        sb = self.ui.status_bar
        if hasattr(sb, "_widgets"):
            for i in sb._widgets:
                sb.removeWidget(i)

        w = QtGui.QLabel("File: %s    (%s)" % (self.ds.filename,
                                               self.ds.pretty_filesize))
        sb._widgets = [w]
        sb.addPermanentWidget(w)
        w.show()
        sb.show()
        sb.reformat()

    def on_detrend_and_demean_check_box_stateChanged(self, state):
        self.update_waveform_plot()

    def on_normalize_check_box_stateChanged(self, state):
        self.update_waveform_plot()

    def on_group_by_network_check_box_stateChanged(self, state):
        self.build_station_view_list()

    def update_waveform_plot(self):
        self.ui.central_tab.setCurrentIndex(0)
        self.ui.initial_view_push_button.setEnabled(True)
        self.ui.previous_view_push_button.setEnabled(True)
        self.ui.previous_interval_push_button.setEnabled(True)
        self.ui.next_interval_push_button.setEnabled(True)

        # Get the filter settings.
        filter_settings = {}
        filter_settings["detrend_and_demean"] = \
            self.ui.detrend_and_demean_check_box.isChecked()
        filter_settings["normalize"] = self.ui.normalize_check_box.isChecked()

        temp_st = self.st.copy()

        if filter_settings["detrend_and_demean"]:
            temp_st.detrend("linear")
            temp_st.detrend("demean")

        if filter_settings["normalize"]:
            temp_st.normalize()

        self.ui.graph.clear()

        starttimes = []
        endtimes = []
        min_values = []
        max_values = []

        self._state["waveform_plots"] = []
        self._state["station_id"] = []
        self._state["station_tag"] = []
        for _i, tr in enumerate(temp_st):
            plot = self.ui.graph.addPlot(
                _i, 0, title=tr.id,
                axisItems={'bottom': DateAxisItem(orientation='bottom',
                                                  utcOffset=0)})
            plot.show()
            self._state["waveform_plots"].append(plot)
            self._state["station_id"].append(tr.stats.network+'.'+
                                               tr.stats.station+'.'+
                                               tr.stats.location+'.'+
                                               tr.stats.channel)
            self._state["station_tag"].append(str(tr.stats.asdf.tag))
            plot.plot(tr.times() + tr.stats.starttime.timestamp, tr.data)
            starttimes.append(tr.stats.starttime)
            endtimes.append(tr.stats.endtime)
            min_values.append(tr.data.min())
            max_values.append(tr.data.max())

        self._state["waveform_plots_min_time"] = min(starttimes)
        self._state["waveform_plots_max_time"] = max(endtimes)
        self._state["waveform_plots_min_value"] = min(min_values)
        self._state["waveform_plots_max_value"] = max(max_values)


        for plot in self._state["waveform_plots"][1:]:
            plot.setXLink(self._state["waveform_plots"][0])
            plot.setYLink(self._state["waveform_plots"][0])

        self.reset_view()

    def on_previous_interval_push_button_released(self):
        # Get start and end time of previous interval with 10% overlap
        starttime = UTCDateTime(self._state["waveform_plots_min_time"])
        endtime = UTCDateTime(self._state["waveform_plots_max_time"])

        delta_time = endtime - starttime
        overlap_time = delta_time * 0.1

        self.new_start_time = starttime - (delta_time - overlap_time)
        self.new_end_time = starttime + overlap_time

        self.extract_from_continuous(True, st_ids=self._state["station_id"],
                                     st_tags=self._state["station_tag"])

    def on_next_interval_push_button_released(self):
        # Get start and end time of next interval with 10% overlap
        starttime = UTCDateTime(self._state["waveform_plots_min_time"])
        endtime = UTCDateTime(self._state["waveform_plots_max_time"])

        delta_time = endtime - starttime
        overlap_time = delta_time * 0.1

        self.new_start_time = endtime - (overlap_time)
        self.new_end_time = endtime + (delta_time - overlap_time)

        self.extract_from_continuous(True, st_ids=self._state["station_id"],
                                     st_tags=self._state["station_tag"])

    def reset_view(self):
        self._state["waveform_plots"][0].setXRange(
            self._state["waveform_plots_min_time"].timestamp,
            self._state["waveform_plots_max_time"].timestamp)
        min_v = self._state["waveform_plots_min_value"]
        max_v = self._state["waveform_plots_max_value"]

        y_range = max_v - min_v
        min_v -= 0.1 * y_range
        max_v += 0.1 * y_range
        self._state["waveform_plots"][0].setYRange(min_v, max_v)

    def show_provenance_document(self, document_name):
        doc = self.ds.provenance[document_name]
        doc.plot(filename=self._tempfile, use_labels=True)

        self.ui.provenance_graphics_view.open_file(self._tempfile)

    def on_station_view_itemClicked(self, item, column):
        t = item.type()

        def get_station(item):
            station = item.text(0)
            if "." not in station:
                station = item.parent().text(0) + "." + station
            return station

        if t == STATION_VIEW_ITEM_TYPES["NETWORK"]:
            pass
        elif t == STATION_VIEW_ITEM_TYPES["STATION"]:
            station = get_station(item)
            # Run method to create the ASDF SQL database with SQLite
            # (one db per station within the ASDF file)
            self.create_asdf_sql(station)
        elif t == STATION_VIEW_ITEM_TYPES["STATIONXML"]:
            station = get_station(item.parent())
            self.ds.waveforms[station].StationXML.plot()  # or .plot_response(0.001)
        elif t == STATION_VIEW_ITEM_TYPES["WAVEFORM"]:
            station = get_station(item.parent())
            self._state["current_station_object"] = self.ds.waveforms[station]
            self._state["current_waveform_tag"] = item.text(0)
            self.st = self.ds.waveforms[station][str(item.text(0))]
            self.update_waveform_plot()
        else:
            pass

    def station_view_rightClicked(self, position):
        item = self.ui.station_view.selectedItems()[0]

        t = item.type()

        def get_station(item):
            station = item.text(0)
            if "." not in station:
                station = item.parent().text(0) + "." + station
            return station

        if t == STATION_VIEW_ITEM_TYPES["NETWORK"]:
            self.net_item_menu = QtGui.QMenu(self)
            ext_menu = QtGui.QMenu('Select NSLC', self)
        elif t == STATION_VIEW_ITEM_TYPES["STATIONXML"]:
            pass
        elif t == STATION_VIEW_ITEM_TYPES["WAVEFORM"]:
            pass
        elif t == STATION_VIEW_ITEM_TYPES["STATION"]:
            station = get_station(item)
            wave_tag_list = self.ds.waveforms[station].get_waveform_tags()

            # Run Method to create ASDF SQL database with SQLite (one db per station within ASDF)
            self.create_asdf_sql(station)

            self.sta_item_menu = QtGui.QMenu(self)
            ext_menu = QtGui.QMenu('Extract Time Interval', self)

            # Add actions for each tag for station
            for wave_tag in wave_tag_list:
                action = QtGui.QAction(wave_tag, self)
                # Bind the current wave_tag via a default argument so each
                # action keeps its own tag (avoids the late-binding closure bug)
                action.triggered.connect(
                    lambda checked=False, wt=wave_tag:
                    self.extract_from_continuous(False, sta=station,
                                                 wave_tag=wt))
                ext_menu.addAction(action)

            self.sta_item_menu.addMenu(ext_menu)

            self.action = self.sta_item_menu.exec_(self.ui.station_view.viewport().mapToGlobal(position))

    def on_event_tree_widget_itemClicked(self, item, column):
        t = item.type()
        if t not in EVENT_VIEW_ITEM_TYPES.values():
            return

        text = str(item.text(0))

        res_id = obspy.core.event.ResourceIdentifier(id=text)

        obj = res_id.get_referred_object()
        if obj is None:
            self.events = self.ds.events
        self.ui.events_text_browser.setPlainText(
            str(res_id.get_referred_object()))

        if t == EVENT_VIEW_ITEM_TYPES["EVENT"]:
            event = text
        elif t == EVENT_VIEW_ITEM_TYPES["ORIGIN"]:
            event = str(item.parent().parent().text(0))
        elif t == EVENT_VIEW_ITEM_TYPES["MAGNITUDE"]:
            event = str(item.parent().parent().text(0))
        elif t == EVENT_VIEW_ITEM_TYPES["FOCMEC"]:
            event = str(item.parent().parent().text(0))

        js_call = "highlightEvent('{event_id}');".format(event_id=event)
        self.ui.events_web_view.page().mainFrame().evaluateJavaScript(js_call)

    def event_tree_widget_rightClicked(self, position):
        item = self.ui.event_tree_widget.selectedItems()[0]

        t = item.type()
        if t not in EVENT_VIEW_ITEM_TYPES.values():
            return
        text = str(item.text(0))
        res_id = obspy.core.event.ResourceIdentifier(id=text)

        obj = res_id.get_referred_object()
        if obj is None:
            self.events = self.ds.events
        self.ui.events_text_browser.setPlainText(
            str(res_id.get_referred_object()))

        if t == EVENT_VIEW_ITEM_TYPES["EVENT"]:
            event = text
        elif t == EVENT_VIEW_ITEM_TYPES["ORIGIN"]:
            event = str(item.parent().parent().text(0))
        elif t == EVENT_VIEW_ITEM_TYPES["MAGNITUDE"]:
            event = str(item.parent().parent().text(0))
        elif t == EVENT_VIEW_ITEM_TYPES["FOCMEC"]:
            event = str(item.parent().parent().text(0))

        self.event_item_menu = QtGui.QMenu(self)

        action = QtGui.QAction('Plot Event', self)
        # Connect the triggered menu object to a function passing an extra variable
        action.triggered.connect(lambda: self.analyse_earthquake(obj))
        self.event_item_menu.addAction(action)

        # ext_menu = QtGui.QMenu('Extract Time Interval', self)
        #
        # # Add actions for each tag for station
        # for wave_tag in wave_tag_list:
        #     action = QtGui.QAction(wave_tag, self)
        #     # Connect the triggered menu object to a function passing an extra variable
        #     action.triggered.connect(lambda: self.extract_from_continuous(False, sta=station, wave_tag=wave_tag))
        #     ext_menu.addAction(action)
        #
        # self.event_item_menu.addMenu(ext_menu)
        #


        self.action = self.event_item_menu.exec_(
            self.ui.event_tree_widget.viewport().mapToGlobal(position))

    def on_auxiliary_data_tree_view_itemClicked(self, item, column):
        t = item.type()
        if t != AUX_DATA_ITEM_TYPES["DATA_ITEM"]:
            return

        tag = str(item.text(0))

        def recursive_path(item):
            p = item.parent()
            if p is None:
                return []
            path = [str(p.text(0))]
            path.extend(recursive_path(p))
            return path

        # Find the full path.
        path = recursive_path(item)
        path.reverse()

        graph = self.ui.auxiliary_data_graph
        graph.clear()

        group = self.ds.auxiliary_data["/".join(path)]
        aux_data = group[tag]

        if len(aux_data.data.shape) == 1 and path[0] != "Files":
            plot = graph.addPlot(title="%s/%s" % ("/".join(path), tag))
            plot.show()
            plot.plot(aux_data.data.value)
            self.ui.auxiliary_data_stacked_widget.setCurrentWidget(
                self.ui.auxiliary_data_graph_page)
        # Files are a bit special.
        elif len(aux_data.data.shape) == 1 and path[0] == "Files":
            self.ui.auxiliary_file_browser.setPlainText(
                aux_data.file.read().decode())
            self.ui.auxiliary_data_stacked_widget.setCurrentWidget(
                self.ui.auxiliary_data_file_page)
        # 2D Shapes.
        elif len(aux_data.data.shape) == 2:
            img = pg.ImageItem(border="#3D8EC9")
            img.setImage(aux_data.data.value)
            vb = graph.addViewBox()
            vb.setAspectLocked(True)
            vb.addItem(img)
            self.ui.auxiliary_data_stacked_widget.setCurrentWidget(
                self.ui.auxiliary_data_graph_page)
        # Anything else is currently not supported.
        else:
            raise NotImplementedError

        # Show the parameters.
        tv = self.ui.auxiliary_data_detail_table_view
        tv.clear()

        self._state["current_auxiliary_data_provenance_id"] = \
            aux_data.provenance_id
        if aux_data.provenance_id:
            self.ui.show_auxiliary_provenance_button.setEnabled(True)
        else:
            self.ui.show_auxiliary_provenance_button.setEnabled(False)

        tv.setRowCount(len(aux_data.parameters))
        tv.setColumnCount(2)
        tv.setHorizontalHeaderLabels(["Parameter", "Value"])
        tv.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        tv.verticalHeader().hide()

        for _i, key in enumerate(sorted(aux_data.parameters.keys())):
            key_item = QtGui.QTableWidgetItem(key)
            value_item = QtGui.QTableWidgetItem(str(aux_data.parameters[key]))

            tv.setItem(_i, 0, key_item)
            tv.setItem(_i, 1, value_item)

        # Show details about the data.
        details = [
            ("shape", str(aux_data.data.shape)),
            ("dtype", str(aux_data.data.dtype)),
            ("dimensions", str(len(aux_data.data.shape))),
            ("uncompressed size", sizeof_fmt(
                aux_data.data.dtype.itemsize * aux_data.data.size))]

        tv = self.ui.auxiliary_data_info_table_view
        tv.clear()

        tv.setRowCount(len(details))
        tv.setColumnCount(2)
        tv.setHorizontalHeaderLabels(["Attribute", "Value"])
        tv.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        tv.verticalHeader().hide()

        for _i, item in enumerate(details):
            key_item = QtGui.QTableWidgetItem(item[0])
            value_item = QtGui.QTableWidgetItem(item[1])

            tv.setItem(_i, 0, key_item)
            tv.setItem(_i, 1, value_item)

    def on_provenance_list_view_clicked(self, model_index):
        # Compat for different pyqt/sip versions.
        try:
            data = str(model_index.data().toString())
        except:
            data = str(model_index.data())

        self.show_provenance_document(data)

    def on_station_view_itemEntered(self, item):
        t = item.type()

        def get_station(item, parent=True):
            if parent:
                station = str(item.parent().text(0))
                if "." not in station:
                    station = item.parent().parent().text(0) + "." + station
            else:
                station = item.text(0)
                if "." not in station:
                    station = item.parent().text(0) + "." + station
            return station

        if t == STATION_VIEW_ITEM_TYPES["NETWORK"]:
            network = item.text(0)
            js_call = "highlightNetwork('{network}')".format(network=network)
            self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)
        elif t == STATION_VIEW_ITEM_TYPES["STATION"]:
            station = get_station(item, parent=False)
            js_call = "highlightStation('{station}')".format(station=station)
            self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)
        elif t == STATION_VIEW_ITEM_TYPES["STATIONXML"]:
            station = get_station(item)
            js_call = "highlightStation('{station}')".format(station=station)
            self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)
        elif t == STATION_VIEW_ITEM_TYPES["WAVEFORM"]:
            station = get_station(item)
            js_call = "highlightStation('{station}')".format(station=station)
            self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)
        else:
            pass

    def on_station_view_itemExited(self, *args):
        js_call = "setAllInactive()"
        self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)

    def query_sql_db(self, query, sql_filename, sta):
        # Open a new st object
        st = Stream()

        # Initialize (open/create) the sqlalchemy sqlite engine
        engine = create_engine('sqlite:///' + sql_filename)
        Session = sessionmaker()
        Session.configure(bind=engine)
        session = Session()

        for matched_waveform in session.query(Waveforms).filter(query):
            st += self.ds.waveforms[sta][matched_waveform.full_id]

        return st

    def extract_from_continuous(self, override, **kwargs):
        # Open a new st object
        self.st = Stream()

        # If override flag then we are calling this
        # method by using prev/next interval buttons
        if override:
            interval_tuple = (self.new_start_time.timestamp, self.new_end_time.timestamp)
            for _i, st_id in enumerate(kwargs['st_ids']):

                sta = str(st_id.split('.')[0])+'.'+str(st_id.split('.')[1])
                # Get the SQL file for the station
                SQL_filename = os.path.join(
                    os.path.dirname(self.filename),
                    str(st_id.split('.')[1]) + '.db')

                query_stmt = text("Waveforms.tag == :tag AND "
                                  "Waveforms.station_id == :stid AND ("
                                  "(Waveforms.starttime >= :start AND :end >= Waveforms.endtime) OR"
                                  "(Waveforms.starttime <= :end AND :end <= Waveforms.endtime) OR"
                                  "(Waveforms.starttime <= :start AND :start <= Waveforms.endtime))")

                query_stmt = query_stmt.bindparams(stid=st_id, start=interval_tuple[0], end=interval_tuple[1],
                                                   tag=kwargs['st_tags'][_i])

                ret_st = self.query_sql_db(query_stmt, SQL_filename, sta)

                self.st += ret_st

        else:
            # Launch the custom extract time dialog
            dlg = timeDialog(self)
            if dlg.exec_():
                values = dlg.getValues()
                interval_tuple = (values[0].timestamp, values[1].timestamp)

                # Get the SQL file for station
                SQL_filename = r""+os.path.dirname(self.filename) + '/' + str(kwargs['sta'].split('.')[1]) + '.db'

                query_stmt = text("Waveforms.tag == :tag AND ("
                                  "(Waveforms.starttime >= :start AND :end >= Waveforms.endtime) OR"
                                  "(Waveforms.starttime <= :end AND :end <= Waveforms.endtime) OR"
                                  "(Waveforms.starttime <= :start AND :start <= Waveforms.endtime))")

                query_stmt = query_stmt.bindparams(start=interval_tuple[0],end=interval_tuple[1],
                                                   tag=kwargs['wave_tag'])

                ret_st = self.query_sql_db(query_stmt, SQL_filename, kwargs['sta'])

                self.st += ret_st

        if self.st:
            # Attempt to merge all traces with matching ID'S in place
            self.st.merge()
            self.st.trim(starttime=UTCDateTime(interval_tuple[0]), endtime=UTCDateTime(interval_tuple[1]))
            self.update_waveform_plot()
        else:
            msg = QtGui.QMessageBox()
            msg.setIcon(QtGui.QMessageBox.Critical)
            msg.setText("No Data for Requested Time Interval")
            msg.setDetailedText("There are no waveforms to display for selected time interval:"
                                "\nStart Time = "+str(UTCDateTime(interval_tuple[0],precision=0))+
                                "\nEnd Time =   "+str(UTCDateTime(interval_tuple[1],precision=0)))
            msg.setWindowTitle("Extract Time Error")
            msg.setStandardButtons(QtGui.QMessageBox.Ok)
            msg.exec_()

    def analyse_earthquake(self, event_obj):
        # Get event catalogue
        self.event_cat = self.ds.events
        comp_list = ['*Z', '*N', '*E']


        # Launch the custom station/component selection dialog
        sel_dlg = selectionDialog(parent=self, sta_list=self.ds.waveforms.list())
        if sel_dlg.exec_():
            select_sta, bool_comp = sel_dlg.getSelected()
            query_comp = list(itertools.compress(comp_list, bool_comp))

            # Open up a new stream object
            self.st = Stream()

            # use the ifilter functionality to extract desired streams to visualize
            for station in self.ds.ifilter(
                    self.ds.q.station == [el.split('.')[1] for el in select_sta],
                    self.ds.q.channel == query_comp,
                    self.ds.q.event == event_obj):
                for filtered_id in station.list():
                    if filtered_id == 'StationXML':
                        continue
                    self.st += station[filtered_id]

            if self.st:
                # Get quake origin info
                origin_info = event_obj.preferred_origin() or event_obj.origins[0]

                # Iterate through traces
                for tr in self.st:
                    # Run Java Script to highlight all selected stations in station view
                    js_call = "highlightStation('{station}')".format(station=tr.stats.network + '.' +tr.stats.station)
                    self.ui.web_view.page().mainFrame().evaluateJavaScript(js_call)


                    # Get inventory for trace
                    inv = self.ds.waveforms[tr.stats.network + '.' +tr.stats.station].StationXML
                    sta_coords = inv.get_coordinates(tr.get_id())

                    dist, baz, _ = gps2dist_azimuth(sta_coords['latitude'],
                                                    sta_coords['longitude'],
                                                    origin_info.latitude,
                                                    origin_info.longitude)
                    dist_deg = kilometer2degrees(dist/1000.0)
                    tt_model = TauPyModel(model='iasp91')
                    arrivals = tt_model.get_travel_times(
                        origin_info.depth / 1000.0, dist_deg,
                        phase_list=('P',))

                    # Write info to trace header
                    tr.stats.distance = dist
                    tr.stats.ptt = arrivals[0]

                # Sort the st by distance from quake
                self.st.sort(keys=['distance'])


                self.update_waveform_plot()
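
A minimal, hypothetical launcher for a main window class like the one above, assuming PyQt4 and that the compiled asdf_sextant_window UI module is importable; the original application may wire its entry point differently:

if __name__ == "__main__":
    # Hypothetical entry point (not part of the original listing).
    import sys
    from PyQt4 import QtGui

    app = QtGui.QApplication(sys.argv)
    window = Window()
    window.show()
    sys.exit(app.exec_())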
Example #47
def preprocess(db, stations, comps, goal_day, params, responses=None):
    """
    Fetches data for each ``stations`` and each ``comps`` using the
    data_availability table in the database.

    To correct for instrument responses, make sure to set ``remove_response``
    to "Y" in the config and to provide the ``responses`` DataFrame.

    :Example:

    >>> from msnoise.api import connect, get_params, preload_instrument_responses
    >>> from msnoise.preprocessing import preprocess
    >>> db = connect()
    >>> params = get_params(db)
    >>> responses = preload_instrument_responses(db)
    >>> st = preprocess(db, ["YA.UV06","YA.UV10"], ["Z",], "2010-09-01", params, responses)
    >>> st
     2 Trace(s) in Stream:
    YA.UV06.00.HHZ | 2010-09-01T00:00:00.000000Z - 2010-09-01T23:59:59.950000Z | 20.0 Hz, 1728000 samples
    YA.UV10.00.HHZ | 2010-09-01T00:00:00.000000Z - 2010-09-01T23:59:59.950000Z | 20.0 Hz, 1728000 samples

    :type db: :class:`sqlalchemy.orm.session.Session`
    :param db: A :class:`~sqlalchemy.orm.session.Session` object, as
        obtained by :func:`msnoise.api.connect`.
    :type stations: list of str
    :param stations: a list of station names, in the format NET.STA.
    :type comps: list of str
    :param comps: a list of component names, in Z,N,E,1,2.
    :type goal_day: str
    :param goal_day: the day of data to load, ISO 8601 format: e.g. 2016-12-31.
    :type params: class
    :param params: an object containing the config parameters, as obtained by
        :func:`msnoise.api.get_params`.
    :type responses: :class:`pandas.DataFrame`
    :param responses: a DataFrame containing the instrument responses, as
        obtained by :func:`msnoise.api.preload_instrument_responses`.
    :rtype: :class:`obspy.core.stream.Stream`
    :return: A Stream object containing all traces.
    """
    datafiles = {}
    output = Stream()
    for station in stations:
        datafiles[station] = {}
        net, sta = station.split('.')
        gd = datetime.datetime.strptime(goal_day, '%Y-%m-%d')
        files = get_data_availability(db,
                                      net=net,
                                      sta=sta,
                                      starttime=gd,
                                      endtime=gd)
        for comp in comps:
            datafiles[station][comp] = []
        for file in files:
            if file.comp[-1] not in comps:
                continue
            fullpath = os.path.join(file.path, file.file)
            datafiles[station][file.comp[-1]].append(fullpath)

    for istation, station in enumerate(stations):
        net, sta = station.split(".")
        for comp in comps:
            files = eval("datafiles['%s']['%s']" % (station, comp))
            if len(files) != 0:
                logging.debug("%s.%s Reading %i Files" %
                              (station, comp, len(files)))
                stream = Stream()
                for file in sorted(files):
                    try:
                        st = read(file,
                                  dtype=np.float,
                                  starttime=UTCDateTime(gd),
                                  endtime=UTCDateTime(gd) + 86400)
                    except:
                        logging.debug("ERROR reading file %s" % file)
                        continue
                    for tr in st:
                        if len(tr.stats.channel) == 2:
                            tr.stats.channel += tr.stats.location
                            tr.stats.location = "00"
                    tmp = st.select(network=net, station=sta, component=comp)
                    if not len(tmp):
                        for tr in st:
                            tr.stats.network = net
                        st = st.select(network=net,
                                       station=sta,
                                       component=comp)
                    else:
                        st = tmp
                    for tr in st:
                        tr.data = tr.data.astype(np.float)
                        tr.stats.network = tr.stats.network.upper()
                        tr.stats.station = tr.stats.station.upper()
                        tr.stats.channel = tr.stats.channel.upper()

                    stream += st
                    del st
                stream.sort()
                try:
                    # HACK not super clean... should find a way to prevent the
                    # same trace id with different sps to occur
                    stream.merge(method=1,
                                 interpolation_samples=3,
                                 fill_value=None)
                except:
                    logging.debug("Error while merging...")
                    traceback.print_exc()
                    continue
                stream = stream.split()
                if not len(stream):
                    continue
                logging.debug("%s Checking sample alignment" % stream[0].id)
                for i, trace in enumerate(stream):
                    stream[i] = check_and_phase_shift(trace)

                logging.debug("%s Checking Gaps" % stream[0].id)
                if len(getGaps(stream)) > 0:
                    max_gap = params.preprocess_max_gap * stream[
                        0].stats.sampling_rate
                    only_too_long = False
                    while getGaps(stream) and not only_too_long:
                        too_long = 0
                        gaps = getGaps(stream)
                        for gap in gaps:
                            if int(gap[-1]) <= max_gap:
                                try:
                                    stream[gap[0]] = stream[gap[0]].__add__(
                                        stream[gap[1]],
                                        method=1,
                                        fill_value="interpolate")
                                    stream.remove(stream[gap[1]])
                                except:
                                    stream.remove(stream[gap[1]])

                                break
                            else:
                                too_long += 1
                        if too_long == len(gaps):
                            only_too_long = True

                stream = stream.split()
                for tr in stream:
                    if tr.stats.sampling_rate < (params.goal_sampling_rate -
                                                 1):
                        stream.remove(tr)
                taper_length = 20.0  # seconds
                for trace in stream:
                    if trace.stats.npts < 4 * taper_length * trace.stats.sampling_rate:
                        stream.remove(trace)
                    else:
                        trace.detrend(type="demean")
                        trace.detrend(type="linear")
                        trace.taper(max_percentage=None, max_length=1.0)

                if not len(stream):
                    logging.debug(" has only too small traces, skipping...")
                    continue

                for trace in stream:
                    logging.debug("%s Highpass at %.2f Hz" %
                                  (trace.id, params.preprocess_highpass))
                    trace.filter("highpass",
                                 freq=params.preprocess_highpass,
                                 zerophase=True)

                    if trace.stats.sampling_rate != params.goal_sampling_rate:
                        logging.debug("%s Lowpass at %.2f Hz" %
                                      (trace.id, params.preprocess_lowpass))
                        trace.filter("lowpass",
                                     freq=params.preprocess_lowpass,
                                     zerophase=True,
                                     corners=8)

                        if params.resampling_method == "Resample":
                            logging.debug(
                                "%s Downsample to %.1f Hz" %
                                (trace.id, params.goal_sampling_rate))
                            trace.data = resample(
                                trace.data, params.goal_sampling_rate /
                                trace.stats.sampling_rate, 'sinc_fastest')

                        elif params.resampling_method == "Decimate":
                            decimation_factor = trace.stats.sampling_rate / params.goal_sampling_rate
                            if not int(decimation_factor) == decimation_factor:
                                logging.warning(
                                    "%s CANNOT be decimated by an integer factor, consider using Resample or Lanczos methods"
                                    " Trace sampling rate = %i ; Desired CC sampling rate = %i"
                                    % (trace.id, trace.stats.sampling_rate,
                                       params.goal_sampling_rate))
                                sys.stdout.flush()
                                sys.exit()
                            logging.debug("%s Decimate by a factor of %i" %
                                          (trace.id, decimation_factor))
                            trace.data = trace.data[::int(decimation_factor)]

                        elif params.resampling_method == "Lanczos":
                            logging.debug(
                                "%s Downsample to %.1f Hz" %
                                (trace.id, params.goal_sampling_rate))
                            trace.data = np.array(trace.data)
                            trace.interpolate(
                                method="lanczos",
                                sampling_rate=params.goal_sampling_rate,
                                a=1.0)

                        trace.stats.sampling_rate = params.goal_sampling_rate

                if params.remove_response:
                    logging.debug('%s Removing instrument response' %
                                  stream[0].id)

                    response = responses[responses["channel_id"] ==
                                         stream[0].id]
                    if len(response) > 1:
                        response = response[
                            response["start_date"] <= UTCDateTime(gd)]
                    if len(response) > 1:
                        response = response[
                            response["end_date"] >= UTCDateTime(gd)]
                    elif len(response) == 0:
                        logging.info("No instrument response information "
                                     "for %s, skipping" % stream[0].id)
                        continue
                    try:
                        datalesspz = response["paz"].values[0]
                    except:
                        logging.error("Bad instrument response information "
                                      "for %s, skipping" % stream[0].id)
                        continue
                    stream.simulate(
                        paz_remove=datalesspz,
                        remove_sensitivity=True,
                        pre_filt=params.response_prefilt,
                        paz_simulate=None,
                    )
                for tr in stream:
                    tr.data = tr.data.astype(np.float32)
                output += stream
                del stream
            del files
    clean_scipy_cache()
    return output
Example #48
def preprocess(db, stations, comps, goal_day, params, responses=None):
    """
    Fetches data for each ``stations`` and each ``comps`` using the
    data_availability table in the database.

    To correct for instrument responses, make sure to set ``remove_response``
    to "Y" in the config and to provide the ``responses`` DataFrame.

    :Example:

    >>> from msnoise.api import connect, get_params, preload_instrument_responses
    >>> from msnoise.preprocessing import preprocess
    >>> db = connect()
    >>> params = get_params(db)
    >>> responses = preload_instrument_responses(db)
    >>> st = preprocess(db, ["YA.UV06","YA.UV10"], ["Z",], "2010-09-01", params, responses)
    >>> st
     2 Trace(s) in Stream:
    YA.UV06.00.HHZ | 2010-09-01T00:00:00.000000Z - 2010-09-01T23:59:59.950000Z | 20.0 Hz, 1728000 samples
    YA.UV10.00.HHZ | 2010-09-01T00:00:00.000000Z - 2010-09-01T23:59:59.950000Z | 20.0 Hz, 1728000 samples

    :type db: :class:`sqlalchemy.orm.session.Session`
    :param db: A :class:`~sqlalchemy.orm.session.Session` object, as
        obtained by :func:`msnoise.api.connect`.
    :type stations: list of str
    :param stations: a list of station names, in the format NET.STA.
    :type comps: list of str
    :param comps: a list of component names, in Z,N,E,1,2.
    :type goal_day: str
    :param goal_day: the day of data to load, ISO 8601 format: e.g. 2016-12-31.
    :type params: class
    :param params: an object containing the config parameters, as obtained by
        :func:`msnoise.api.get_params`.
    :type responses: :class:`pandas.DataFrame`
    :param responses: a DataFrame containing the instrument responses, as
        obtained by :func:`msnoise.api.preload_instrument_responses`.
    :rtype: :class:`obspy.core.stream.Stream`
    :return: A Stream object containing all traces.
    """
    datafiles = {}
    output = Stream()
    for station in stations:
        datafiles[station] = {}
        net, sta = station.split('.')
        gd = datetime.datetime.strptime(goal_day, '%Y-%m-%d')
        files = get_data_availability(
            db, net=net, sta=sta, starttime=gd, endtime=gd)
        for comp in comps:
            datafiles[station][comp] = []
        for file in files:
            if file.comp[-1] not in comps:
                continue
            fullpath = os.path.join(file.path, file.file)
            datafiles[station][file.comp[-1]].append(fullpath)

    for istation, station in enumerate(stations):
        net, sta = station.split(".")
        for comp in comps:
            files = eval("datafiles['%s']['%s']" % (station, comp))
            if len(files) != 0:
                logger.debug("%s.%s Reading %i Files" %
                              (station, comp, len(files)))
                stream = Stream()
                for file in sorted(files):
                    try:
                        st = read(file, dtype=np.float,
                              starttime=UTCDateTime(gd),
                              endtime=UTCDateTime(gd)+86400)
                    except:
                        logger.debug("ERROR reading file %s" % file)
                        continue
                    for tr in st:
                        if len(tr.stats.channel) == 2:
                            tr.stats.channel += tr.stats.location
                            tr.stats.location = "00"
                    tmp = st.select(network=net, station=sta, component=comp)
                    if not len(tmp):
                        for tr in st:
                            tr.stats.network = net
                        st = st.select(network=net, station=sta, component=comp)
                    else:
                        st = tmp
                    for tr in st:
                        tr.data = tr.data.astype(np.float)
                        tr.stats.network = tr.stats.network.upper()
                        tr.stats.station = tr.stats.station.upper()
                        tr.stats.channel = tr.stats.channel.upper()

                    stream += st
                    del st
                stream.sort()
                try:
                    # HACK not super clean... should find a way to prevent the
                    # same trace id with different sps to occur
                    stream.merge(method=1, interpolation_samples=3, fill_value=None)
                except:
                    logger.debug("Error while merging...")
                    traceback.print_exc()
                    continue
                stream = stream.split()
                if not len(stream):
                    continue
                logger.debug("%s Checking sample alignment" % stream[0].id)
                for i, trace in enumerate(stream):
                    stream[i] = check_and_phase_shift(trace)

                logger.debug("%s Checking Gaps" % stream[0].id)
                if len(getGaps(stream)) > 0:
                    max_gap = params.preprocess_max_gap*stream[0].stats.sampling_rate

                    gaps = getGaps(stream)
                    while len(gaps):
                        too_long = 0
                        for gap in gaps:
                            if int(gap[-1]) <= max_gap:
                                try:
                                    stream[gap[0]] = stream[gap[0]].__add__(stream[gap[1]], method=1,
                                                                        fill_value="interpolate")
                                    stream.remove(stream[gap[1]])
                                except:
                                    stream.remove(stream[gap[1]])

                                break
                            else:
                                too_long += 1

                        if too_long == len(gaps):
                            break
                        gaps = getGaps(stream)
                    del gaps

                stream = stream.split()
                for tr in stream:
                    if tr.stats.sampling_rate < (params.goal_sampling_rate-1):
                        stream.remove(tr)
                taper_length = 20.0  # seconds
                for trace in stream:
                    if trace.stats.npts < 4 * taper_length * trace.stats.sampling_rate:
                        stream.remove(trace)
                    else:
                        trace.detrend(type="demean")
                        trace.detrend(type="linear")
                        trace.taper(max_percentage=None, max_length=1.0)

                if not len(stream):
                    logger.debug(" has only too small traces, skipping...")
                    continue

                for trace in stream:
                    logger.debug(
                        "%s Highpass at %.2f Hz" % (trace.id, params.preprocess_highpass))
                    trace.filter("highpass", freq=params.preprocess_highpass, zerophase=True, corners=4)

                    if trace.stats.sampling_rate != params.goal_sampling_rate:
                        logger.debug(
                            "%s Lowpass at %.2f Hz" % (trace.id, params.preprocess_lowpass))
                        trace.filter("lowpass", freq=params.preprocess_lowpass, zerophase=True, corners=8)

                        if params.resampling_method == "Resample":
                            logger.debug("%s Downsample to %.1f Hz" %
                                          (trace.id, params.goal_sampling_rate))
                            trace.data = resample(
                                trace.data, params.goal_sampling_rate / trace.stats.sampling_rate, 'sinc_fastest')

                        elif params.resampling_method == "Decimate":
                            decimation_factor = trace.stats.sampling_rate / params.goal_sampling_rate
                            if not int(decimation_factor) == decimation_factor:
                                logger.warning("%s CANNOT be decimated by an integer factor, consider using Resample or Lanczos methods"
                                                " Trace sampling rate = %i ; Desired CC sampling rate = %i" %
                                                (trace.id, trace.stats.sampling_rate, params.goal_sampling_rate))
                                sys.stdout.flush()
                                sys.exit()
                            logger.debug("%s Decimate by a factor of %i" %
                                          (trace.id, decimation_factor))
                            trace.data = trace.data[::int(decimation_factor)]

                        elif params.resampling_method == "Lanczos":
                            logger.debug("%s Downsample to %.1f Hz" %
                                          (trace.id, params.goal_sampling_rate))
                            trace.data = np.array(trace.data)
                            trace.interpolate(method="lanczos", sampling_rate=params.goal_sampling_rate, a=1.0)

                        trace.stats.sampling_rate = params.goal_sampling_rate
                    del trace

                if params.remove_response:
                    logger.debug('%s Removing instrument response'%stream[0].id)

                    response = responses[responses["channel_id"] == stream[0].id]
                    if len(response) > 1:
                        response = response[response["start_date"] <= UTCDateTime(gd)]
                    if len(response) > 1:
                        response = response[response["end_date"] >= UTCDateTime(gd)]
                    elif len(response) == 0:
                        logger.info("No instrument response information "
                                     "for %s, skipping" % stream[0].id)
                        continue
                    try:
                        datalesspz = response["paz"].values[0]
                    except:
                        logger.error("Bad instrument response information "
                                      "for %s, skipping" % stream[0].id)
                        continue
                    stream.simulate(paz_remove=datalesspz,
                                    remove_sensitivity=True,
                                    pre_filt=params.response_prefilt,
                                    paz_simulate=None, )
                for tr in stream:
                    tr.data = tr.data.astype(np.float32)
                output += stream
                del stream
            del files
    clean_scipy_cache()
    return output