Example 1
    def create_trace(self, channel, stats, data):
        """Utility to create a new trace object.

        Parameters
        ----------
        channel : str
            channel name.
        stats : obspy.core.Stats
            channel metadata to clone.
        data : numpy.array
            channel data.

        Returns
        -------
        obspy.core.Trace
            trace containing data and metadata.
        """
        stats = Stats(stats)
        if self.data_type is None:
            stats.data_type = 'adjusted'
        else:
            stats.data_type = self.data_type
        if self.location is None:
            stats.location = 'A0'
        else:
            stats.location = self.location

        trace = super(AdjustedAlgorithm,
                      self).create_trace(channel, stats, data)
        return trace
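
The method above follows a simple pattern: clone the incoming Stats, override a few fields, then build the Trace. The sketch below reproduces that pattern with plain ObsPy; the network and station values are made up, while 'adjusted' and 'A0' are the defaults used in the code above.

import numpy as np
from obspy import Trace, UTCDateTime
from obspy.core import Stats

# metadata as it might arrive from an input trace (hypothetical values)
original = Stats()
original.network = 'NT'
original.station = 'BOU'
original.starttime = UTCDateTime('2024-01-01T00:00:00')
original.sampling_rate = 1.0

stats = Stats(original)          # clone, leaving the original untouched
stats.channel = 'H'
stats.data_type = 'adjusted'     # Stats accepts arbitrary extra attributes
stats.location = 'A0'

data = np.zeros(60)
stats.npts = len(data)
trace = Trace(data=data, header=stats)
print(trace.id)                  # NT.BOU.A0.H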
Example 3
    def get_obspy_trace(self):
        """
        Return class contents as obspy.Trace object
        """
        stat = Stats()
        stat.network = self.net.split(b'\x00')[0].decode()
        stat.station = self.sta.split(b'\x00')[0].decode()
        location = self.loc.split(b'\x00')[0].decode()
        if location == '--':
            stat.location = ''
        else:
            stat.location = location
        stat.channel = self.chan.split(b'\x00')[0].decode()
        stat.starttime = UTCDateTime(self.start)
        stat.sampling_rate = self.rate
        stat.npts = len(self.data)
        return Trace(data=self.data, header=stat)
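
The only format-specific work here is decoding fixed-width, NUL-padded byte fields and mapping the '--' placeholder to an empty location code. A stand-alone illustration with made-up field values:

# NUL-padded bytes as they might arrive from a packed binary header
raw_sta = b'ANMO\x00\x00'
raw_loc = b'--\x00\x00'

station = raw_sta.split(b'\x00')[0].decode()     # 'ANMO'
location = raw_loc.split(b'\x00')[0].decode()    # '--'
location = '' if location == '--' else location  # '--' means empty location

print(station, repr(location))                   # ANMO ''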
Example 5
def readSLIST(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII SLIST file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read('/path/to/slist.ascii')
    """
    with open(filename, 'rt') as fh:
        # read file and split text into channels
        buf = []
        key = False
        for line in fh:
            if line.isspace():
                # blank line
                continue
            elif line.startswith('TIMESERIES'):
                # new header line
                key = True
                buf.append((line, StringIO()))
            elif headonly:
                # skip data for option headonly
                continue
            elif key:
                # data entry - may be written in multiple columns
                buf[-1][1].write(line.strip() + ' ')
    # create ObsPy stream object
    stream = Stream()
    for header, data in buf:
        # create Stats
        stats = Stats()
        parts = header.replace(',', '').split()
        temp = parts[1].split('_')
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        stats.mseed = AttribDict({'dataquality': temp[4]})
        stats.ascii = AttribDict({'unit': parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            data = _parse_data(data, parts[8])
            stream.append(Trace(data=data, header=stats))
    return stream
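
Most of the work in this reader is slicing the TIMESERIES header line into Stats fields. The indexing is easier to follow against a sample header line; the values below are illustrative, in the layout the code expects:

header = ('TIMESERIES NL_HGN_00_BHZ_R, 12 samples, 40 sps, '
          '2003-05-29T02:13:22.043400, SLIST, INTEGER, Counts')
parts = header.replace(',', '').split()

net, sta, loc, cha, quality = parts[1].split('_')
print(net, sta, loc, cha, quality)    # NL HGN 00 BHZ R
print(parts[2], parts[4], parts[6])   # npts, sampling rate, start time
print(parts[8], parts[-1])            # INTEGER Counts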
Example 6
def readSLIST(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII SLIST file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy.core import read
    >>> st = read('/path/to/slist.ascii')
    """
    fh = open(filename, 'rt')
    # read file and split text into channels
    headers = {}
    key = None
    for line in fh:
        if line.isspace():
            # blank line
            continue
        elif line.startswith('TIMESERIES'):
            # new header line
            key = line
            headers[key] = StringIO()
        elif headonly:
            # skip data for option headonly
            continue
        elif key:
            # data entry - may be written in multiple columns
            headers[key].write(line.strip() + ' ')
    fh.close()
    # create ObsPy stream object
    stream = Stream()
    for header, data in headers.items():
        # create Stats
        stats = Stats()
        parts = header.replace(',', '').split()
        temp = parts[1].split('_')
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        stats.mseed = AttribDict({'dataquality': temp[4]})
        stats.ascii = AttribDict({'unit': parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            # parse data
            data.seek(0)
            if parts[8] == 'INTEGER':
                data = loadtxt(data, dtype='int', ndmin=1)
            elif parts[8] == 'FLOAT':
                data = loadtxt(data, dtype='float32', ndmin=1)
            else:
                raise NotImplementedError
            stream.append(Trace(data=data, header=stats))
    return stream
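
The data block gathered for each TIMESERIES key is just whitespace-separated numbers, so numpy.loadtxt can parse it directly; ndmin=1 keeps a single-sample trace as a 1-D array instead of collapsing it to a scalar. A quick check with hand-written values:

from io import StringIO
from numpy import loadtxt

data = StringIO('288 300 292 285 ')
data.seek(0)
samples = loadtxt(data, dtype='int', ndmin=1)
print(samples)    # [288 300 292 285]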
Example 7
def _read_tspair(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII TSPAIR file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read('/path/to/tspair.ascii')
    """
    with open(filename, 'rt') as fh:
        # read file and split text into channels
        buf = []
        key = False
        for line in fh:
            if line.isspace():
                # blank line
                continue
            elif line.startswith('TIMESERIES'):
                # new header line
                key = True
                buf.append((line, io.StringIO()))
            elif headonly:
                # skip data for option headonly
                continue
            elif key:
                # data entry - may be written in multiple columns
                buf[-1][1].write(line.strip().split()[-1] + ' ')
    # create ObsPy stream object
    stream = Stream()
    for header, data in buf:
        # create Stats
        stats = Stats()
        parts = header.replace(',', '').split()
        temp = parts[1].split('_')
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        # don't put blank quality code into 'mseed' dictionary
        # (quality code is mentioned as optional by format specs anyway)
        if temp[4]:
            stats.mseed = AttribDict({'dataquality': temp[4]})
        stats.ascii = AttribDict({'unit': parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            data = _parse_data(data, parts[8])
            stream.append(Trace(data=data, header=stats))
    return stream
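
TSPAIR differs from SLIST only in the layout of the data block: each line holds a timestamp followed by a single sample, which is why the reader keeps just the last whitespace-separated token of every data line. For example (values made up):

line = '2003-05-29T02:13:22.043400  288'
print(line.strip().split()[-1])    # '288'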
Example 8
def readTSPAIR(filename, headonly=False, **kwargs):  # @UnusedVariable
    """
    Reads an ASCII TSPAIR file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: ASCII file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the headers. This is most useful
        for scanning available data in huge (temporary) data sets.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy import read
    >>> st = read('/path/to/tspair.ascii')
    """
    fh = open(filename, "rt")
    # read file and split text into channels
    headers = {}
    key = None
    for line in fh:
        if line.isspace():
            # blank line
            continue
        elif line.startswith("TIMESERIES"):
            # new header line
            key = line
            headers[key] = StringIO()
        elif headonly:
            # skip data for option headonly
            continue
        elif key:
            # data entry - may be written in multiple columns
            headers[key].write(line.strip().split()[-1] + " ")
    fh.close()
    # create ObsPy stream object
    stream = Stream()
    for header, data in headers.items():
        # create Stats
        stats = Stats()
        parts = header.replace(",", "").split()
        temp = parts[1].split("_")
        stats.network = temp[0]
        stats.station = temp[1]
        stats.location = temp[2]
        stats.channel = temp[3]
        stats.sampling_rate = parts[4]
        # quality only used in MSEED
        stats.mseed = AttribDict({"dataquality": temp[4]})
        stats.ascii = AttribDict({"unit": parts[-1]})
        stats.starttime = UTCDateTime(parts[6])
        stats.npts = parts[2]
        if headonly:
            # skip data
            stream.append(Trace(header=stats))
        else:
            data = _parse_data(data, parts[8])
            stream.append(Trace(data=data, header=stats))
    return stream
Example 9
def raw_import(gzip_filename):
    """
    Makes a 'raw' stream file from the gzipped csv file.
    The csv file has been downloaded from the JAXA website.
    The method makes a raw stream which does not yet have the frames
    reconstructed.

    :type gzip_filename: str
    :param gzip_filename: gzipped filename of the CSV file to be read.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    """

    # read the gzipped csv file
    with gzip.open(gzip_filename, 'rt') as fh:
        # read file
        buf = []
        header = next(fh).split(',')

        # read the header
        # it should contain either 1 channel or 3
        if len(header) == 8:
            # the RESP files use either 'MH1', 'MH2', 'MHZ'
            # the JAXA files use 'LPX', 'LPY', 'LPZ'
            # X should point north, Y east, but this is not always the case
            # so we rename LPX to MH1, and LPY to MH2
            channels = ['MH1', 'MH2', 'MHZ']
            raw_channels = ['_M1', '_M2', '_MZ']
            for line in fh:
                temp = line.split(',')

                try:
                    temp[4] = UTCDateTime(temp[4])
                except ValueError as e:
                    # this is a specific error which is found in the csv file
                    if temp[4] == '1975-49-11 19:13:04.232000':
                        temp[4] = UTCDateTime('1975-09-11 19:13:04.232000')
                    else:
                        raise

                try:
                    temp[0] = int(temp[0])
                except ValueError as e:
                    # this is a specific error which is found in the csv file
                    if temp[4] == UTCDateTime(
                            '1975-09-15 12:53:36.849000') and temp[0] == '<3':
                        temp[0] = 83
                    else:
                        raise

                buf.append(
                    (temp[1], temp[2], temp[4], int(temp[0]), int(temp[3]),
                     int(temp[5]), int(temp[6]), int(temp[7])))

        elif len(header) == 6:
            channels = ['SPZ']
            raw_channels = ['_SZ']
            for line in fh:
                # check the manual list of points which have been removed
                if line in remove_manually:
                    continue

                temp = line.split(',')
                # the original order:
                # frame_count, ap_station, ground_station, nc, time, spz
                # make a tuple (in a new order so that it can be sorted):
                # ap_station, ground_station, time, frame_count, nc, spz
                buf.append(
                    (temp[1], temp[2], UTCDateTime(temp[4]), int(temp[0]),
                     int(temp[3]), int(temp[5])))

    # sort by ap_station, ground_station and time (and also everything else,
    # but that won't matter)
    buf.sort()

    stream = Stream()
    data_x = []
    data_y = []
    data_z = []
    data_sz = []
    abs_times = []
    frame_count_ncs = []
    corr_frame_count_ncs = []

    stats = Stats()
    stats.delta = DELTA
    network = 'XA'
    last_id = None

    for data in buf:

        # read in the data from the buffer
        station = data[0].rjust(3, 'S')
        ground_station = data[1].rjust(2, '0')
        time = data[2]

        frame_count = data[3]
        nc = data[4]
        # create a combination of frame count and nc - from 0.0 to 89.75
        frame_count_nc = float(frame_count) + (float(nc) - 1.) * 0.25

        id = "{0:s}.{1:s}.{2:s}.{3:s}".format(network, station, ground_station,
                                              channels[0])

        # check whether we are adding to an existing one, or creating a new one
        if (last_id is None or last_id != id):
            # before creating the new one, add previous trace(s) to the stream
            if len(abs_times) > 0:
                _make_traces(stream=stream,
                             stats=stats,
                             header=header,
                             channels=raw_channels,
                             data_x=data_x,
                             data_y=data_y,
                             data_z=data_z,
                             data_sz=data_sz,
                             abs_times=abs_times,
                             frame_count_ncs=frame_count_ncs)

            data_x = []
            data_y = []
            data_z = []
            data_sz = []
            abs_times = []
            frame_count_ncs = []

            stats = Stats()
            stats.delta = DELTA
            stats.starttime = time
            stats.network = network
            stats.station = station
            stats.location = ground_station

        # add the data from any line
        if len(header) == 8:
            data_x.append(data[5])
            data_y.append(data[6])
            data_z.append(data[7])
        else:
            data_sz.append(data[5])
        abs_times.append(time.timestamp)
        frame_count_ncs.append(frame_count_nc)

        last_id = id

    # add the last one
    if len(abs_times) > 0:
        _make_traces(stream=stream,
                     stats=stats,
                     header=header,
                     channels=raw_channels,
                     data_x=data_x,
                     data_y=data_y,
                     data_z=data_z,
                     data_sz=data_sz,
                     abs_times=abs_times,
                     frame_count_ncs=frame_count_ncs)

    return stream
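
One detail worth spelling out is the combined frame counter: frame_count runs over whole frames and nc (1 to 4, judging by the 0.25 step) over positions within a frame, so frame_count + (nc - 1) * 0.25 increases in steps of 0.25 from 0.0 to 89.75, which makes gaps easy to spot. A quick check:

for frame_count, nc in [(0, 1), (0, 2), (1, 1), (89, 4)]:
    print(frame_count + (nc - 1) * 0.25)    # 0.0, 0.25, 1.0, 89.75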
Example 10
    # Set the correct year, month, and day for Time_stamps
    sttime = UTCDateTime(Time_stamps[0])
    endtime = UTCDateTime(Time_stamps[len(Time_stamps) - 1])
    sttime._set_year(2017)
    endtime._set_year(2017)
    sttime._set_month(8)
    endtime._set_month(8)
    sttime._set_day(13 + UTCDateTime(Time_stamps[0]).day)
    endtime._set_day(13 + UTCDateTime(Time_stamps[len(Time_stamps) - 1]).day)

    # Define stats
    stats = Stats()
    stats.starttime = sttime
    stats.station = station
    stats.network = 'NT'
    stats.location = 'R0'
    stats.data_interval = '256Hz'
    stats.delta = .00390625
    stats.data_type = 'variation'

    # Create list of arrays and channel names and initialize counter k
    arrays = [Hx, Hy, Ex, Ey]
    k = 0

    # Loop over channels to create an obspy stream of the data
    for ar in arrays:
        stats.npts = len(ar)
        stats.channel = channels[k]
        ar = np.asarray(ar)
        trace = Trace(ar, stats)
        stream += trace
        k += 1
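
A self-contained version of the same pattern, with made-up channel names and random data: one Stats object can be reused for every channel because Trace copies the header it is given, so each trace keeps its own metadata.

import numpy as np
from obspy import Stream, Trace, UTCDateTime
from obspy.core import Stats

channels = ['H', 'E', 'Z', 'F']                  # hypothetical channel names
arrays = [np.random.rand(256) for _ in channels]

stats = Stats()
stats.starttime = UTCDateTime('2017-08-13T00:00:00')
stats.network = 'NT'
stats.station = 'BOU'
stats.delta = 1.0 / 256

stream = Stream()
for channel, ar in zip(channels, arrays):
    stats.npts = len(ar)
    stats.channel = channel
    stream += Trace(np.asarray(ar), stats)       # header is copied per trace

print(stream)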
Example 11
    def save_wave(self):

        # Fetch a wave from Ring 0
        wave = self.ring2buff.get_wave(0)

        # if wave is empty return
        if wave == {}:
            return

        # Lets try to buffer with python dictionaries and obspy
        name = wave["station"] + '.' + wave["channel"] + '.' + wave[
            "network"] + '.' + wave["location"]

        if name in self.wave_buffer:

            # Determine max samples for buffer
            max_samp = wave["samprate"] * 60 * self.minutes

            # Create a header:
            wavestats = Stats()
            wavestats.station = wave["station"]
            wavestats.network = wave["network"]
            wavestats.channel = wave["channel"]
            wavestats.location = wave["location"]
            wavestats.sampling_rate = wave["samprate"]
            wavestats.starttime = UTCDateTime(wave['startt'])

            # Create a trace
            wavetrace = Trace(header=wavestats)
            wavetrace.data = wave["data"]

            # Try to append data to buffer, if gap shutdown.
            try:
                self.wave_buffer[name].append(wavetrace,
                                              gap_overlap_check=True)
            except TypeError as err:
                logger.warning(err)
                self.runs = False
            except:
                # any other error: stop the run loop, then re-raise
                self.runs = False
                raise

            # Debug data
            if self.debug:
                logger.info("Station Channel combo is in buffer:")
                logger.info(name)
                logger.info("Size:")
                logger.info(self.wave_buffer[name].count())
                logger.debug("Data:")
                logger.debug(self.wave_buffer[name])

        else:
            # First instance of data in buffer, create a header:
            wavestats = Stats()
            wavestats.station = wave["station"]
            wavestats.network = wave["network"]
            wavestats.channel = wave["channel"]
            wavestats.location = wave["location"]
            wavestats.sampling_rate = wave["samprate"]
            wavestats.starttime = UTCDateTime(wave['startt'])

            # Create a trace
            wavetrace = Trace(header=wavestats)
            wavetrace.data = wave["data"]

            # Create a RTTrace
            rttrace = RtTrace(int(self.minutes * 60))
            self.wave_buffer[name] = rttrace

            # Append data
            self.wave_buffer[name].append(wavetrace, gap_overlap_check=True)

            # Debug data
            if self.debug:
                logger.info("First instance of station/channel:")
                logger.info(name)
                logger.info("Size:")
                logger.info(self.wave_buffer[name].count())
                logger.debug("Data:")
                logger.debug(self.wave_buffer[name])
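
Buffering aside, the header construction is plain ObsPy: every field of the wave dictionary maps one-to-one onto a Stats attribute. A minimal sketch with a hypothetical wave dictionary (keys follow the ones used above):

import numpy as np
from obspy import Trace, UTCDateTime
from obspy.core import Stats

wave = {                          # made-up values for illustration
    'station': 'TST', 'network': 'XX', 'channel': 'HHZ', 'location': '--',
    'samprate': 100.0, 'startt': 0.0,
    'data': np.zeros(100, dtype=np.int32),
}

wavestats = Stats()
wavestats.station = wave['station']
wavestats.network = wave['network']
wavestats.channel = wave['channel']
wavestats.location = wave['location']
wavestats.sampling_rate = wave['samprate']
wavestats.starttime = UTCDateTime(wave['startt'])

wavetrace = Trace(header=wavestats)
wavetrace.data = wave['data']     # assigning data also updates stats.npts
print(wavetrace)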