Example #1
    def _init_header(self, path):
        config = _load_config(path)
        metadata = config['metadata']
        header = Stats()
        config_key = self._config['header_sampling_rate_key']
        try:
            header.sampling_rate = float(metadata[config_key])
        except KeyError:
            raise KeyError("The following key was not found in the metadata: " +
                           "{}. Did you set the correct ".format(config_key) +
                           "'sample rate metadata key' in PAL H5 Output module?")
        header.npts = int(metadata[self._config['header_samples_per_record_key']]) - 1
        try:
            if self._config['dd_300']:
                header.calib = metadata['dd_300_calibration']
            elif self._config['dd_900']:
                header.calib = metadata['dd_900_calibration']
            elif self._config['vd_08']:
                header.calib = metadata['vd_08_calibration']
            elif self._config['vd_09']:
                header.calib = metadata['vd_09_calibration']
        except KeyError:
            # calibration metadata is optional; leave header.calib unset
            pass
        header.comments = str(config['comments'])
        header.place = metadata

        if self._config['header_extra1_name'] != '' and self._config['header_extra1_val'] != '':
            header[self._config['header_extra1_name']] = self._config['header_extra1_val']
        if self._config['header_extra2_name'] != '' and self._config['header_extra2_val'] != '':
            header[self._config['header_extra2_name']] = self._config['header_extra2_val']
        return header
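Note: the header[...] assignments above rely on obspy's Stats being an attribute dictionary that accepts arbitrary extra keys next to the standard ones. A minimal standalone sketch (the extra field name and value are hypothetical):

from obspy.core.trace import Stats

header = Stats()
header.sampling_rate = 10e6            # standard field
header['laser_power'] = '0.5 mW'       # hypothetical extra field
assert header.laser_power == '0.5 mW'  # readable as an attribute too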
Example #2
def read_IMS_ASCII(path, net='', **kwargs):
    """
    read a IMS_ASCII seismogram from a single station
    :param path: path to file
    :return: uquake.core.Stream
    """

    data = np.loadtxt(path, delimiter=',', skiprows=1)
    stats = Stats()

    with open(path) as fid:
        field = fid.readline().split(',')

    stats.sampling_rate = float(field[1])
    # trigger time: epoch seconds plus a microsecond remainder
    timetmp = datetime.fromtimestamp(float(field[5])) + \
        timedelta(seconds=float(field[6]) / 1e6)

    trgtime_UTC = UTCDateTime(timetmp)
    stats.starttime = trgtime_UTC - float(field[10]) / stats.sampling_rate
    stats.npts = len(data)

    stats.station = field[8]
    stats.network = net

    traces = []
    component = np.array(['X', 'Y', 'Z'])

    for k, dt in enumerate(data.T):
        stats.channel = str(component[k])
        traces.append(Trace(data=np.array(dt), header=stats))

    return Stream(traces=traces)
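A hedged usage sketch for the reader above; the file name and network code are hypothetical and only need to point at an IMS ASCII file with the comma-separated header parsed above:

st = read_IMS_ASCII('event_001.asc', net='OT')  # hypothetical path and network code
print(st)  # a Stream with three traces, channels X, Y and Z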
Example #3
def convert_to_obspy(signal, fsp):
    # type: (np.ndarray, float) -> obspy.core.trace.Trace
    """
    Convert a numpy array from any infrasound reading into Obspy format.
    This lets us take advantage of Obspy's optimized processing routines.
    :param signal: A numpy array containing the signal we want to process.
    :param fsp: The sampling frequency of the signal.
    :return: An obspy Trace wrapping the signal.
    """

    stats = Stats()
    stats.sampling_rate = float(fsp)
    stats.npts = signal.shape[0]
    return Trace(data=signal.reshape(signal.shape[0], ), header=stats)
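For instance, a short check with made-up data (the array and rate below are placeholders, not real infrasound readings):

import numpy as np

signal = np.random.randn(2048)         # stand-in infrasound samples
tr = convert_to_obspy(signal, 1000.0)  # 1 kHz sampling, hypothetical
tr.filter('lowpass', freq=50.0)        # obspy routines are now available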
Example #4
    def _init_header(self, path):
        config = _load_config(path)
        metadata = config['metadata']
        header = Stats()

        if 'ATS9440' in config['plugins'].keys():
            ats_config = config['plugins']['ATS9440']['config']
        elif 'ATS660' in config['plugins'].keys():
            ats_config = config['plugins']['ATS660']['config']
        else:
            raise KeyError('Cannot locate trace config data')

        if 'Polytec' in config['plugins'].keys():
            polytec_config = config['plugins']['Polytec']['config']
        else:
            raise KeyError('Cannot locate vibrometer config data')

        header.sampling_rate = _calc_sampling_rate(ats_config['sample_rate'])
        header.npts = int(ats_config['pre_trigger_samples'] +
                          ats_config['post_trigger_samples']) - 1

        if polytec_config['dd_300']:
            header.calib = float(
                re.findall(_NUMBER, polytec_config['dd_300_range'])[0])
        elif polytec_config['dd_900']:
            header.calib = float(
                re.findall(_NUMBER, polytec_config['dd_900_range'])[0])
        elif polytec_config['vd_08']:
            header.calib = float(
                re.findall(_NUMBER, polytec_config['vd_08_range'])[0])
        elif polytec_config['vd_09']:
            header.calib = float(
                re.findall(_NUMBER, polytec_config['vd_09_range'])[0])
        else:
            raise KeyError('Cannot locate vibrometer calibration data')

        header.comments = str(config['comments'])
        header.place = metadata

        if self._config['header_extra1_name'] != '' and self._config['header_extra1_val'] != '':
            header[self._config['header_extra1_name']] = self._config['header_extra1_val']
        if self._config['header_extra2_name'] != '' and self._config['header_extra2_val'] != '':
            header[self._config['header_extra2_name']] = self._config['header_extra2_val']
        return header
Example #5
 def python2obspy(self):
     from obspy.core.trace import Stats, Trace
     from obspy.core.utcdatetime import UTCDateTime
     s = Stats()
     s.network = self.network
     s.station = self.station
     s.location = self.location
     s.channel = self.channel
     s.sampling_rate = self.sampling_rate
     s.starttime = UTCDateTime(self.starttime)
     s.npts = len(self.data)
     if 'CALIB' in self.misc_fields:
         s.calib = self.misc_fields.pop('CALIB')
     s.update(self.misc_fields)
     return Trace(self.data[:], header=s)
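Note that Trace(self.data[:], header=s) copies the header into trace.stats and recomputes npts from the data length, so the explicit s.npts assignment is a safeguard rather than a requirement. A condensed sketch of the same pattern with placeholder values:

import numpy as np
from obspy.core.trace import Stats, Trace
from obspy.core.utcdatetime import UTCDateTime

s = Stats()
s.network, s.station, s.channel = 'XX', 'TEST', 'HHZ'  # placeholder codes
s.sampling_rate = 100.0
s.starttime = UTCDateTime(0)
tr = Trace(np.zeros(500), header=s)
assert tr.stats.npts == 500  # npts follows the data length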
Example #6
    def _init_header(self, path):
        config = _load_config(path)
        metadata = config['metadata']
        header = Stats()

        if 'ATS9440' in config['plugins'].keys():
            ats_config = config['plugins']['ATS9440']['config']
        elif 'ATS660' in config['plugins'].keys():
            ats_config = config['plugins']['ATS660']['config']
        else:
            raise KeyError('Cannot locate trace config data')

        if 'Polytec' in config['plugins'].keys():
            polytec_config = config['plugins']['Polytec']['config']
        else:
            raise KeyError('Cannot locate vibrometer config data')

        header.sampling_rate = _calc_sampling_rate(ats_config['sample_rate'])
        header.npts = int(ats_config['pre_trigger_samples'] +
                          ats_config['post_trigger_samples']) - 1

        if polytec_config['dd_300']:
            header.calib = float(re.findall(
                _NUMBER, polytec_config['dd_300_range'])[0])
        elif polytec_config['dd_900']:
            header.calib = float(re.findall(
                _NUMBER, polytec_config['dd_900_range'])[0])
        elif polytec_config['vd_08']:
            header.calib = float(re.findall(
                _NUMBER, polytec_config['vd_08_range'])[0])
        elif polytec_config['vd_09']:
            header.calib = float(re.findall(
                _NUMBER, polytec_config['vd_09_range'])[0])
        else:
            raise KeyError('Cannot locate vibrometer calibration data')

        header.comments = str(config['comments'])
        header.place = metadata

        if self._config['header_extra1_name'] != '' and self._config['header_extra1_val'] != '':
            header[self._config['header_extra1_name']] = self._config['header_extra1_val']
        if self._config['header_extra2_name'] != '' and self._config['header_extra2_val'] != '':
            header[self._config['header_extra2_name']] = self._config['header_extra2_val']
        return header
Example #7
    def to_sac_and_mseed(self, export_path, station_number, force_without_loc):
        # Check if the output files already exist
        export_path_sac = export_path + self.get_export_file_name() + ".sac"
        export_path_msd = export_path + self.get_export_file_name() + ".mseed"
        #export_path_wav = export_path + self.get_export_file_name() + ".wav"
        if os.path.exists(export_path_sac) and os.path.exists(export_path_msd):
            return

        # Check if the station location has been calculated
        if self.station_loc is None and not force_without_loc:
            print(self.get_export_file_name() +
                  ": skip sac/mseed generation, wait for the next ascent to compute location")
            return

        # Fill header info
        stats = Stats()
        stats.sampling_rate = self.decimated_fs
        stats.network = "MH"
        stats.station = station_number
        stats.starttime = self.date

        stats.sac = dict()
        if not force_without_loc:
            stats.sac["stla"] = self.station_loc.latitude
            stats.sac["stlo"] = self.station_loc.longitude
        stats.sac["stdp"] = self.depth
        stats.sac["user0"] = self.snr
        stats.sac["user1"] = self.criterion
        stats.sac["iztype"] = 9  # 9 == IB in sac format

        # Save data into a Stream object
        trace = Trace()
        trace.stats = stats
        trace.data = self.data
        stream = Stream(traces=[trace])

        # Save stream object
        print(export_path_sac)
        stream.write(export_path_sac, format='SAC')
        print(export_path_msd)
        stream.write(export_path_msd, format='MSEED')
Example #8
def synthetic_seismogram(Mw,
                         duration=0.1,
                         sampling_rate=10000,
                         vp=5000.0,
                         vs=3500.0,
                         rho=2400,
                         SSD=1,
                         pwave=True):
    """
    Create a synthetic displacement pulse at the source based on the
    Brune model (Brune 1970).
    This model is extensively used and generally agrees with observations
    from many different settings and over a large range of magnitudes.

    The displacement time function, u(t), is expressed as follows:

        u(t) = A0 * t * omega_0**2 * H(t) * exp(-t * omega_0) ,

    where t is the time, omega_0 the angular corner frequency and H(t)
    the Heaviside step function. Note that the angular frequency is
    calculated from the static stress drop (SSD). A0 is given by the
    following equation:

        A0 = M0 / (4 * pi * rho * v ** 3)

    References for further reading:
    - Routine data processing in earthquake seismology
    - Relating Peak Particle Velocity and Acceleration to Moment Magnitude
    in Passive (Micro-) Seismic Monitoring
    (www.bcengineers.com/images/BCE_Technical_Note_3.pdf)

    :param Mw: the moment magnitude of the seismic event; this value
    determines the wave amplitude and the frequency content
    :type Mw: float
    :param duration: duration of the seismogram in seconds (default: 0.1),
    the pulse is centered at zero
    :type duration: float
    :param sampling_rate: sampling rate in Hz of the generated time series
    (default: 10000)
    :type sampling_rate: int
    :param vp: P-wave velocity of the material at the source (default: 5000 m/s)
    :type vp: float
    :param vs: S-wave velocity of the material at the source (default: 3500 m/s)
    :type vs: float
    :param rho: density of the material at the source in kg/m**3 (default:
    2400 kg/m**3)
    :type rho: float
    :param SSD: static stress drop in bar (default: 1 bar)
    :type SSD: float
    :param pwave: return the P-wave displacement seismogram if True and the
    S-wave displacement seismogram if False
    :type pwave: bool
    :return: obspy Trace containing the displacement seismogram
    :rtype: obspy Trace

    .. note::
        The velocity and acceleration can easily be obtained by
        differentiating the trace using the Obspy Trace method
        differentiate.

        Example
        >>> tr = synthetic_seismogram(-1)
        >>> tr.differentiate()  # this creates a velocity trace
        >>> tr.differentiate()  # this creates an acceleration trace

        This operation is performed in place on the actual data arrays. The
        raw data is not accessible anymore afterwards. To keep your
        original data, use :meth:`~obspy.core.trace.Trace.copy` to create
        a copy of your trace object.
        This also makes an entry with information on the applied processing
        in ``stats.processing`` of this trace.

    """

    M0 = Mw2M0(Mw)
    (f0p, f0s) = corner_frequency(Mw, vp, vs, SSD)
    # duration = 5 / f0p
    npts = int(duration * sampling_rate)
    t = np.arange(npts) / sampling_rate
    if pwave:
        W0 = 2 * np.pi * f0p
        v = vp
    else:
        W0 = 2 * np.pi * f0s
        v = vs

    A0 = M0 / (4 * np.pi * rho * v**3)

    data = A0 * t * W0**2 * np.exp(-t * W0)
    data = np.roll(data, len(data) // 2)  # center the pulse in the window

    stats = Stats()
    stats.sampling_rate = sampling_rate
    stats.npts = npts  # Trace() below recomputes npts from the data in any case

    from uquake.core.util.cepstrum import minimum_phase
    # minimum-phase variant of the pulse (computed but not returned here)
    minphase_data = np.roll(minimum_phase(data, len(data)), len(data) // 2)

    return Trace(data=data, header=stats)
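Mw2M0 and corner_frequency are not shown in this snippet; a plausible sketch under the usual conventions (Hanks & Kanamori moment-magnitude relation, Brune corner frequency with a constant of 0.49, and SSD converted from bar to Pa) would be:

def Mw2M0(Mw):
    # Hanks & Kanamori (1979): seismic moment in N*m from moment magnitude
    return 10 ** (1.5 * Mw + 9.1)

def corner_frequency(Mw, vp, vs, SSD):
    # Brune (1970): f0 = 0.49 * v * (delta_sigma / M0) ** (1/3),
    # with the static stress drop converted from bar to Pa
    M0 = Mw2M0(Mw)
    delta_sigma = SSD * 1e5  # 1 bar = 1e5 Pa
    f0p = 0.49 * vp * (delta_sigma / M0) ** (1. / 3.)
    f0s = 0.49 * vs * (delta_sigma / M0) ** (1. / 3.)
    return f0p, f0s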
Example #9
def invert_raw():
    ######################################
    # Binary
    ######################################
    if mode == "Binary":
        catch_files = []
        files = glob.glob(file_path + "*")
        for file in files:
            catch = re.findall(
                ".*[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}_[0-9]{2}_[0-9]{2}\.[0-9]{6}",
                file)
            if len(catch) > 0:
                catch_files.append(file)

        ######################################
        # Freq file
        ######################################
        freq_file = glob.glob(file_path + "*_freq")
        if len(freq_file) > 1:
            print("warning: more than one freq file in folder")
        if len(freq_file) == 0:
            print("warning: no freq file discovered, using: " + str(sampling_freq))
        else:
            content = "40.000000"
            with open(freq_file[0], "r") as f:
                content = f.read()
            sampling_freq = float(content)
            print("Sampling used : " + str(sampling_freq))
        files_nb = len(catch_files)
        file_offset = 1
        for catch_file in catch_files:
            print(catch_file)
            print("File nb : " + str(file_offset) + "/" + str(files_nb))
            date = UTCDateTime(
                re.findall(
                    ".*([0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}_[0-9]{2}_[0-9]{2}\.[0-9]{6})",
                    catch_file)[0])
            rawdata = numpy.fromfile(catch_file, numpy.int32)
            ######################################
            # Plot plotly file
            ######################################
            # Add acoustic values to the graph
            #data_line = graph.Scattergl(x=[date + i/sampling_freq for i in range(0,len(rawdata))],
            #                          y=rawdata,
            #                          name="counts",
            #                          line=dict(color='blue', width=2),
            #                          mode='lines')

            #plotlydata = [data_line]

            #layout = graph.Layout(title="Plot",
            #                      xaxis=dict(title='Date', titlefont=dict(size=18)),
            #                      yaxis=dict(title='Counts', titlefont=dict(size=18)),
            #                      hovermode='closest'
            #                      )

            #plotly.plot({'data': plotlydata, 'layout': layout},
            #            filename=catch_file + ".html",
            #            auto_open=False)

            ######################################
            # Create SAC file
            ######################################

            # Fill header info
            stats = Stats()
            stats.sampling_rate = sampling_freq
            stats.network = "test"
            stats.station = 0
            stats.starttime = date
            stats.sac = dict()

            # Save data into a Stream object
            trace = Trace()
            trace.stats = stats
            trace.data = rawdata
            stream = Stream(traces=[trace])

            # Save stream object
            stream.write(catch_file + ".sac", format='SAC')
            stream.write(catch_file + ".mseed", format='MSEED')
            file_offset = file_offset + 1
    else:
        ######################################
        # Text
        ######################################
        #filename = "tool_invert_raw/1553771378.490936"
        #date = UTCDateTime(1553771378.490936)
        # text
        #f = open(filename, 'r')
        #rawdata = numpy.array(f.read().rstrip('\n').split('\n'))
        #f.close()
        # binary
        ######################################
        # Plot plotly file
        ######################################

        # Add acoustic values to the graph
        data_line = graph.Scattergl(
            x=[date + i / sampling_freq for i in range(0, len(rawdata))],
            y=rawdata,
            name="counts",
            line=dict(color='blue', width=2),
            mode='lines')

        plotlydata = [data_line]

        layout = graph.Layout(title="Plot",
                              xaxis=dict(title='Date',
                                         titlefont=dict(size=18)),
                              yaxis=dict(title='Counts',
                                         titlefont=dict(size=18)),
                              hovermode='closest')

        plotly.plot({
            'data': plotlydata,
            'layout': layout
        },
                    filename=filename + ".html",
                    auto_open=False)

        ######################################
        # Create SAC file
        ######################################

        # Fill header info
        stats = Stats()
        stats.sampling_rate = sampling_freq
        stats.network = "test"
        stats.station = 0
        stats.starttime = date
        stats.sac = dict()

        # Save data into a Stream object
        trace = Trace()
        trace.stats = stats
        trace.data = rawdata
        stream = Stream(traces=[trace])

        # Save stream object
        stream.write(filename + ".sac", format='SAC')
Example #10
    def attach_obspy_trace_stats(self, kstnm, kinst, force_without_loc=False):
        '''Attaches attribute: obspy_trace_stats, an obspy.core.trace.Stats instance.

        obspy_trace_stats holds metadata common to both miniSEED and SAC formats.
        obspy_trace_stats.sac holds extra metadata only found in the SAC format.

        Floats are NOT converted to np.float32() in either case.

        NB: the SAC header value shown to the world (e.g., "sac.delta"), and the private SAC header
        written to disk (e.g., "sac._hf[0]"), differ in type.  The relevant float header values that
        actually get written to disk with sac.write are stored in the private "._hf" attribute,
        which is not generated with initialization of the raw Stats() container. Therefore, if
        printing those values to, e.g. a text file, ensure the relevant F (float) fields are cast to
        np.float32 first.

        For example:
        >> from obspy.core.trace import Trace
        >> from obspy.io.sac.sactrace import SACTrace
        >> trace = Trace()
        >> sac = SACTrace.from_obspy_trace(trace)  <-- this gets called by sac.write (within stream.write)
        >> sac.delta = 1/20
        >> isinstance(sac.delta, float)            <-- True: this is the public attr shown to the world
        >> isinstance(sac.delta, np.float32)       <-- False
        >> isinstance(sac._hf[0], float)           <-- False
        >> isinstance(sac._hf[0], np.float32)      <-- True: this is the private attr written to disk

        For more detail see: http://www.adc1.iris.edu/files/sac-manual/manual/file_format.html

        Update function `events.write_metadata` if the fields in this method are changed.

        '''

        # Fill metadata common to SAC and miniSEED formats
        stats = Stats()
        stats.network = utils.network()
        stats.station = kstnm
        stats.location = "00"
        stats.channel = utils.band_code(
            self.decimated_fs) + "DH"  # SEED manual Appendix A
        stats.starttime = self.corrected_starttime
        stats.sampling_rate = self.decimated_fs
        stats.npts = len(self.processed_data)

        # Extra metadata, some of which is only written to SAC files
        keys = [
            'stla', 'stlo', 'stel', 'stdp', 'scale', 'cmpaz', 'cmpinc',
            'user0', 'user1', 'user2', 'user3', 'kinst', 'kuser0', 'kuser1',
            'kuser2'
        ]
        def_float = -12345.

        # Default SAC header (we will not fill all of these keys)
        stats.sac = dict.fromkeys(keys, def_float)

        # Fill station-location header fields.
        if not force_without_loc:
            stats.sac["stla"] = self.station_loc.latitude
            stats.sac["stlo"] = self.station_loc.longitude

        # Elevation is 0 (our reference is truly sea level)
        stats.sac["stel"] = 0

        # Add scaling factor to convert digital counts to Pa
        stats.sac["scale"] = utils.sacpz_const()

        # Add dip (CMPINC; "component incidence") in SAC dip convention, using as guide:
        # https://github.com/iris-edu/mseed2sac/blob/master/doc/mseed2sac.md
        #
        # SAC dip convention: "degrees down from vertical up/outward",
        # i.e., BHN, BHE = 90, BHZ = 0
        #
        # SEED dip convention: "degrees down from horizontal"
        # i.e., BHN, BHE = 0, BHZ = -90
        stats.sac["cmpinc"] = 0  # SAC dip

        # Add azimuth: horizontal projection of the component vector measured
        # clockwise from north; it is 0 for vertical components.
        # Theoretically: BHN = 0, BHE = 90, BHZ = 0
        stats.sac["cmpaz"] = 0

        # NB: I checked how IRIS serves up hydrophone data (in MATLAB):
        # >> s = irisFetch.Stations('channel', '*', '*', '*', '?DH')
        #
        # For all 3233 channels from 2147 stations that were returned:
        # dip = -90, 0, or 90
        # azimuth = 0 or 360
        #
        # For dip = -90, I assume that is the SEED dip convention
        # For dip = +90, I do not know; I thought perhaps it might be some(thing like a?)
        # right-hand-rule convention, but not all +90 dips are associated with 360 azimuth

        # REQ events do not record their depth at the time of acquisition, and because the onboard
        # detection algorithm was not triggered there are no trigger parameters to report
        if not self.is_requested:
            stats.sac["stdp"] = self.depth  # meters (from external pressure sensor; down is positive)
            stats.sac["user0"] = self.snr
            stats.sac["user1"] = self.criterion
            stats.sac["user2"] = self.trig  # sample index

        # Clock drift correction, which is the 'Time correction' applied in the 48-byte
        # fixed header in utils.set_mseed_time_correction()
        stats.sac["user3"] = self.clockdrift_correction  # = self.mseed_time_correction

        # Generic instrument (e.g., '452.020')
        stats.sac['kinst'] = kinst

        # automaid version number
        stats.sac["kuser0"] = self.__version__

        # String describing detection/request status, and number of wavelet scales transmitted
        # (e.g., 'DET.WLT5')
        reqdet_scales = self.processed_file_name.split('.')[-2:]
        stats.sac['kuser1'] = '.'.join(reqdet_scales)

        # String detailing the type of (i)CDF24 transform: edge correction and
        # normalization
        stats.sac['kuser2'] = 'ec' + self.edges_correction + 'norm' + self.normalized

        # Attach Stats to events object
        self.obspy_trace_stats = stats
Example #11
  def dorange (self):
    # load batches
    print "mkms: loading batches.."
    self.bdatas = []

    for i in self.ids:
      d = Dat ()
      d.read (os.path.join (self.root, str(i) + '.DAT'))

      self.bdatas.append (d.bdata)

    # set up datastream
    print "mkms: setting up stream for %s.." % self.station,
    self.st = Stream ()
    for bd in self.bdatas:
      for b in bd.batches:
        s = Stats ()
        s.sampling_rate = self.sampling_rate
        s.npts = b.length
        s.network = self.network
        s.location = self.location
        s.station = self.station
        s.channel = self.channel
        s.starttime = UTCDateTime ((b.ref / 1000000.0))

        t = Trace (data = numpy.array (b.samples_i, dtype = numpy.int32), header = s)
        self.st.append (t)

    print "done."

    # generate file name
    self.name = self.st[0].id.replace ('.', '_')

    self.start = self.st[0].stats.starttime
    self.name = self.start.strftime ("%Y-%m-%d-%H%M-%S") + '.' + self.name

    if self.optplot:
      self.plot ()

    if not self.optnowrite:
      print "mkms: writing %s.mseed.." % self.name,

      if not os.path.exists (self.destdir):
        os.makedirs (self.destdir)

      self.st.write (os.path.join (self.destdir, self.name + '.mseed'), format = 'MSEED', encoding = 'INT32', byteorder = 1, flush = 1, verbose = 0)

      print "done."

      # write ids and refs
      idsf = open (os.path.join (self.destdir, self.name + '.ids'), 'w')
      refsf = open (os.path.join (self.destdir, self.name + '.refs'), 'w')
      for bd in self.bdatas:
        idsf.write ("%d,%d\n" % (bd.id, 1 if bd.e_sdlag else 0))

        for b in bd.batches:
          refsf.write ("%d,%d,%d,%d,%s,%s,%s,%s,%s,%d\n" % (bd.id, b.no, b.ref, b.status, b.latitude[:-2], b.latitude[-2:], b.longitude[:-2], b.longitude[-2:], b.checksum, 1 if b.checksum_pass else 0))

      idsf.close ()
      refsf.close ()

      return (self.name + '.mseed', idsf, refsf)
    else:
      print "mkms: would write %s.mseed (disabled)." % os.path.join (self.destdir, self.name)
      return None
Example #12
        traceN = Trace(ax)
        traceE = Trace(az)
        if PlotUnit == 'VEL':
            # integrate acceleration if plotting velocity
            traceN.integrate(method='cumtrapz')
            traceE.integrate(method='cumtrapz')
        plottheta = theta[thetacount] * 180 / np.pi
        thetacount = thetacount + 1
        # store stats
        stationname = sta + '%04d' % i
        channelnameN = cha + 'N'
        channelnameE = cha + 'E'

        # for NS components
        statsN = Stats()
        # sampling_rate_x holds the sample spacing in seconds (i.e., delta)
        statsN.sampling_rate = 1.0 / sampling_rate_x
        statsN.delta = sampling_rate_x
        statsN.starttime = starttime
        statsN.npts = len(traceN.data)
        statsN.network = net
        statsN.station = stationname
        statsN.location = ''
        statsN.channel = channelnameN
        traceN.stats = statsN
        traceN.stats.sac = obspy.core.AttribDict()
        traceN.stats.sac.back_azimuth = plottheta  # use this as azimuth of station

        #---applying filters---#
        traceN.filter('bandpass', freqmin=freqmin, freqmax=freqmax)
        tN = traceN.stats.starttime
        traceN.trim(starttime=tN, endtime=tN + trim_end_time)