Example #1
    def _upsample(self, trace, upfactor):
        """
        Upsample a data stream by a given factor, prior to decimation. The
        upsampling is done using a linear interpolation.

        Parameters
        ----------
        trace : obspy Trace object
            Trace to be upsampled
        upfactor : int
            Factor by which to upsample the data in trace

        Returns
        -------
        out : obspy Trace object
            Upsampled trace

        """

        data = trace.data
        dnew = np.zeros(len(data) * upfactor - (upfactor - 1))
        dnew[::upfactor] = data
        for i in range(1, upfactor):
            dnew[i::upfactor] = float(i) / upfactor * data[1:] \
                         + float(upfactor - i) / upfactor * data[:-1]

        out = Trace()
        out.data = dnew
        # Copy the stats so the original trace's metadata is not mutated
        out.stats = trace.stats.copy()
        out.stats.npts = len(out.data)
        out.stats.starttime = trace.stats.starttime
        out.stats.sampling_rate = int(upfactor * trace.stats.sampling_rate)

        return out
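
To see what the fencepost arithmetic does, here is a quick standalone check of the interpolation core (a minimal sketch assuming only numpy; the sample values are illustrative): upsampling N samples by a factor k yields N*k - (k - 1) samples, with the original samples preserved at every k-th position and linear interpolation in between.

import numpy as np

data = np.array([0.0, 4.0, 8.0])
upfactor = 4
dnew = np.zeros(len(data) * upfactor - (upfactor - 1))
dnew[::upfactor] = data
for i in range(1, upfactor):
    dnew[i::upfactor] = i / upfactor * data[1:] + (upfactor - i) / upfactor * data[:-1]
print(dnew)  # [0. 1. 2. 3. 4. 5. 6. 7. 8.]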
Example #2
import copy

import numpy as np
from obspy import Stream, Trace

# half_autocorr, extract_ndarray and PWStack are project-local helpers

def stack_autocorr(st, stacklength='all', pw=True, v=2, prestack=True, psl=24,
                   rw=False, half=True):
    """Stack auto-correlation (stream)
    ...

    Parameters
    ----------
    st : obspy stream
    stacklength : integer (number of traces), or 'all' (to stack all traces), or 'month' (for monthly stacks).
    pw : Apply a phase weighted stack? (True/False)
    v : Order of the phase weights (0 is equal to a linear stack)
    prestack : Apply a linear pre-stack of length psl? (True/False)
    psl : Prestack length. Number of traces to stack linearly before phase weightes stack.
    rw : Return the phase weights (not the phase weighted stack)? (True/False)
    half : Half the auto-correlation so its not mirrored? (True/False)

    return: obspy stream containing stacks equal to value defined in 'stacklength'
    """
    stream = st.copy()
    stream.sort(keys=["starttime"])
    stream = half_autocorr(stream, flip=False, half=half)
    stream2 = Stream()
    start = copy.deepcopy(stream[0].stats['starttime'])
    end = copy.deepcopy(stream[-1].stats['endtime'])
    curtime = copy.deepcopy(start)
    while curtime < end:
        swin = copy.deepcopy(curtime)
        if stacklength == 'month':
            # Advance the window end by one calendar month
            ewin = copy.deepcopy(swin)
            if ewin.month < 12:
                try:
                    ewin.month = ewin.month + 1
                except ValueError:
                    # The day does not exist in the next month (e.g. 31 Jan);
                    # step back a few days before advancing the month
                    ewin.day = ewin.day - 3
                    ewin.month = ewin.month + 1
            else:
                ewin.year = ewin.year + 1
                ewin.month = 1
        elif stacklength == 'all':
            ewin = end
        else:
            # stacklength is a number of days
            ewin = copy.deepcopy(curtime + stacklength * 60 * 60 * 24)
        sl = stream.slice(swin, ewin)
        curtime = copy.deepcopy(ewin)
        if len(sl) > 0:
            slr = extract_ndarray(sl, smartpad=False)
            if len(slr) > 0:
                if len(slr) == 1:
                    stt = slr[0]
                elif pw:
                    stt = PWStack(slr, v=v, psl=psl, rw=rw, prestack=prestack)
                else:
                    stt = np.nansum(slr, axis=0)
                sltr = Trace(stt)
                # Copy so the sliced trace's metadata is not shared
                sltr.stats = sl[0].stats.copy()
                stream2 += sltr
    return stream2
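
PWStack itself is not shown here. For orientation, a minimal sketch of a phase-weighted stack of order v in the sense of Schimmel & Paulssen (the name pw_stack and the plain nanmean linear stack are illustrative assumptions, not this project's implementation):

import numpy as np
from scipy.signal import hilbert

def pw_stack(traces, v=2):
    # traces: 2-D array, one trace per row
    # Unit phasors of the instantaneous phase from the analytic signal
    phasors = np.exp(1j * np.angle(hilbert(traces, axis=1)))
    # Coherency in [0, 1]: magnitude of the mean phasor across traces
    coherency = np.abs(phasors.mean(axis=0))
    # Weight the linear stack by coherency**v; v=0 reduces to a linear stack
    return np.nanmean(traces, axis=0) * coherency ** v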
Example #3
import numpy as np
import pytest
from obspy import Trace

# `am` is the project's adjoint-source measurement module, imported in the
# full test suite

def test_adjoint_functions():

    observed = Trace(data=np.ones(25))
    synthetic = Trace(data=np.ones(25))
    synthetic.data[0:13] *= 2.0
    synthetic.stats.sac = {}
    synthetic.stats.sac['dist'] = 6000.
    observed.stats = synthetic.stats.copy()

    window_params = {}
    window_params['hw'] = 2.0
    window_params['sep_noise'] = 1.0
    window_params['win_overlap'] = False
    window_params['wtype'] = "hann"
    window_params['plot'] = False

    g_speed = 2000.

    adj, success = am.log_en_ratio_adj(observed, synthetic, g_speed,
                                       window_params)

    assert success
    assert len(adj) == 25
    assert adj[10] == pytest.approx(-0.16666666666666663)

    adj, success = am.windowed_waveform(observed, synthetic, g_speed,
                                        window_params)
    assert success
    assert len(adj) == 25
    assert adj[9] == pytest.approx(1.)

    func = am.get_adj_func('energy_diff')
    adj, success = func(observed, synthetic, g_speed, window_params)
    assert success
    assert len(adj) == 2
    assert isinstance(adj, list)
    assert adj[1].sum() == pytest.approx(6.)
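
For reference, pytest.approx compares floats with a relative tolerance of 1e-6 by default, which is what lets the assertions above tolerate floating-point rounding; a minimal illustration (not part of the test suite):

import pytest

# 0.1 + 0.2 is 0.30000000000000004 in binary floating point, but still
# compares equal under pytest.approx's default relative tolerance
assert 0.1 + 0.2 == pytest.approx(0.3)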
Example #4
import os
from datetime import datetime

import numpy as np
from obspy import Stream, Trace, UTCDateTime
from obspy.core import AttribDict

# mag_seperator and mag_type are project-local helpers for parsing magnitudes

def txt2sac(txt, output_dir=None):
  '''Convert a text waveform file to SAC files (one file per component).'''
  # Evaluate the default output directory at call time, not at import time
  if output_dir is None:
    output_dir = os.getcwd()
  with open(txt,encoding='iso-8859-9') as eqfile:
    if os.stat(txt).st_size == 0:
      print(txt + ' is empty')
      return
    head = [next(eqfile) for x in range(14)]
      
  head = [line.rstrip('\n') for line in head]
  hd =  AttribDict()
  hd['sac'] = AttribDict()
  # Retrieve Event Information
  # Retrieve EventTime
  s = ''.join(i for i in head[2] if i.isdigit())
  evtime = datetime.strptime(s, '%Y%m%d%H%M%S%f')
  # Retrieve EVLA, EVLO
  _,coors = head[3].split(':')
  # Remove space, N and E 
  coors = coors.replace(' ','')
  coors = coors.replace('N','')
  coors = coors.replace('E','')
  evla,evlo = coors.split('-')
  hd['sac'].evla = float(evla.replace(',', '.'))
  hd['sac'].evlo = float(evlo.replace(',', '.'))
  # Retrieve EVDP
  _,depth = head[4].split(':')
  evdp = depth.replace(' ','')
  hd['sac'].evdp = float(evdp)
  # Retrieve MAG
  _,mags = head[5].split(':')
  # Check if multiple Magnitude types are associated with the earthquake
  if ',' in mags:
    mag,imagtyp = mag_seperator(mags)
    hd['sac'].imagtyp = imagtyp
  else:
    _, mag,imagtyp = mags.split(' ')
    hd['sac'].imagtyp = mag_type(imagtyp)
  hd['sac'].mag = float(mag.replace(',','.'))
  # Retrieve Station Information
  # Assign Network
  hd['network'] = 'TK'
  # Assign Location (location codes are strings)
  hd['location'] = '00'
  # Retrieve KSTNM
  _,stnm = head[6].split(':')
  kstnm = stnm.replace(' ','')
  hd['station'] = kstnm
  # Retrieve STLA, STLO
  _,coors = head[7].split(':')
  # Remove space, N and E 
  coors = coors.replace(' ','')
  coors = coors.replace('N','')
  coors = coors.replace('E','')
  stla,stlo = coors.split('-')
  hd['sac'].stla = float(stla.replace(',', '.'))
  hd['sac'].stlo = float(stlo.replace(',', '.'))
  # Retrieve STEL
  _,el = head[8].split(':')
  stel = el.replace(' ','')
  hd['stel'] = float(stel.replace(',','.'))
  # Retrieve Record Information
  # Retrieve Recordtime
  s = ''.join(i for i in head[11] if i.isdigit())
  starttime = datetime.strptime(s, '%d%m%Y%H%M%S%f')
  hd['starttime'] = UTCDateTime(starttime)
  hd['sac'].o = UTCDateTime(starttime) - UTCDateTime(evtime) 
  # Retrieve NPTS
  _,nptss = head[12].split(':')
  npts = nptss.replace(' ','')
  hd['npts'] = int(npts)
  # Retrieve DELTA
  _,dt = head[13].split(':')
  delta = dt.replace(' ','')
  hd['delta'] = float(delta.replace(',','.'))
  hd['sampling_rate'] = 1/hd['delta']
  hd['endtime'] = hd['starttime'] + hd['npts']*hd['delta']
  hd['sac'].lcalda = 1
  hd['sac'].lovrok = 1
  # The with-block above has already closed the file
  # Read Waveform
  with open(txt,encoding='iso-8859-9') as eqfile:
    wfs = eqfile.readlines()[18:]
  wfs = [line.rstrip('\n') for line in wfs]
  wfs = [line.split(' ') for line in wfs]
  wfs = [list(filter(None, line)) for line in wfs]
  e, n, z = [], [], []
  for line in wfs:
    n.append(line[0])
    e.append(line[1])
    z.append(line[2])
  #East
  tracee = Trace(np.asarray(e, dtype=float))
  hd['channel'] = 'HGE'
  tracee.stats = hd
  st = Stream(traces=[tracee])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  #North
  tracen = Trace(np.asarray(n, dtype=float))
  hd['channel'] = 'HGN'
  tracen.stats = hd
  st = Stream(traces=[tracen])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  #Vertical
  tracez = Trace(np.asarray(z, dtype=float))
  hd['channel'] = 'HGZ'
  tracez.stats = hd
  st = Stream(traces=[tracez])
  st.write(os.path.join(output_dir,st[0].id + '.SAC'), format='SAC')
  return
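
A hypothetical invocation (the filename is illustrative; the parser expects the 14-line header and the waveform block starting at line 19, as in the Turkish 'TK'-network text files this reads):

# Writes TK.<station>.00.HGE.SAC, ...HGN.SAC and ...HGZ.SAC into sac_out/
txt2sac('event_record.txt', output_dir='sac_out')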
Example #5
import logging

import numpy as np
from obspy import Trace

def upsample(trace, upfactor, starttime, endtime):
    """
    Upsample a data stream by a given factor, prior to decimation. The
    upsampling is carried out by linear interpolation.

    NOTE: assumes any data with off-sample timing has been corrected with
    :func:`~quakemigrate.util.shift_to_sample`. If not, the resulting traces
    may not all contain the correct number of samples (and desired start
    and end times).

    Parameters
    ----------
    trace : `obspy.Trace` object
        Trace to be upsampled.
    upfactor : int
        Factor by which to upsample the data in trace.
    starttime : `obspy.UTCDateTime` object
        Desired start time for the upsampled trace.
    endtime : `obspy.UTCDateTime` object
        Desired end time for the upsampled trace.

    Returns
    -------
    out : `obspy.Trace` object
        Upsampled trace.

    """

    data = trace.data
    # Fenceposts
    dnew = np.zeros((len(data) - 1) * upfactor + 1)
    dnew[::upfactor] = data
    for i in range(1, upfactor):
        dnew[i::upfactor] = float(i)/upfactor*data[1:] \
                        + float(upfactor - i)/upfactor*data[:-1]

    # Check if start needs pad - if so pad with constant value (start value
    # of original trace). Use inequality here to only apply padding to data at
    # the start and end of the requested time window; not for other traces
    # floating in the middle (in the case that there are gaps).
    if 0. < trace.stats.starttime - starttime < trace.stats.delta:
        logging.debug(f"Mismatched starttimes: {trace.stats.starttime}, "
                      f"{starttime}")
        # Calculate how many additional samples are needed
        start_pad = np.round((trace.stats.starttime - starttime) \
            * trace.stats.sampling_rate * upfactor)
        logging.debug(f"Start pad = {start_pad}")
        # Add padding data (constant value)
        start_fill = np.full(int(start_pad), trace.data[0], dtype=int)
        dnew = np.append(start_fill, dnew)
        # Calculate new starttime of trace
        new_starttime = trace.stats.starttime - start_pad \
            / (trace.stats.sampling_rate * upfactor)
        logging.debug(f"New starttime = {new_starttime}")
    else:
        new_starttime = trace.stats.starttime

    # Ditto for end of trace
    if 0. < endtime - trace.stats.endtime < trace.stats.delta:
        logging.debug(f"Mismatched endtimes: {trace.stats.endtime}, {endtime}")
        # Calculate how many additional samples are needed
        end_pad = np.round((endtime - trace.stats.endtime) \
            * trace.stats.sampling_rate * upfactor)
        logging.debug(f"End pad = {end_pad}")
        # Add padding data (constant value)
        end_fill = np.full(int(end_pad), trace.data[-1], dtype=int)
        dnew = np.append(dnew, end_fill)

    out = Trace()
    out.data = dnew
    out.stats = trace.stats.copy()
    out.stats.npts = len(out.data)
    out.stats.starttime = new_starttime
    out.stats.sampling_rate = int(upfactor * trace.stats.sampling_rate)
    logging.debug(f"Raw upsampled trace:\n\t{out}")

    # Trim to remove additional padding left from reading with
    # nearest_sample=True at a variety of sampling rates.
    # NOTE: here we are using nearest_sample=False, as all data in the stream
    # should now be at a *multiple* of the desired sampling rate, and with any
    # off-sample data having had its timing shifted.
    out.trim(starttime=starttime - 0.00001,
             endtime=endtime + 0.00001,
             nearest_sample=False)
    logging.debug(f"Trimmed upsampled trace:\n\t{out}")

    return out
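
A minimal sketch of the intended upsample-then-decimate workflow (the 40 Hz trace and the factors here are illustrative, not from the source): to resample 40 Hz data to 50 Hz, upsample by 5 to reach 200 Hz, then decimate by 4.

import numpy as np
from obspy import Trace

tr = Trace(np.random.randn(4001))
tr.stats.sampling_rate = 40.0
up = upsample(tr, 5, tr.stats.starttime, tr.stats.endtime)  # 40 Hz -> 200 Hz
up.decimate(4)  # lowpass + decimate: 200 Hz -> 50 Hz
print(up.stats.sampling_rate)  # 50.0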