Example #1
def ascii_specfem2d_obspy(**kwargs):
    """ Reads seismic traces from text files
    """
    import numpy as np
    from obspy.core.stream import Stream
    from obspy.core.trace import Trace

    filenames = glob(solver='specfem2d', **kwargs)

    t = np.loadtxt(filenames[0])[:, 0]

    # header information shared by every trace
    header = {
        'starttime': t[0],
        'delta': np.mean(np.diff(t)),
    }

    # read data, one trace per file
    stream = Stream()
    for filename in filenames:
        data = np.loadtxt(filename)[:, 1].astype('float32')
        stream.append(Trace(data=data, header=header))

    return stream
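
The glob(solver='specfem2d', **kwargs) call is a project-specific helper. Below is a minimal, self-contained sketch of the same idea using the standard-library glob; the wildcard pattern and the assumption of plain two-column (time, amplitude) ASCII files are illustrative, not taken from the snippet above.

# Hypothetical stand-alone variant: read every matching two-column ASCII file
# into one ObsPy Stream. The 'pattern' argument replaces the project-specific
# glob(solver=...) helper used above.
import glob as _glob

import numpy as np
from obspy.core.stream import Stream
from obspy.core.trace import Trace


def read_ascii_traces(pattern='OUTPUT_FILES/*.semd'):
    stream = Stream()
    for filename in sorted(_glob.glob(pattern)):
        t, amp = np.loadtxt(filename, unpack=True)      # columns: time, amplitude
        trace = Trace(data=amp.astype('float32'))
        trace.stats.delta = float(np.mean(np.diff(t)))  # sampling interval from the time column
        stream.append(trace)
    return stream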
Example #2
class drumPlot(Client):

    _file = 'tr.mseed'  # 'traces.mseed'
    _traces = Stream()
    _inv = read_inventory("metadata/Braskem_metadata.xml")
    _rtSft = rtSft
    _lastData = UTCDateTime.now()
    _2minRTraces = Stream()
    _appTrace = Stream()
    _drTrace = Stream()
    _drHTrace = Stream()
    _rTWindow = rTWindow
    _tEnd = UTCDateTime.now()
    _tNow = UTCDateTime.now()
    _rtRunning = False
    _hyRunning = False
    _saving = False
    _elRunning = False
    _status = {}
    _elab = {}
    _elabHyst = {}
    _events = []
    _polAn = {
        'polWinLen': 3,
        'polWinFr': .1,
        'fLow': 4,
        'fHigh': 12,
        'plTh': 0.7
    }
    _polAnResult = []

    def plotDrum(self, trace, filename='tmp.png'):
        print(trace.get_id())
        try:
            trace.data = trace.data * 1000 / 3.650539e+08
            #im,ax=plt.subplots()
            if not os.path.exists(os.path.dirname(filename)):
                os.makedirs(os.path.dirname(filename))

            #im=
            trace.plot(
                type='dayplot',
                dpi=dpi,
                x_labels_size=int(8 * 100 / int(dpi)),
                y_labels_size=int(8 * 100 / int(dpi)),
                title_size=int(1000 / int(dpi)),
                title=self._tEnd.strftime("%Y/%m/%d %H:%M:%S"),
                size=(sizex, sizey),
                color=('#AF0000', '#00AF00', '#0000AF'),
                vertical_scaling_range=yRange,
                outfile=filename,
                #handle=True,
                time_offset=-3,
                data_unit='mm/s',
                events=self._events)
            #            im.savefig(filename)
            #            plt.close(im)

            return True
        except Exception:
            print('oops, something went wrong in plotting!')
            return False

    def realTimeDrumPlot(self):
        print('start ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        appTrace = Stream()
        self._rtRunning = True
        for tr in self._traces:
            id = tr.get_id()
            spl = id.split('.')
            network = spl[0]
            station = spl[1]
            channel = spl[3]
            latency = int(self._tEnd - tr.stats['endtime'])
            self._status[station] = {}
            self._status[station]["Noise Level"] = "---"
            self._status[station]["Latency"] = str(latency) + 's'
            self._status[station]["Voltage"] = "---"
            self._status[station]["Color"] = "#FF0000"

            for b in band:
                fileNameRT = 'RT_' + network + '_' + station + '_' + channel + '_' + str(
                    b) + '.png'
                appTrace = tr.copy()
                bb = band[b]
                appTrace.trim(self._tEnd - self._rTWindow * 60,
                              self._tEnd,
                              pad=True,
                              fill_value=0)
                appTrace.filter('bandpass',
                                freqmin=bb[0],
                                freqmax=bb[1],
                                corners=2,
                                zerophase=True)
                self.plotDrum(appTrace, self._basePathRT + 'RT/' + fileNameRT)

        with open(self._basePathRT + 'RT/geophone_network_status.json',
                  'w') as fp:
            json.dump(self._status, fp)
            fp.close()

        print('end ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        self._rtRunning = False

    def hystDrumPlot(self, tEnd=0):

        appTrace = Stream()
        self._hyRunning = True
        if tEnd == 0:
            tEnd = self._tEnd
        else:
            self._tEnd = tEnd
        print('Hyststart ' + tEnd.strftime("%Y%m%d %H%M%S"))
        for tr in self._traces:
            id = tr.get_id()
            # print('hyst '+id)
            spl = id.split('.')
            network = spl[0]
            station = spl[1]
            channel = spl[3]

            for h in hystType:

                if tEnd.hour % int(h / 60) == 0:
                    for b in band:
                        tStart = tEnd - h * 60
                        p = network + '/' + station + '/' + channel + '/' + str(
                            tStart.year) + '/' + str(tStart.month) + '/' + str(
                                tStart.day) + '/' + str(h) + '/' + str(b)

                        fileName = p + '/' + tStart.strftime(
                            "%Y%m%d%H"
                        ) + '00.png'  # + '_' + (self._tEnd-60).strftime(
                        #"%Y%m%d%H") + '.png'

                        appTrace = tr.copy()
                        bb = band[b]
                        appTrace.trim(tStart, tEnd, pad=True, fill_value=0)
                        appTrace.filter('bandpass',
                                        freqmin=bb[0],
                                        freqmax=bb[1],
                                        corners=2,
                                        zerophase=True)
                        self.plotDrum(appTrace, self._basePath + fileName)

        self._hyRunning = False

    def hystElab(self):
        tStart = self._tEnd - 1440 * 60
        for e in self._elabHyst:
            p = e.split('_')
            network = p[0]
            station = p[1]
            p = self._basePath + network + '/' + station + '/' + 'ELAB' + '/' + str(
                tStart.year) + '/' + str(tStart.month) + '/' + str(
                    tStart.day) + '/' + tStart.strftime(
                        "%Y%m%d%H") + '00.json'  #ELAB_' + e + '.json'
            if not os.path.exists(os.path.dirname(p)):
                os.makedirs(os.path.dirname(p))
            # el = self._elabHyst[e]
            with open(p, 'w') as fp:
                json.dump(list(self._elabHyst[e].values()), fp)
                fp.close()
            self._elabHyst[e] = {}

    def elab(self):
        self._elRunning = True

        self._2minRTraces = self._traces.copy()
        self._2minRTraces.trim(self._tEnd - 120, self._tEnd)
        self._2minRTraces.remove_response(self._inv)

        tStart = self._tEnd - 60
        s = np.asarray(self.get_all_nslc())

        intTrace = self._2minRTraces.copy()
        intTrace.trim(tStart, self._tEnd)

        for network in np.unique(s[:, 0]):
            for station in np.unique(s[:, 1]):
                print('elab ' + station)
                stTrace = intTrace.select(network, station)
                elab = {'ts': int(self._tEnd.strftime("%Y%m%d%H%M%S"))}
                # TREMOR
                nTr = network + '_' + station
                # f = self.elabWhere(nTr, (self._tEnd - 3600).strftime("%Y%m%d%H%M%S"),
                #                    self._tEnd.strftime("%Y%m%d%H%M%S"))
                for appTrace in stTrace:
                    rms = {}
                    id = appTrace.get_id()
                    spl = id.split('.')
                    channel = spl[3]
                    elab[channel] = {}
                    # tStart = self._tEnd - 60
                    # appTrace = tr.copy()
                    # appTrace.trim(tStart, self._tEnd)
                    # appTrace.remove_response(self._inv)

                    for b in band:
                        bb = band[b]
                        trF = appTrace.copy()
                        trF.filter('bandpass',
                                   freqmin=bb[0],
                                   freqmax=bb[1],
                                   corners=2,
                                   zerophase=True)
                        rms[b] = np.sqrt(np.mean(trF.data**2))
                        elab[channel]['rms_' + b] = str("%0.2e" % rms[b])
                        # HC_rms = np.sum([float(s[channel]['rms_' + b]) for s in f])
                        # elab[channel]['HC_rms_' + b] = str("%0.2e" % HC_rms)

                try:
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr][elab['ts']] = elab
                except KeyError:
                    self._elab[nTr] = {}
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr] = {}
                    self._elabHyst[nTr][elab['ts']] = elab

                # clean up and save
                m = int((self._tEnd - 1440 * 60).strftime("%Y%m%d%H%M%S"))
                mm = np.min(list(self._elab[nTr].keys()))
                if mm < m:
                    self._elab[nTr].pop(mm)
                for e in self._elab:
                    filename = self._basePathRT + 'RT/ELAB_' + e + '.json'

                    with open(filename, 'w') as fp:
                        json.dump(list(self._elab[e].values()), fp)
                        fp.close()

                # except:
                #     print('failed elab in '+station)
                #     pass

        np.savez(self._basePath + 'elSave', h=self._elabHyst, e=self._elab)
        self._elRunning = False

    def elabWhere(self, id, ts, te):
        r = []
        ts = int(ts)
        te = int(te)
        try:
            for x in (y for y in self._elab[id].keys() if (y > ts) & (y < te)):
                r.append(self._elab[id][x])
        except:
            pass
        return r

    def polAn(self):
        a = alert()
        appTrace = self._2minRTraces.copy()
        ts = self._tEnd - 120
        te = self._tEnd - 10

        appTrace.filter('bandpass',
                        freqmin=self._polAn['fLow'],
                        freqmax=self._polAn['fHigh'],
                        corners=2,
                        zerophase=True)
        s = np.asarray(self.get_all_nslc())
        for network in np.unique(s[:, 0]):
            for station in np.unique(s[:, 1]):
                nTr = network + '_' + station
                try:
                    print('polarization analysis ' + station)
                    stTrace = appTrace.select(network, station)
                    u = obspy.signal.polarization.polarization_analysis(
                        stTrace, self._polAn['polWinLen'],
                        self._polAn['polWinFr'], self._polAn['fLow'],
                        self._polAn['fHigh'], ts, te, False, 'flinn')

                    x = np.where(u['planarity'] > self._polAn['plTh'])
                    ur = {k: u[k][x] for k in u.keys()}
                    for j in range(len(ur['timestamp'])):
                        a._a['utc_time'] = UTCDateTime(
                            ur['timestamp'][j]).strftime("%Y-%m-%d %H:%M:%S")
                        a._a['utc_time_str'] = UTCDateTime(
                            ur['timestamp'][j]).strftime("%Y-%m-%d %H:%M:%S")
                        a._a['event_type'] = "PL"
                        a._a['station'] = nTr
                        a._a['linearity'] = ur['planarity'][j]
                        a._a['az'] = ur['azimuth'][j]
                        a._a['tkoff'] = ur['incidence'][j]

                    a._a = {
                        'id_alert': '',
                        'utc_time': '',
                        'utc_time_str': '',
                        'event_type': '',
                        'station': '',
                        'channel': '',
                        'amplitude': '',
                        'linearity': '',
                        'az': '',
                        'tkoff': '',
                        'freq': '',
                        'lat': '',
                        'lon': '',
                        'note': ''
                    }
                except:
                    print('polarization analysis ' + station + ' failed')

    def run(self,
            network,
            station,
            channel,
            tStart=UTCDateTime.now(),
            rt=True):
        logging.basicConfig(filename='log.log',
                            level='WARNING',
                            format='%(asctime)s %(message)s')

        r = False

        try:

            data = np.load(self._basePath + 'elSave.npz')
            self._elab = data['e'].tolist()
            self._elabHyst = data['h'].tolist()
        except:

            pass

        self._stationData = {
            'BRK0': self._inv.get_coordinates('LK.BRK0..EHZ'),
            'BRK1': self._inv.get_coordinates('LK.BRK1..EHZ'),
            'BRK2': self._inv.get_coordinates('LK.BRK2..EHZ'),
            'BRK3': self._inv.get_coordinates('LK.BRK3..EHZ'),
            'BRK4': self._inv.get_coordinates('LK.BRK4..EHZ'),
        }

        with open(self._basePathRT + 'elab_status.json', 'w') as fp:
            json.dump(self._stationData, fp)
            fp.close()

        self._tNow = tStart
        while True:

            if self._tNow > UTCDateTime.now() - 5:
                time.sleep(5)
                self._tNow = UTCDateTime.now()
                print(self._tNow)
            else:
                self._tNow += 10
                if (not rt) & (not self._rtRunning) & (not self._hyRunning) & (
                        not self._saving) & (not self._elRunning):

                    print('sk')
                    print(self._tNow)

            if self._tNow.second < self._lastData.second:
                self._tEnd = self._tNow

                print('getting traces')
                try:
                    self._traces = self.get_waveforms(network, station, '',
                                                      channel,
                                                      self._tEnd - 720 * 60,
                                                      self._tEnd)
                    self._traces.merge(fill_value=0)
                except:
                    print('failed to get traces')

                if (self._tNow.minute % self._rtSft == 0) & (
                        self._lastData.minute % self._rtSft != 0) & rt:
                    print('getting events')
                    try:
                        self.getCasp()
                    except:
                        print('events failed')
                        pass

                    try:
                        self.pushEv()
                    except:
                        print('push events failed')
                        pass

                    if (not self._rtRunning) & rt:
                        pRt = multiprocessing.Process(
                            target=self.realTimeDrumPlot)
                        pRt.start()

                if (not self._elRunning):
                    self.elab()
                    self.polAn()

                if (self._tEnd.minute == 0) & (self._lastData.minute != 0):
                    if not self._hyRunning:
                        pHy = multiprocessing.Process(target=self.hystDrumPlot)
                        pHy.start()

                if self._tNow.hour < self._lastData.hour:
                    self.hystElab()

            self._lastData = self._tNow

    def getCasp(self):
        connection = psycopg2.connect(host='172.16.8.10',
                                      port='5432',
                                      database='casp_events',
                                      user='******',
                                      password='******')
        sql = 'SELECT event_id, t0, lat, lon, dpt, magWA FROM auto_eventi'
        cursor = connection.cursor()
        cursor.execute(sql)
        p = cursor.fetchall()
        self._events = []
        for pp in p:
            e = {
                'id': pp[0],
                'time': UTCDateTime(pp[1]),
                'text': 'CASP ev. mag' + str(pp[5]),
                'lat': float(pp[2]),
                'lon': float(pp[3]),
                'dpt': float(pp[4]),
                'mag': float(pp[5])
            }
            self._events.append(e)

    def pushEv(self):
        connection = psycopg2.connect(host='80.211.98.179',
                                      port='5432',
                                      user='******',
                                      password='******')
        for e in self._events:

            sql = 'INSERT INTO seismic.events_casp (geom,lat,lon,utc_time,utc_time_str,magnitudo,depth,id_casp) ' \
                  "VALUES (ST_GeomFromText('POINT(" + str(e['lon']) + ' ' + str(e['lat']) + ")', 4326),"\
                  + str(e['lat']) + ','+ str(e['lon'])+ ",'"+  str(UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S"))+ "','"+  str(UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S"))+"',"+str(e['mag'])+','+ str(e['dpt'])  +','+e['id']+") ON CONFLICT DO NOTHING;"
            connection.cursor().execute(sql)
            connection.commit()

    def pushIntEv(self, e, table='seismic.events_swarm', id='id_swarm'):
        connection = psycopg2.connect(host='80.211.98.179',
                                      port='5432',
                                      user='******',
                                      password='******')
        #for e in events:

        sql = 'INSERT INTO '+table+ ' (geom,note,lat,lon,utc_time,utc_time_str,magnitudo,depth,'+id+') ' \
              "VALUES (ST_GeomFromText('POINT(" + str(e['lon']) + ' ' + str(e['lat']) + ")', 4326)" \
              + ",'" + e['note'] + "'," + str(e['lat']) + ',' + str(e['lon']) + ",'" + str(
            UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S")) + "','" + str(
            UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S")) + "'," + str(e['mag']) + ',' + str(e['dpt']) + ",'" + \
              e['id'] + "') ON CONFLICT DO NOTHING;"
        connection.cursor().execute(sql)
        connection.commit()
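
The pushEv and pushIntEv methods above build their SQL by string concatenation. The sketch below shows the same events_casp insert as a parameterized query, so psycopg2 escapes the values itself; it is an illustration, not part of the original class, and assumes a row dict e with the same keys used above.

from obspy import UTCDateTime


def push_event(connection, e):
    # Sketch only: same table and columns as pushEv above, but the values are
    # passed as query parameters instead of being concatenated into the SQL.
    sql = (
        "INSERT INTO seismic.events_casp "
        "(geom, lat, lon, utc_time, utc_time_str, magnitudo, depth, id_casp) "
        "VALUES (ST_SetSRID(ST_MakePoint(%s, %s), 4326), "
        "%s, %s, %s, %s, %s, %s, %s) ON CONFLICT DO NOTHING;"
    )
    t_str = UTCDateTime(e['time']).strftime("%Y-%m-%d %H:%M:%S")
    params = (e['lon'], e['lat'], e['lat'], e['lon'], t_str, t_str,
              e['mag'], e['dpt'], e['id'])
    with connection.cursor() as cur:
        cur.execute(sql, params)
    connection.commit()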
Example #3
def get_s2s_stream(dicread):
    traces = []
    for v in dicread.values():
        if v[0] is None:
            traces.extend(get_stream(v[1]).traces)
    return Stream(traces)
Example #4
def find_LFEs(family_file, station_file, template_dir, tbegin, tend, \
    TDUR, duration, filt, freq0, dt, nattempts, waittime, type_threshold='MAD', \
    threshold=0.0075):
    """
    Find LFEs with the temporary stations from FAME
    using the templates from Plourde et al. (2015)

    Input:
        type family_file = string
        family_file = File containing the list of LFE families
        type station_file = string
        station_file = File containing the list of stations
        type template_dir = string
        template_dir = Directory where to find the LFE templates
        type tbegin = tuple of 6 integers
        tbegin = Time when we begin looking for LFEs
        type tend = tuple of 6 integers
        tend = Time when we stop looking for LFEs
        type TDUR = float
        TDUR = Time to add before and after the time window for tapering
        type duration = float
        duration = Duration of the LFE templates
        type filt = tuple of floats
        filt = Lower and upper frequencies of the filter
        type freq0 = float
        freq0 = Maximum frequency rate of LFE occurrence
        type dt = float
        dt = Time step for the LFE templates
        type nattempts = integer
        nattempts = Number of times we try to download data
        type waittime = positive float
        waittime = Time to wait between two attempts at downloading
        type type_threshold = string
        type_threshold = 'MAD' or 'Threshold'
        type threshold = float
        threshold = Cross correlation value must be higher than that
    Output:
        None
    """

    # Get the network, channels, and location of the stations
    staloc = pd.read_csv(station_file, \
        sep=r'\s{1,}', header=None, engine='python')
    staloc.columns = ['station', 'network', 'channels', 'location', \
        'server', 'latitude', 'longitude', 'time_on', 'time_off']

    # Begin and end time of analysis
    t1 = UTCDateTime(year=tbegin[0], month=tbegin[1], \
        day=tbegin[2], hour=tbegin[3], minute=tbegin[4], \
        second=tbegin[5])
    t2 = UTCDateTime(year=tend[0], month=tend[1], \
        day=tend[2], hour=tend[3], minute=tend[4], \
        second=tend[5])

    # Number of hours of data to analyze
    nhour = int(ceil((t2 - t1) / 3600.0))

    # Begin and end time of downloading
    Tstart = t1 - TDUR
    Tend = t2 + duration + TDUR

    # Temporary directory to store the data
    namedir = 'tmp'
    if not os.path.exists(namedir):
        os.makedirs(namedir)

    # Download the data from the stations
    for ir in range(0, len(staloc)):
        station = staloc['station'][ir]
        network = staloc['network'][ir]
        channels = staloc['channels'][ir]
        location = staloc['location'][ir]
        server = staloc['server'][ir]
        time_on = staloc['time_on'][ir]
        time_off = staloc['time_off'][ir]

        # File to write error messages
        namedir = 'error'
        if not os.path.exists(namedir):
            os.makedirs(namedir)
        errorfile = 'error/' + station + '.txt'

        # Check whether there are data for this period of time
        year_on = int(time_on[0:4])
        month_on = int(time_on[5:7])
        day_on = int(time_on[8:10])
        year_off = int(time_off[0:4])
        month_off = int(time_off[5:7])
        day_off = int(time_off[8:10])
        if ((Tstart > UTCDateTime(year=year_on, month=month_on, day=day_on)) \
           and (Tend < UTCDateTime(year=year_off, month=month_off, day=day_off))):

            # First case: we can get the data from IRIS
            if (server == 'IRIS'):
                (D, orientation) = get_from_IRIS(station, network, channels, \
                    location, Tstart, Tend, filt, dt, nattempts, waittime, \
                    errorfile, DATADIR)
            # Second case: we get the data from NCEDC
            elif (server == 'NCEDC'):
                (D, orientation) = get_from_NCEDC(station, network, channels, \
                    location, Tstart, Tend, filt, dt, nattempts, waittime, \
                    errorfile, DATADIR)
            else:
                raise ValueError(
                    'You can only download data from IRIS and NCEDC')

            # Store the data into temporary files
            if (type(D) == obspy.core.stream.Stream):
                D.write('tmp/' + station + '.mseed', format='MSEED')
                namefile = 'tmp/' + station + '.pkl'
                pickle.dump(orientation, open(namefile, 'wb'))

    # Loop on families
    families = pd.read_csv(family_file, \
        sep=r'\s{1,}', header=None, engine='python')
    families.columns = ['family', 'stations']
    for i in range(0, len(families)):

        # Create directory to store the LFEs times
        namedir = 'LFEs/' + families['family'].iloc[i]
        if not os.path.exists(namedir):
            os.makedirs(namedir)

        # File to write error messages
        namedir = 'error'
        if not os.path.exists(namedir):
            os.makedirs(namedir)
        errorfile = 'error/' + families['family'].iloc[i] + '.txt'

        # Create dataframe to store LFE times
        df = pd.DataFrame(columns=['year', 'month', 'day', 'hour', \
        'minute', 'second', 'cc', 'nchannel'])

        # Read the templates
        stations = families['stations'].iloc[i].split(',')
        templates = Stream()
        for station in stations:
            templatefile = template_dir + '/' + \
                families['family'].iloc[i] + '/' + station + '.pkl'
            with open(templatefile, 'rb') as f:
                data = pickle.load(f)
            if (len(data) == 3):
                EW = data[0]
                NS = data[1]
                UD = data[2]
                EW.stats.station = station
                NS.stats.station = station
                EW.stats.channel = 'E'
                NS.stats.channel = 'N'
                templates.append(EW)
                templates.append(NS)
            else:
                UD = data[0]
            UD.stats.station = station
            UD.stats.channel = 'Z'
            templates.append(UD)

        # Loop on hours of data
        for hour in range(0, nhour):
            nchannel = 0
            Tstart = t1 + hour * 3600.0
            Tend = t1 + (hour + 1) * 3600.0 + duration
            delta = Tend - Tstart
            ndata = int(delta / dt) + 1

            # Get the data
            data = []
            for station in stations:
                try:
                    D = read('tmp/' + station + '.mseed')
                    D = D.slice(Tstart, Tend)
                    namefile = 'tmp/' + station + '.pkl'
                    orientation = pickle.load(open(namefile, 'rb'))

                    # Get station metadata for reading response file
                    for ir in range(0, len(staloc)):
                        if (station == staloc['station'][ir]):
                            network = staloc['network'][ir]
                            channels = staloc['channels'][ir]
                            location = staloc['location'][ir]
                            server = staloc['server'][ir]

                    # Orientation of template
                    # Date chosen: January 1st 2020
                    mychannels = channels.split(',')
                    mylocation = location
                    if (mylocation == '--'):
                        mylocation = ''
                    response = os.path.join(
                        DATADIR,
                        'response/') + network + '_' + station + '.xml'
                    inventory = read_inventory(response, format='STATIONXML')
                    reference = []
                    for channel in mychannels:
                        angle = inventory.get_orientation(network + '.' + \
                            station + '.' + mylocation + '.' + channel, \
                            UTCDateTime(2020, 1, 1, 0, 0, 0))
                        reference.append(angle)

                    # Append data to stream
                    if (type(D) == obspy.core.stream.Stream):
                        stationdata = fill_data(D, orientation, station,
                                                channels, reference)
                        if (len(stationdata) > 0):
                            for stream in stationdata:
                                data.append(stream)
                except:
                    message = 'No data available for station {} '.format( \
                        station) + 'at time {}/{}/{} - {}:{}:{}\n'.format( \
                        Tstart.year, Tstart.month, Tstart.day, Tstart.hour, \
                        Tstart.minute, Tstart.second)
                    with open(errorfile, 'a') as file:
                        file.write(message)

            # Loop on channels
            for channel in range(0, len(data)):
                subdata = data[channel]
                # Check whether we have a complete one-hour-long recording
                if (len(subdata) == 1):
                    if (len(subdata[0].data) == ndata):
                        # Get the template
                        station = subdata[0].stats.station
                        component = subdata[0].stats.channel
                        template = templates.select(station=station, \
                            component=component)[0]
                        # Cross correlation
                        cctemp = correlate.optimized(template, subdata[0])
                        if (nchannel > 0):
                            cc = np.vstack((cc, cctemp))
                        else:
                            cc = cctemp
                        nchannel = nchannel + 1

            if (nchannel > 0):
                # Compute average cross-correlation across channels
                meancc = np.mean(cc, axis=0)
                if (type_threshold == 'MAD'):
                    MAD = np.median(np.abs(meancc - np.mean(meancc)))
                    index = np.where(meancc >= threshold * MAD)
                elif (type_threshold == 'Threshold'):
                    index = np.where(meancc >= threshold)
                else:
                    raise ValueError(
                        'Type of threshold must be MAD or Threshold')
                times = np.arange(0.0, np.shape(meancc)[0] * dt, dt)

                # Get LFE times
                if np.shape(index)[1] > 0:
                    (time, cc) = clean_LFEs(index, times, meancc, dt, freq0)

                    # Add LFE times to dataframe
                    i0 = len(df.index)
                    for j in range(0, len(time)):
                        timeLFE = Tstart + time[j]
                        df.loc[i0 + j] = [int(timeLFE.year), int(timeLFE.month), \
                            int(timeLFE.day), int(timeLFE.hour), \
                            int(timeLFE.minute), timeLFE.second + \
                            timeLFE.microsecond / 1000000.0, cc[j], nchannel]

        # Add to pandas dataframe and save
        df_all = df
        df_all = df_all.astype(dtype={'year':'int32', 'month':'int32', \
            'day':'int32', 'hour':'int32', 'minute':'int32', \
            'second':'float', 'cc':'float', 'nchannel':'int32'})
        df_all.to_csv('LFEs/' + families['family'].iloc[i] + '/catalog_' + \
            '{:04d}{:02d}{:02d}_{:02d}{:02d}{:02d}'.format(tbegin[0], \
            tbegin[1], tbegin[2], tbegin[3], tbegin[4], tbegin[5]) + '.csv')
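
A hedged illustration of how find_LFEs might be invoked; every file name and parameter value below is hypothetical and only shows the argument types described in the docstring.

# Hypothetical call; paths and values are placeholders, not from the original code.
find_LFEs(family_file='families.txt',
          station_file='stations.txt',
          template_dir='templates',
          tbegin=(2008, 4, 1, 0, 0, 0),   # year, month, day, hour, minute, second
          tend=(2008, 4, 2, 0, 0, 0),
          TDUR=10.0,                      # taper padding (s)
          duration=60.0,                  # template duration (s)
          filt=(1.5, 9.0),                # band-pass corners (Hz)
          freq0=1.0,                      # maximum LFE occurrence rate
          dt=0.05,                        # template time step (s)
          nattempts=10,
          waittime=10.0,
          type_threshold='MAD',
          threshold=8.0)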
Example #5
def _plot_kurtosis(wl, fb, trace, kurtosis, corr_cum):
    print(f'Plot kurtosis for win_len={wl}, f_band={fb}', 'debug')
    cor = corr_cum.copy()
    cor.stats.channel = 'COR'
    kurtosis.stats.channel = 'KUR'
    Stream([trace, kurtosis, cor]).plot(size=(600, 600), equal_scale=False)
Example #6
bad_files_2 = np.array([
    mseed_dir_in + '2020/ANMA/ANMA.EN..HH%s.2020.%03d' % (j, k)
    for j in ['E', 'N', 'Z'] for k in np.arange(0, 78)
])
bad_files = np.append(bad_files_1, bad_files_2)

# get list of all days and of unique days
days_all = np.array([
    datetime.strptime(a[-8:], '%Y.%j').strftime('%Y.%m.%d') for a in dayfiles
])
days = np.unique(days_all)

for dy in days:
    file_list = dayfiles[np.where(
        days_all == dy)]  # files for this day, all stations

    st = Stream()
    for fl in file_list:
        if fl not in bad_files and os.stat(fl).st_size != 0:
            st += read(fl)
    if len(st) > 0:
        # decimate or resample to 1Hz
        if np.all([tr.stats.sampling_rate == 100 for tr in st]):
            st.decimate(100, no_filter=True)
        else:
            st.resample(1.0, no_filter=True)

        st.merge(method=1, fill_value=0)  # make sure 1 trace per sta/comp

        for tr in st:  # rename channels to L* to match [edited] dataless
            tr.stats.channel = 'L' + tr.stats.channel[1:]
Example #7
def dataClean(alltrigs, opt, flag=1):

    """
    Examine triggers and weed out spikes and calibration pulses using kurtosis and
    outlier ratios
    
    alltrigs: triggers output from triggering
    opt: opt from config
    flag: 1 to check only a defined window, 0 to check the whole waveform for spikes
        (note that different threshold values should be used for different window lengths)
    
    Returns good trigs (trigs) and several junk types (junk, junkFI, junkKurt)
    """
    
    trigs=Stream()
    junkFI=Stream()
    junkKurt=Stream()
    junk=Stream()
    for i in range(len(alltrigs)):
            
        njunk = 0
        ntele = 0
        
        for n in range(opt.nsta):
            
            dat = alltrigs[i].data[n*opt.wshape:(n+1)*opt.wshape]
            if flag == 1:
                datcut=dat[range(int((opt.ptrig-opt.kurtwin/2)*opt.samprate),
                    int((opt.ptrig+opt.kurtwin/2)*opt.samprate))]
            else:
                datcut=dat
            
            if np.sum(np.abs(dat))!=0.0:
                # Calculate kurtosis in window
                k = stats.kurtosis(datcut)
                # Compute kurtosis of frequency amplitude spectrum next
                datf = np.absolute(fft(dat))
                kf = stats.kurtosis(datf)
                # Calculate outlier ratio using z ((data-median)/mad)
                mad = np.nanmedian(np.absolute(dat - np.nanmedian(dat)))
                z = (dat-np.median(dat))/mad
                # Outliers have z > 4.45
                orm = len(z[z>4.45])/np.array(len(z)).astype(float)
            
                if k >= opt.kurtmax or orm >= opt.oratiomax or kf >= opt.kurtfmax:
                    njunk+=1
                
                winstart = int(opt.ptrig*opt.samprate - opt.winlen/10)
                winend = int(opt.ptrig*opt.samprate - opt.winlen/10 + opt.winlen)
                fftwin = np.reshape(fft(dat[winstart:winend]),(opt.winlen,))
                if np.median(np.abs(dat[winstart:winend]))!=0:
                    fi = np.log10(np.mean(np.abs(np.real(
                        fftwin[int(opt.fiupmin*opt.winlen/opt.samprate):int(
                        opt.fiupmax*opt.winlen/opt.samprate)])))/np.mean(np.abs(np.real(
                        fftwin[int(opt.filomin*opt.winlen/opt.samprate):int(
                        opt.filomax*opt.winlen/opt.samprate)]))))
                    if fi<opt.telefi:
                        ntele+=1
        
        # Allow if there are enough good stations to correlate
        if njunk <= (opt.nsta-opt.ncor) and ntele <= opt.teleok:
            trigs.append(alltrigs[i])
        else:
            if njunk > 0:
                if ntele > 0:
                    junk.append(alltrigs[i])
                else:
                    junkKurt.append(alltrigs[i])
            else:
                junkFI.append(alltrigs[i])
                
    return trigs, junk, junkFI, junkKurt
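
The spike test in dataClean combines sample kurtosis with a MAD-based outlier ratio. Below is a small self-contained sketch of those two statistics on a synthetic trace; the 4.45 cutoff is the one hard-coded above, everything else is illustrative.

# Illustrative only: kurtosis and MAD-based outlier ratio on synthetic data,
# mirroring the per-station test in dataClean.
import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
dat = rng.normal(size=2000)
dat[1000] = 50.0                          # inject a spike

k = stats.kurtosis(dat)                   # impulsive spikes give high kurtosis
mad = np.nanmedian(np.absolute(dat - np.nanmedian(dat)))
z = (dat - np.median(dat)) / mad          # robust z-score
orm = len(z[z > 4.45]) / float(len(z))    # outlier ratio, same 4.45 cutoff as above
print(k, orm)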
Example #8
def trigger(st, stC, rtable, opt):

    """
    Run triggering algorithm on a stream of data.

    st: OBSPy stream of data
    rtable: Repeater table contains reference time of previous trigger in samples
    opt: Options object describing station/run parameters

    Returns triggered traces as OBSPy trace object updates ptime for next run 
    """
    
    tr = st[0]
    t = tr.stats.starttime

    cft = coincidence_trigger("classicstalta", opt.trigon, opt.trigoff, stC, opt.nstaC,
        sta=opt.swin, lta=opt.lwin, details=True)
            
    if len(cft) > 0:
        
        ind = 0
        
        # Slice out the data from st and save the maximum STA/LTA ratio value for
        # use in orphan expiration
        
        # Convert ptime from time of last trigger to seconds before start time
        if rtable.attrs.ptime:
            ptime = (UTCDateTime(rtable.attrs.ptime) - t)
        else:
            ptime = -opt.mintrig
                
        for n in range(len(cft)):
                    
            ttime = cft[n]['time'] # This is a UTCDateTime, not samples
            
            if (ttime >= t + opt.atrig) and (ttime >= t + ptime +
                opt.mintrig) and (ttime < t + len(tr.data)/opt.samprate -
                2*opt.atrig):
                
                ptime = ttime - t
                
                # Slice and save as first trace              
                ttmp = st.slice(ttime - opt.ptrig, ttime + opt.atrig)
                ttmp[0].data = ttmp[0].data[0:opt.wshape] - np.mean(
                    ttmp[0].data[0:opt.wshape])
                for s in range(1,len(ttmp)):
                    ttmp[0].data = np.append(ttmp[0].data, ttmp[s].data[
                        0:opt.wshape] - np.mean(ttmp[s].data[0:opt.wshape]))
                ttmp[0].stats.maxratio = np.max(cft[n]['cft_peaks'])
                if ind == 0:
                    trigs = Stream(ttmp[0])
                    ind = ind+1
                else:
                    trigs = trigs.append(ttmp[0])
                                                         
        if ind == 0:
            return []
        else:
            rtable.attrs.ptime = (t + ptime).isoformat()
            return trigs
    else:
        return []
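
A minimal, stand-alone sketch of the underlying ObsPy coincidence-trigger call, without the rtable/opt plumbing above; the thresholds, window lengths, and the use of ObsPy's bundled example stream are illustrative.

# Illustrative only: network coincidence trigger on ObsPy's example stream,
# analogous to the coincidence_trigger call inside trigger() above.
from obspy import read
from obspy.signal.trigger import coincidence_trigger

st = read()  # ObsPy's bundled three-channel example stream
triggers = coincidence_trigger('classicstalta', 3.0, 1.0, st, 2,
                               sta=1.0, lta=10.0, details=True)
for trig in triggers:
    print(trig['time'], trig['cft_peaks'])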
Example #9
    def section_plot(self,
                     assoc_id,
                     files,
                     seconds_ahead=5,
                     record_length=100,
                     channel='Z'):
        station = self.assoc_db.query(Candidate.sta).\
            filter(Candidate.assoc_id == assoc_id).all()
        sta_list = []
        for sta, in station:
            sta_list.append(str(sta))
        station_single = self.assoc_db.query(Pick.sta).\
            filter(Pick.assoc_id == assoc_id).\
            filter(Pick.locate_flag == None).all()
        for sta, in station_single:
            sta_list.append(str(sta))
        # print sta_list

        eve = self.assoc_db.query(Associated).\
            filter(Associated.id == assoc_id).first()
        # Earthquakes' epicenter
        eq_lat = eve.latitude
        eq_lon = eve.longitude

        # Reading the waveforms
        ST = Stream()
        for file in files:
            st = read(file)
            ST += st

        # some seismometers use channel codes like BH1, BH2 or BH3;
        # map the requested component to the matching channel codes:
        if channel == 'E' or channel == 'e':
            Chan = 'E1'
        elif channel == 'N' or channel == 'n':
            Chan = 'N2'
        elif channel == 'Z' or channel == 'z':
            Chan = 'Z3'
        else:
            print('Please input component E, e, N, n, Z, or z,'
                  ' the default is Z')

        # Calculating distance from headers lat/lon
        ST_new = Stream()  # print ST
        for tr in ST:
            if tr.stats.channel[2] in Chan and tr.stats.station in sta_list:
                if ((tr.stats.starttime.datetime < eve.ot)
                        and (tr.stats.endtime.datetime > eve.ot)):
                    tr.trim(
                        UTCDateTime(eve.ot - timedelta(seconds=seconds_ahead)),
                        UTCDateTime(eve.ot + timedelta(seconds=record_length)))
                    ST_new += tr
        # print ST_new.__str__(extended=True)

        while True:
            ST_new_sta = []
            for tr in ST_new:
                ST_new_sta.append(tr.stats.station)
            duplicate = list(
                set([tr for tr in ST_new_sta if ST_new_sta.count(tr) > 1]))
            if not duplicate:
                break
            index = [
                i for (i, j) in enumerate(ST_new_sta) if j == duplicate[-1]
            ]
            i = 0
            while True:
                if ST_new[index[i]].stats.npts < ST_new[index[i +
                                                              1]].stats.npts:
                    del ST_new[index[i]]
                    break
                elif (ST_new[index[i]].stats.npts >=
                      ST_new[index[i + 1]].stats.npts):
                    del ST_new[index[i + 1]]
                    break
        # print ST_new.__str__(extended=True)

        ST_new.detrend('demean')
        # ST_new.filter('bandpass', freqmin=0.1, freqmax=100)

        factor = 10
        numRows = len(ST_new)
        segs, ticklocs, sta, circle_x, circle_y = [], [], [], [], []
        segs_picks, ticklocs_picks = [], []
        for tr in ST_new:
            dmax = tr.data.max()
            dmin = tr.data.min()
            data = tr.data / (dmax - dmin) * factor
            # due to float point arithmetic issue, can not use:
            #  "t = np.arange(0, tr.stats.npts / tr.stats.sampling_rate,
            #                 tr.stats.delta)"
            t = np.arange(
                0,
                round(tr.stats.npts / tr.stats.sampling_rate /
                      tr.stats.delta)) * tr.stats.delta
            segs.append(np.hstack((data[:, np.newaxis], t[:, np.newaxis])))
            lon, lat = self.tt_stations_db_3D.\
                query(Station3D.longitude, Station3D.latitude).\
                filter(Station3D.sta == tr.stats.station).first()
            # gps2DistAzimuth returns in meters, convert to km by /1000
            distance = int(
                gps2DistAzimuth(lat, lon, eq_lat, eq_lon)[0] / 1000.)
            # distance=self.assoc_db.query(Candidate.d_km).\
            #   filter(Candidate.assoc_id==assoc_id).\
            #   filter(Candidate.sta==tr.stats.station).\
            #   first()[0]#;print distance,tr.stats.station
            ticklocs.append(distance)
            sta.append(tr.stats.station)
            # DOT plot where picks are picked, notice that for vertical trace
            # plot p is queried from Pick table, s from PickModified table
            # horizontal trace plot p and s queried from PickModified table
            if Chan == 'Z3':
                picks_p = self.assoc_db.query(Pick.time).\
                    filter(Pick.assoc_id == assoc_id).\
                    filter(Pick.sta == tr.stats.station).\
                    filter(Pick.chan == tr.stats.channel).\
                    filter(Pick.phase == 'P').all()
                if not picks_p:
                    picks_p = self.assoc_db.query(PickModified.time).\
                        filter(PickModified.assoc_id == assoc_id).\
                        filter(PickModified.sta == tr.stats.station).\
                        filter(PickModified.phase == 'P').all()
                picks_s = self.assoc_db.query(PickModified.time).\
                    filter(PickModified.assoc_id == assoc_id).\
                    filter(PickModified.sta == tr.stats.station).\
                    filter(PickModified.phase == 'S').all()
                # print picks_p,picks_s
            else:
                picks_p = self.assoc_db.query(PickModified.time).\
                    filter(PickModified.assoc_id == assoc_id).\
                    filter(PickModified.sta == tr.stats.station).\
                    filter(PickModified.phase == 'P').all()
                picks_s = self.assoc_db.query(PickModified.time).\
                    filter(PickModified.assoc_id == assoc_id).\
                    filter(PickModified.sta == tr.stats.station).\
                    filter(PickModified.phase == 'S').all()
                # print picks_p,picks_s
            picks = picks_p + picks_s
            # picks=self.assoc_db.query(PickModified.time).\
            #   filter(PickModified.assoc_id == assoc_id).\
            #   filter(PickModified.sta == tr.stats.station).all()
            for pick, in picks:
                pick_delta = pick - eve.ot + timedelta(seconds=seconds_ahead)
                index = int(pick_delta.total_seconds() / tr.stats.delta)
                # print pick, eve.ot, index, len(data)
                circle_x.append(distance + data[index])
                circle_y.append(t[index])
                # BAR plot where picks are picked
                t_picks = np.array([t[index], t[index]])
                data_picks = np.array([data.min(), data.max()])
                segs_picks.append(
                    np.hstack(
                        (data_picks[:, np.newaxis], t_picks[:, np.newaxis])))
                ticklocs_picks.append(distance)
        tick_max = max(ticklocs)
        tick_min = min(ticklocs)
        offsets = np.zeros((numRows, 2), dtype=float)
        offsets[:, 0] = ticklocs
        offsets_picks = np.zeros((len(segs_picks), 2), dtype=float)
        offsets_picks[:, 0] = ticklocs_picks

        # lines = LineCollection(segs, offsets=offsets, transOffset=None,
        #                        linewidths=.25,
        #                        colors=[colorConverter.to_rgba(i) for i in
        #                                ('b','g','r','c','m','y','k')])
        # color='gray'
        lines = LineCollection(segs,
                               offsets=offsets,
                               transOffset=None,
                               linewidths=.25,
                               color='gray')
        # lines_picks = LineCollection(segs_picks, offsets=offsets_picks,
        #                              transOffset=None, linewidths=1,
        #                              color='r')
        lines_picks = LineCollection(segs_picks,
                                     offsets=offsets_picks,
                                     transOffset=None,
                                     linewidths=1,
                                     color='k')

        # print sta,ticklocs
        fig = plt.figure(figsize=(15, 8))
        ax1 = fig.add_subplot(111)
        # blue dots indicating where to cross the waveforms
        # ax1.plot(circle_x,circle_y,'o')
        ax1.plot(circle_x, circle_y, 'o', c='gray')
        # x0 = tick_min - (tick_max - tick_min) * 0.1
        x1 = tick_max + (tick_max - tick_min) * 0.1
        plt.ylim(0, record_length)
        plt.xlim(0, x1)
        ax1.add_collection(lines)
        ax1.add_collection(lines_picks)
        ax1.set_xticks(ticklocs)
        ax1.set_xticklabels(sta)
        ax1.invert_yaxis()
        ax1.xaxis.tick_top()
        # ax2 = ax1.twiny()
        # ax2.xaxis.tick_bottom()
        plt.setp(plt.xticks()[1], rotation=45)
        # xlabel('Station (km)')
        plt.xlabel('channel: ' + channel, fontsize=18)
        plt.ylabel('Record Length (s)', fontsize=18)
        # plt.title('Section Plot of Event at %s' % (tr.stats.starttime))
        # plt.tight_layout()
        plt.show()
Example #10
class Source_code:
    def __init__(self, veloc_model_taup):
        self.veloc_model = veloc_model_taup

    def get_P(self, epi, depth_m):
        model = TauPyModel(model=self.veloc_model)
        tt = model.get_travel_times(source_depth_in_km=depth_m / 1000,
                                    distance_in_degree=epi,
                                    phase_list=['P'])

        return tt[0].time

    def get_S(self, epi, depth_m):
        model = TauPyModel(model=self.veloc_model)
        tt = model.get_travel_times(source_depth_in_km=depth_m / 1000,
                                    distance_in_degree=epi,
                                    phase_list=['S'])
        return tt[0].time

    def get_window_obspy(self, seis_traces, epi, depth, time, npts):
        self.origin = seis_traces
        tt_P = self.get_P(
            epi, depth
        )  # Estimated P-wave arrival, based on the known velocity model
        tt_S = self.get_S(
            epi, depth
        )  # Estimated S-wave arrival, based on the known velocity model
        sec_per_sample = 1 / (seis_traces[0].meta.sampling_rate)
        #
        self.BW_stream = Stream()
        self.S_stream = Stream()
        self.P_stream = Stream()
        p_time = time.timestamp + tt_P
        s_time = time.timestamp + tt_S
        self.start_P = obspy.UTCDateTime(p_time - 5)
        self.start_S = obspy.UTCDateTime(s_time - 15)
        self.or_S_len = int(
            (self.start_S - time) / seis_traces.traces[0].stats.delta)
        self.or_P_len = int(
            (self.start_P - time) / seis_traces.traces[0].stats.delta)
        end_time_p = obspy.UTCDateTime(p_time + 20)
        end_time_s = obspy.UTCDateTime(s_time + 35)

        for i, trace in enumerate(seis_traces.traces):
            P_trace = Trace.slice(trace, self.start_P, end_time_p)
            self.P_len = len(P_trace)
            S_trace = Trace.slice(trace, self.start_S, end_time_s)
            self.S_len = len(S_trace)
            stream_add = P_trace.__add__(S_trace,
                                         fill_value=0,
                                         sanity_checks=True)
            zero_trace = Trace(np.zeros(npts),
                               header={
                                   "starttime": self.start_P,
                                   'delta': trace.meta.delta,
                                   "station": trace.meta.station,
                                   "network": trace.meta.network,
                                   "location": trace.meta.location,
                                   "channel": trace.meta.channel
                               })
            if 'T' in trace.meta.channel:
                total_trace = zero_trace.__add__(S_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=S_trace.data,
                                                 sanity_checks=True)
                total_s_trace = total_trace.copy()
            else:
                total_trace = zero_trace.__add__(stream_add,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=stream_add.data,
                                                 sanity_checks=True)
                total_s_trace = zero_trace.__add__(S_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=S_trace.data,
                                                   sanity_checks=True)
                total_p_trace = zero_trace.__add__(P_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=P_trace.data,
                                                   sanity_checks=True)
                self.P_stream.append(total_p_trace)
            self.S_stream.append(total_s_trace)
            self.BW_stream.append(total_trace)
        # apply the band-limiting filter once, after all traces have been collected
        self.S_stream = self.BW_filter(self.S_stream)
        self.P_stream = self.BW_filter(self.P_stream)
        self.BW_stream = self.BW_filter(self.BW_stream)

    def zero_to_nan(self, values):
        """Replace every 0 with 'nan' and return a copy."""
        return [float('nan') if x == 0 else x for x in values]

    def BW_filter(self, stream):
        stream.filter('highpass', freq=1.0 / 30.0)
        # stream.filter('highpass', freq=0.5)
        stream.filter('lowpass', freq=0.75)
        # stream.filter('lowpass', freq=0.1)
        return stream

    def stack_BW_SW_Streams(self, traces_BW, traces_RW, traces_LW):
        stack_stream = traces_BW + traces_RW + traces_LW
        return stack_stream

    def stack_traces(self, stream):
        stacked_traces = np.array([])
        for trace in stream.traces:
            stacked_traces = np.append(stacked_traces, trace.data)
        return stacked_traces

    def split_traces(self, d_syn, traces_obs, time_at_receiver):
        Stream_split = Stream()
        for i, trace in enumerate(traces_obs.traces):
            new_trace = Trace(d_syn[i * len(trace):i * len(trace) +
                                    len(trace)],
                              header={
                                  "starttime": time_at_receiver,
                                  'delta': trace.meta.delta,
                                  "station": trace.meta.station,
                                  "network": trace.meta.network,
                                  "location": trace.meta.location,
                                  "channel": trace.meta.channel,
                                  "instaseis": trace.meta.instaseis
                              })
            Stream_split.append(new_trace)

        return Stream_split

    def split_BW_SW(self, BW_SW_stream, epi, depth, time_at_receiver, npts):
        BW_stream = Stream()
        R_stream = Stream()
        L_stream = Stream()
        for i in BW_SW_stream:
            if 'X' in i.id:
                BW_stream.append(i)

            elif 'R1' in i.id:
                R_stream.append(i)

            elif 'G1' in i.id:
                L_stream.append(i)

        P_S_syn, P_syn, S_syn = self.get_window_split_syn(
            BW_stream, epi, depth, time_at_receiver, npts)
        return P_S_syn, P_syn, S_syn, R_stream, L_stream

    def get_window_split_syn(self, splitted_syn, epi, depth, time_at_receiver,
                             npts):
        tt_P = self.get_P(
            epi, depth
        )  # Estimated P-wave arrival, based on the known velocity model
        tt_S = self.get_S(
            epi, depth
        )  # Estimated S-wave arrival, based on the known velocity model

        diff = tt_S - tt_P

        P_start = time_at_receiver
        P_end = obspy.UTCDateTime(P_start + 5 + 20)
        S_start = obspy.UTCDateTime(time_at_receiver.timestamp + diff)
        S_end = obspy.UTCDateTime(S_start + 5 + 20)

        p_stream = Stream()
        s_stream = Stream()
        total_stream = Stream()

        for i, trace in enumerate(splitted_syn.traces):
            P_trace = Trace.slice(trace, P_start, P_end)
            S_trace = Trace.slice(trace, S_start, S_end)
            stream_add = P_trace.__add__(S_trace,
                                         fill_value=0,
                                         sanity_checks=True)
            zero_trace = Trace(np.zeros(npts),
                               header={
                                   "starttime": P_start,
                                   'delta': trace.meta.delta,
                                   "station": trace.meta.station,
                                   "network": trace.meta.network,
                                   "location": trace.meta.location,
                                   "channel": trace.meta.channel,
                                   "instaseis": trace.meta.instaseis
                               })
            if 'T' in trace.meta.channel:
                total_trace = zero_trace.__add__(S_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=S_trace.data,
                                                 sanity_checks=True)
                total_s_trace = total_trace.copy()
            else:
                total_trace = zero_trace.__add__(stream_add,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=stream_add.data,
                                                 sanity_checks=True)
                total_s_trace = zero_trace.__add__(S_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=S_trace.data,
                                                   sanity_checks=True)
                total_p_trace = zero_trace.__add__(P_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=P_trace.data,
                                                   sanity_checks=True)
                p_stream.append(total_p_trace)
            s_stream.append(total_s_trace)
            total_stream.append(total_trace)
        return total_stream, p_stream, s_stream
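
A hedged usage sketch of the travel-time helpers in Source_code; the 'iasp91' model name and the event geometry are illustrative choices, not taken from the original code.

# Illustrative only: query first P and S arrival times for an event at 10 km
# depth observed at 30 degrees epicentral distance, using a standard TauP model.
src = Source_code('iasp91')
print('P arrival:', src.get_P(epi=30.0, depth_m=10000.0), 's')
print('S arrival:', src.get_S(epi=30.0, depth_m=10000.0), 's')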
Example #11
    def get_window_obspy(self, seis_traces, epi, depth, time, npts):
        self.origin = seis_traces
        tt_P = self.get_P(
            epi, depth
        )  # Estimated P-wave arrival, based on the known velocity model
        tt_S = self.get_S(
            epi, depth
        )  # Estimated S-wave arrival, based on the known velocity model
        sec_per_sample = 1 / (seis_traces[0].meta.sampling_rate)
        #
        self.BW_stream = Stream()
        self.S_stream = Stream()
        self.P_stream = Stream()
        p_time = time.timestamp + tt_P
        s_time = time.timestamp + tt_S
        self.start_P = obspy.UTCDateTime(p_time - 5)
        self.start_S = obspy.UTCDateTime(s_time - 15)
        self.or_S_len = int(
            (self.start_S - time) / seis_traces.traces[0].stats.delta)
        self.or_P_len = int(
            (self.start_P - time) / seis_traces.traces[0].stats.delta)
        end_time_p = obspy.UTCDateTime(p_time + 20)
        end_time_s = obspy.UTCDateTime(s_time + 35)

        for i, trace in enumerate(seis_traces.traces):
            P_trace = Trace.slice(trace, self.start_P, end_time_p)
            self.P_len = len(P_trace)
            S_trace = Trace.slice(trace, self.start_S, end_time_s)
            self.S_len = len(S_trace)
            stream_add = P_trace.__add__(S_trace,
                                         fill_value=0,
                                         sanity_checks=True)
            zero_trace = Trace(np.zeros(npts),
                               header={
                                   "starttime": self.start_P,
                                   'delta': trace.meta.delta,
                                   "station": trace.meta.station,
                                   "network": trace.meta.network,
                                   "location": trace.meta.location,
                                   "channel": trace.meta.channel
                               })
            if 'T' in trace.meta.channel:
                total_trace = zero_trace.__add__(S_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=S_trace.data,
                                                 sanity_checks=True)
                total_s_trace = total_trace.copy()
            else:
                total_trace = zero_trace.__add__(stream_add,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=stream_add.data,
                                                 sanity_checks=True)
                total_s_trace = zero_trace.__add__(S_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=S_trace.data,
                                                   sanity_checks=True)
                total_p_trace = zero_trace.__add__(P_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=P_trace.data,
                                                   sanity_checks=True)
                self.P_stream.append(total_p_trace)
            self.S_stream.append(total_s_trace)
            self.BW_stream.append(total_trace)
        # filter once, after all traces have been windowed and appended
        self.S_stream = self.BW_filter(self.S_stream)
        self.P_stream = self.BW_filter(self.P_stream)
        self.BW_stream = self.BW_filter(self.BW_stream)
Exemplo n.º 12
0
    def get_window_split_syn(self, splitted_syn, epi, depth, time_at_receiver,
                             npts):
        tt_P = self.get_P(
            epi, depth
        )  # Estimated P-wave arrival, based on the known velocity model
        tt_S = self.get_S(
            epi, depth
        )  # Estimated S-wave arrival, based on the known velocity model

        diff = tt_S - tt_P

        P_start = time_at_receiver
        P_end = obspy.UTCDateTime(P_start + 5 + 20)
        S_start = obspy.UTCDateTime(time_at_receiver.timestamp + diff)
        S_end = obspy.UTCDateTime(S_start + 5 + 20)

        p_stream = Stream()
        s_stream = Stream()
        total_stream = Stream()

        for i, trace in enumerate(splitted_syn.traces):
            P_trace = Trace.slice(trace, P_start, P_end)
            S_trace = Trace.slice(trace, S_start, S_end)
            stream_add = P_trace.__add__(S_trace,
                                         fill_value=0,
                                         sanity_checks=True)
            zero_trace = Trace(np.zeros(npts),
                               header={
                                   "starttime": P_start,
                                   'delta': trace.meta.delta,
                                   "station": trace.meta.station,
                                   "network": trace.meta.network,
                                   "location": trace.meta.location,
                                   "channel": trace.meta.channel,
                                   "instaseis": trace.meta.instaseis
                               })
            if 'T' in trace.meta.channel:
                total_trace = zero_trace.__add__(S_trace,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=S_trace.data,
                                                 sanity_checks=True)
                total_s_trace = total_trace.copy()
            else:
                total_trace = zero_trace.__add__(stream_add,
                                                 method=0,
                                                 interpolation_samples=0,
                                                 fill_value=stream_add.data,
                                                 sanity_checks=True)
                total_s_trace = zero_trace.__add__(S_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=S_trace.data,
                                                   sanity_checks=True)
                total_p_trace = zero_trace.__add__(P_trace,
                                                   method=0,
                                                   interpolation_samples=0,
                                                   fill_value=P_trace.data,
                                                   sanity_checks=True)
                p_stream.append(total_p_trace)
            s_stream.append(total_s_trace)
            total_stream.append(total_trace)
        return total_stream, p_stream, s_stream
Exemplo n.º 13
0
    def split_BW_SW(self, BW_SW_stream, epi, depth, time_at_receiver, npts):
        BW_stream = Stream()
        R_stream = Stream()
        L_stream = Stream()
        for i in BW_SW_stream:
            if 'X' in i.id:
                BW_stream.append(i)

            elif 'R1' in i.id:
                R_stream.append(i)

            elif 'G1' in i.id:
                L_stream.append(i)

        P_S_syn, P_syn, S_syn = self.get_window_split_syn(
            BW_stream, epi, depth, time_at_receiver, npts)
        return P_S_syn, P_syn, S_syn, R_stream, L_stream
Exemplo n.º 14
0
def mergePreviews(stream):
    """
    Merges all preview traces in one Stream object. Does not change the
    original stream because the data needs to be copied anyway.

    :type stream: :class:`~obspy.core.stream.Stream`
    :param stream: Stream object to be merged
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: Merged Stream object.
    """
    copied_traces = copy(stream.traces)
    stream.sort()
    # Group traces by id.
    traces = {}
    dtypes = []
    for trace in stream:
        # Throw away empty traces.
        if trace.stats.npts == 0:
            continue
        if not hasattr(trace.stats, 'preview') or not trace.stats.preview:
            msg = 'Trace\n%s\n is no preview file.' % str(trace)
            raise Exception(msg)
        traces.setdefault(trace.id, [])
        traces[trace.id].append(trace)
        dtypes.append(trace.data.dtype)
    if len(traces) == 0:
        return Stream()
    # Initialize new Stream object.
    new_stream = Stream()
    for value in traces.values():
        if len(value) == 1:
            new_stream.append(value[0])
            continue
        # All traces need to have the same delta value and also be on the same
        # grid spacing. It is enough to only check the sampling rate because
        # the algorithm that creates the preview assures that the grid spacing
        # is correct.
        sampling_rates = set([tr.stats.sampling_rate for tr in value])
        if len(sampling_rates) != 1:
            msg = 'More than one sampling rate for traces with id %s.' % \
                  value[0].id
            raise Exception(msg)
        delta = value[0].stats.delta
        # Check dtype.
        dtypes = set([native_str(tr.data.dtype) for tr in value])
        if len(dtypes) > 1:
            msg = 'Different dtypes for traces with id %s' % value[0].id
            raise Exception(msg)
        dtype = dtypes.pop()
        # Get the minimum start and maximum end time for all traces.
        min_starttime = min([tr.stats.starttime for tr in value])
        max_endtime = max([tr.stats.endtime for tr in value])
        samples = int(round((max_endtime - min_starttime) / delta)) + 1
        data = np.empty(samples, dtype=dtype)
        # Fill with negative one values which corresponds to a gap.
        data[:] = -1
        # Create trace and give starttime.
        new_trace = Trace(data=data, header=value[0].stats)
        # Loop over all traces in value and add to data.
        for trace in value:
            start_index = int((trace.stats.starttime - min_starttime) / delta)
            end_index = start_index + len(trace.data)
            # Element-by-element comparison.
            data[start_index:end_index] = \
                np.maximum(data[start_index:end_index], trace.data)
        # set npts again, because data is changed in place
        new_trace.stats.npts = len(data)
        new_stream.append(new_trace)
    stream.traces = copied_traces
    return new_stream
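
A minimal usage sketch for the function above, assuming ObsPy is available; the ids, times and data are made up, and the traces are flagged as previews by hand (normally the preview-creation routine would set that flag):

import numpy as np
from obspy import Stream, Trace, UTCDateTime

# Two overlapping preview traces with the same id (synthetic data)
header = {'network': 'XX', 'station': 'TEST', 'channel': 'EHZ',
          'starttime': UTCDateTime(2020, 1, 1), 'delta': 30.0}
tr1 = Trace(data=np.array([1.0, 2.0, 3.0]), header=header)
tr2 = Trace(data=np.array([4.0, 0.5, 2.5]),
            header=dict(header, starttime=UTCDateTime(2020, 1, 1) + 60))
for tr in (tr1, tr2):
    tr.stats.preview = True  # mark them as preview traces by hand

merged = mergePreviews(Stream([tr1, tr2]))
print(merged)  # one trace; overlapping samples resolved by np.maximum
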
Exemplo n.º 15
0
def find_LFEs(filename, stations, tbegin, tend, TDUR, filt, \
        freq0, nattempts, waittime, draw=False, type_threshold='MAD', \
        threshold=0.0075):
    """
    Find LFEs with the temporary stations from FAME
    using the templates from Plourde et al. (2015)

    Input:
        type filename = string
        filename = Name of the template
        type stations = list of strings
        stations = name of the stations used for the matched-filter algorithm
        type tbegin = tuple of 6 integers
        tbegin = Time when we begin looking for LFEs
        type tend = tuple of 6 integers
        tend = Time we stop looking for LFEs
        type TDUR = float
        TDUR = Time to add before and after the time window for tapering
        type filt = tuple of floats
        filt = Lower and upper frequencies of the filter
        type freq0 = float
        freq0 = Maximum frequency rate of LFE occurrence
        type nattempts = integer
        nattempts = Number of times we try to download data
        type waittime = positive float
        waittime = Time to wait between two attempts at downloading
        type draw = boolean
        draw = Do we draw a figure of the cross-correlation?
        type type_threshold = string
        type_threshold = 'MAD' or 'Threshold'
        type threshold = float
        threshold = Cross correlation value must be higher than that
    Output:
        None
    """

    # Get the network, channels, and location of the stations
    staloc = pd.read_csv('../data/Ducellier/stations_permanent.txt', \
        sep=r'\s{1,}', header=None, engine='python')
    staloc.columns = ['station', 'network', 'channels', 'location', \
        'server', 'latitude', 'longitude', 'time_on', 'time_off']

    # Create directory to store the LFEs times
    namedir = 'LFEs/' + filename
    if not os.path.exists(namedir):
        os.makedirs(namedir)

    # File to write error messages
    namedir = 'error'
    if not os.path.exists(namedir):
        os.makedirs(namedir)
    errorfile = 'error/' + filename + '.txt'

    # Read the templates
    templates = Stream()
    for station in stations:
        data = pickle.load(open('templates_new/' + filename + \
            '/' + station + '.pkl', 'rb'))
        if (len(data) == 3):
            EW = data[0]
            NS = data[1]
            UD = data[2]
            EW.stats.station = station
            NS.stats.station = station
            EW.stats.channel = 'E'
            NS.stats.channel = 'N'
            templates.append(EW)
            templates.append(NS)
        else:
            UD = data[0]
        UD.stats.station = station
        UD.stats.channel = 'Z'
        templates.append(UD)

    # Begin and end time of analysis
    t1 = UTCDateTime(year=tbegin[0], month=tbegin[1], \
        day=tbegin[2], hour=tbegin[3], minute=tbegin[4], \
        second=tbegin[5])
    t2 = UTCDateTime(year=tend[0], month=tend[1], \
        day=tend[2], hour=tend[3], minute=tend[4], \
        second=tend[5])

    # Read the data
    data = []
    for station in stations:
        # Get station metadata for downloading
        for ir in range(0, len(staloc)):
            if (station == staloc['station'][ir]):
                network = staloc['network'][ir]
                channels = staloc['channels'][ir]
                location = staloc['location'][ir]
                server = staloc['server'][ir]

        # Duration of template
        template = templates.select(station=station, component='Z')[0]
        dt = template.stats.delta
        nt = template.stats.npts
        duration = (nt - 1) * dt   
        Tstart = t1 - TDUR
        Tend = t2 + duration + TDUR
        delta = t2 + duration - t1
        ndata = int(delta / dt) + 1

        # Orientation of template
        # Reference date chosen: January 1st 2012
        mychannels = channels.split(',')
        mylocation = location
        if (mylocation == '--'):
            mylocation = ''
        response = '../data/response/' + network + '_' + station + '.xml'
        inventory = read_inventory(response, format='STATIONXML')
        reference = []
        for channel in mychannels:
            angle = inventory.get_orientation(network + '.' + \
                station + '.' + mylocation + '.' + channel, \
                UTCDateTime(2012, 1, 1, 0, 0, 0))
            reference.append(angle)

        # First case: we can get the data from IRIS
        if (server == 'IRIS'):
            (D, orientation) = get_from_IRIS(station, network, channels, \
                location, Tstart, Tend, filt, dt, nattempts, waittime, \
                errorfile)
        # Second case: we get the data from NCEDC
        elif (server == 'NCEDC'):
            (D, orientation) = get_from_NCEDC(station, network, channels, \
                location, Tstart, Tend, filt, dt, nattempts, waittime, \
                errorfile)
        else:
            raise ValueError('You can only download data from IRIS and NCEDC')

        # Append data to stream
        if (type(D) == obspy.core.stream.Stream):
            stationdata = fill_data(D, orientation, station, channels, reference)
            if (len(stationdata) > 0):
                for stream in stationdata:
                    data.append(stream)

    # Number of hours of data to analyze
    nhour = int(ceil((t2 - t1) / 3600.0))

    # Create dataframe to store LFE times
    df = pd.DataFrame(columns=['year', 'month', 'day', 'hour', \
        'minute', 'second', 'cc', 'nchannel'])

    # Loop on hours of data
    for hour in range(0, nhour):
        nchannel = 0
        Tstart = t1 + hour * 3600.0
        Tend = t1 + (hour + 1) * 3600.0 + duration
        delta = Tend - Tstart
        ndata = int(delta / dt) + 1

        # Loop on channels
        for channel in range(0, len(data)):
            # Cut the data
            subdata = data[channel]
            subdata = subdata.slice(Tstart, Tend)
            # Check whether we have a complete one-hour-long recording
            if (len(subdata) == 1):
                if (len(subdata[0].data) == ndata):
                    # Get the template
                    station = subdata[0].stats.station
                    component = subdata[0].stats.channel
                    template = templates.select(station=station, \
                        component=component)[0]
                    # Cross correlation
                    cctemp = correlate.optimized(template, subdata[0])
                    if (nchannel > 0):
                        cc = np.vstack((cc, cctemp))
                    else:
                        cc = cctemp
                    nchannel = nchannel + 1
    
        if (nchannel > 0):
   
            # Compute average cross-correlation across channels
            meancc = np.mean(cc, axis=0)
            if (type_threshold == 'MAD'):
                MAD = np.median(np.abs(meancc - np.mean(meancc)))
                index = np.where(meancc >= threshold * MAD)
            elif (type_threshold == 'Threshold'):
                index = np.where(meancc >= threshold)
            else:
                raise ValueError('Type of threshold must be MAD or Threshold')
            times = np.arange(0.0, np.shape(meancc)[0] * dt, dt)

            # Get LFE times
            if np.shape(index)[1] > 0:
                (time, cc) = clean_LFEs(index, times, meancc, dt, freq0)

                # Add LFE times to dataframe
                i0 = len(df.index)
                for i in range(0, len(time)):
                    timeLFE = Tstart + time[i]
                    df.loc[i0 + i] = [int(timeLFE.year), int(timeLFE.month), \
                        int(timeLFE.day), int(timeLFE.hour), \
                        int(timeLFE.minute), timeLFE.second + \
                        timeLFE.microsecond / 1000000.0, cc[i], nchannel]

            # Draw figure
            if (draw == True):
                params = {'xtick.labelsize':16,
                          'ytick.labelsize':16}
                pylab.rcParams.update(params) 
                plt.figure(1, figsize=(20, 8))
                if np.shape(index)[1] > 0:
                    for i in range(0, len(time)):
                        plt.axvline(time[i], linewidth=2, color='grey')
                plt.plot(np.arange(0.0, np.shape(meancc)[0] * dt, \
                    dt), meancc, color='black')
                if (type_threshold == 'MAD'):
                    plt.axhline(threshold * MAD, linewidth=2, color='red', \
                        label = '{:6.2f} * MAD'.format(threshold))
                elif (type_threshold == 'Threshold'):
                    plt.axhline(threshold, linewidth=2, color='red', \
                        label = 'Threshold = {:8.4f}'.format(threshold))
                else:
                    raise ValueError( \
                        'Type of threshold must be MAD or Threshold')
                plt.xlim(0.0, (np.shape(meancc)[0] - 1) * dt)
                plt.xlabel('Time (s)', fontsize=24)
                plt.ylabel('Cross-correlation', fontsize=24)
                plt.title('Average cross-correlation across stations', \
                    fontsize=30)
                plt.legend(loc=2, fontsize=24)
                plt.savefig('LFEs/' + filename + '/' + \
                    '{:04d}{:02d}{:02d}_{:02d}{:02d}{:02d}'.format( \
                    Tstart.year, Tstart.month, Tstart.day, Tstart.hour, \
                    Tstart.minute, Tstart.second) + '.png', format='png')
                plt.close(1)

    # Add to pandas dataframe and save
    namefile = 'LFEs/' + filename + '/catalog.pkl'
    if os.path.exists(namefile):
        df_all = pickle.load(open(namefile, 'rb'))
        df_all = pd.concat([df_all, df], ignore_index=True)
    else:
        df_all = df    
    df_all = df_all.astype(dtype={'year':'int32', 'month':'int32', \
        'day':'int32', 'hour':'int32', 'minute':'int32', \
        'second':'float', 'cc':'float', 'nchannel':'int32'})
    pickle.dump(df_all, open(namefile, 'wb'))
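
The detection step above keeps samples whose mean cross-correlation exceeds threshold * MAD. A self-contained sketch of just that step on synthetic numbers (names and values here are illustrative, not part of the original module):

import numpy as np

dt = 0.05                                   # sampling interval (s), assumed
meancc = np.random.normal(0.0, 0.01, 2000)  # synthetic mean cross-correlation
meancc[500] = 0.25                          # one strong detection

MAD = np.median(np.abs(meancc - np.mean(meancc)))
threshold = 8.0                             # detections must exceed 8 * MAD
index = np.where(meancc >= threshold * MAD)
times = np.arange(0.0, meancc.shape[0] * dt, dt)
print(times[index])                         # times of candidate LFE detections
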
Exemplo n.º 16
0
    def filterWaveform(self, Waveform):

        Logfile.red('Filter Waveform: ')
        cfg = FilterCfg(self.Config)

        #new_frequence = int (self.Config['new_frequence'])
        new_frequence = int(cfg.newFrequency())
        st = Stream()

        for i in Waveform:
            Logfile.red('Downsampling to %d: from %d' %
                        (new_frequence, i.stats.sampling_rate))

            j = self.resampleWaveform(i, new_frequence)
            j.detrend(type='demean')

            old = True  #???

            if old:
                switch = cfg.filterswitch()  # ['filterswitch']

                if switch == 1:
                    Logfile.add('bandpass filtered stream for station %s ' %
                                (i))

                    j.filter(
                        'bandpass',
                        freqmin=cfg.flo(),  # ['flo']
                        freqmax=cfg.fhi(),  # ['fhi']
                        corners=cfg.ns(),  # ['ns']
                        zerophase=bool(self.Config['zph']))

                elif switch == 2:
                    Logfile.add('lowpass filtered stream for station %s ' %
                                (i))

                    j.filter(
                        "lowpass",
                        freq=cfg.l_fc(),  # ['l_fc']
                        corners=cfg.l_ns(),  # ['l_ns']
                        zerophase=bool(self.Config['l_zph']))

                elif switch == 3:
                    Logfile.add('highpass filtered stream for station %s ' %
                                (i))

                    j.filter(
                        "highpass",
                        freq=cfg.h_fc(),  # ['h_fc']
                        corners=cfg.h_ns(),  # ['h_ns']
                        zerophase=bool(self.Config['h_zph']))
                else:
                    dummy = 1
                    #Logfile.add ('bandpass filtered stream for station %s '% (i))

                    #j.filter ("bandpass", freqmin=0.4, freqmax=3,
                    #           corners=3, zerophase=False)
                st.append(j)

            else:
                j1 = filterWaveform_2(Config, j, i)
                st.append(j1)

        #endfor

        return st
Exemplo n.º 17
0
            channels.append(file.split('.')[2][-3:])
            all_files.append(root + '/' + file)
sites = list(set(sites))  # Define site list
channels = list(set(channels))  # Define channel list
split_files = [[[] for n in range(len(channels))] for m in range(len(sites))]
for file in all_files:
    print(file)
    site_idx = sites.index(file.split('/')[-1].split('.')[0])
    channel_idx = channels.index(file.split('/')[-1].split('.')[2][-3:])
    split_files[site_idx][channel_idx].append(file)

# Load in data for each site and perform noise analysis
for m, site in enumerate(sites):
    print('Parsing data for site: ' + site)
    for n, channel in enumerate(channels):
        streams = Stream()
        for o in range(len(split_files[m][n])):
            print('Parsing file:')
            print(split_files[m][n][o])
            streams += obspy.read(split_files[m][n][o])
        print('Merging streams...')
        streams.merge()
        print('Current data is:')
        print(streams)

        # Build probabilistic power spectral density objects for each trace
        all_ppsds = []
        all_ppsd_names = []
        for stream in streams:
            print('Calculating PPSDs for stream:')
            print(stream)
Exemplo n.º 18
0
def getData(tstart, tend, opt):

    """
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!

    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """    
    
    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')
    
    st = Stream()
    
    if opt.server == 'SAC' or opt.server == 'miniSEED':
    
        # Generate list of files
        if opt.server == 'SAC':
            flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.sac')) for root, dirs, files in os.walk(opt.sacdir)))+list(
                itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.SAC')) for root, dirs, files in os.walk(opt.sacdir)))
        elif opt.server == 'miniSEED':
            flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.mseed')) for root, dirs, files in os.walk(opt.mseeddir)))+list(
                itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.MSEED')) for root, dirs, files in os.walk(opt.mseeddir)))
            
        # Determine which subset of files to load based on start and end times and
        # station name; we'll fully deal with stations below
        flist_sub = []
        for f in flist:
            # Load header only
            stmp = obspy.read(f, headonly=True)
            # Check if station is contained in the stas list
            if stmp[0].stats.station in stas:
                # Check if contains either start or end time
                ststart = stmp[0].stats.starttime
                stend = stmp[0].stats.endtime
                if (ststart<=tstart and tstart<=stend) or (ststart<=tend and tend<=stend):
                    flist_sub.append(f)
    
        # Fully load data from file
        stmp = Stream()
        for f in flist_sub:
            tmp = obspy.read(f, starttime=tstart, endtime=tend+opt.maxdt)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)
    
        # Filter and merge
        stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax, corners=2,
            zerophase=True)
        stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != opt.samprate:
                stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=0)
    
        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)
    
        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m] and nets[n] in
                    netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find "+stas[n]+'.'+chas[n]+'.'+nets[n]+'.'+locs[n])
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())
    
    else:   
     
        if '.' not in opt.server:
            client = Client(opt.server)
        else:
            client = EWClient(opt.server, opt.port)
        
        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                        tstart, tend+opt.maxdt)
                stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                    corners=2, zerophase=True)
                stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                for m in range(len(stmp)):
                    if stmp[m].stats.sampling_rate != opt.samprate:
                        stmp[m] = stmp[m].resample(opt.samprate)
                stmp = stmp.merge(method=1, fill_value=0)
            except (obspy.clients.fdsn.header.FDSNException):
                try: # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                            tstart, tend+opt.maxdt)
                    stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                        corners=2, zerophase=True)
                    stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                    for m in range(len(stmp)):
                        if stmp[m].stats.sampling_rate != opt.samprate:
                            stmp[m] = stmp[m].resample(opt.samprate)
                    stmp = stmp.merge(method=1, fill_value=0)
                except (obspy.clients.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = opt.samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])
                                            
            # Last check for length; catches problem with empty waveserver
            if len(stmp) != 1:
                print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                stmp = Stream().extend([trtmp.copy()])
                
            st.extend(stmp.copy()) 
    
    # Edit 'start' time if using offset option
    if opt.maxdt:
        dts = np.fromstring(opt.offset, sep=',')
        for n, tr in enumerate(st):
            tr.stats.starttime = tr.stats.starttime-dts[n]
    
    st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
    stC = st.copy()
    
    return st, stC
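
A hedged usage sketch; in the original project `opt` is an Options object read from a configuration file, so the stand-in below only mimics the attributes the function actually reads, with invented values:

from types import SimpleNamespace
from obspy import UTCDateTime

# Illustrative stand-in for the project's Options object; attribute names are
# taken from the function above, the values are made up
opt = SimpleNamespace(network='UW', station='HSR', location='--', channel='EHZ',
                      server='IRIS', port=16017, fmin=1.0, fmax=10.0,
                      samprate=100.0, mintrig=10.0, maxdt=0, offset='0')

t1 = UTCDateTime(2015, 7, 1)
st, stC = getData(t1, t1 + 3600, opt)  # one hour: cut stream and trigger copy
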
Exemplo n.º 19
0
def getStreamObject(starttime, endtime, lslat, lslon, radius=100.):
    """
    Uses seisk reviewData module to grab seismic data using FDSN webservices
    for stations within a specified radius of the landslide as a stream object.
    Increments radius until minimum number of traces or maximum search radius
    is achieved.
    INPUTS
    starttime (UTCDateTime) - start time of stream object
    endtime (UTCDateTime) - end time of stream object
    lslat (float) - latitudinal coordinate of landslide (make negative for south
        of Equator)
    lslon (float) - longitudinal coordinate of landslide (make negative for west
        of Prime Meridian)
    OUTPUT
    st - obspy stream object with seismic data
    """

    # Seismic channels to search for
    channels = 'EHZ,BHZ,HHZ'

    # Search for data within initial radius
    print('Retrieving data for radius = %i km...' % int(radius))
    st = Stream()
    st_init = reviewData.getepidata(lslat,
                                    lslon,
                                    starttime,
                                    tstart=0.,
                                    tend=endtime - starttime,
                                    minradiuskm=0.,
                                    maxradiuskm=radius,
                                    chanuse=channels,
                                    location='*',
                                    clientnames=['IRIS'],
                                    savedat=False,
                                    detrend='demean')
    for trace in st_init:
        st.append(trace)

    # Check if number of traces in stream object less than minimum; if so,
    # increment radius by 50 km and search for data again
    maxradius = 350  # maximum radius to search within (in km)
    mintraces = 5  # minimum number of traces accepted
    while (not st or len(st) < mintraces) and radius <= maxradius:
        radius += 50.  # km
        print('Incrementing radius to %i km and retrieving data...' %
              int(radius))
        st = reviewData.getepidata(lslat,
                                   lslon,
                                   starttime,
                                   tstart=0.,
                                   tend=endtime - starttime,
                                   minradiuskm=0.,
                                   maxradiuskm=radius,
                                   chanuse=channels,
                                   location='*',
                                   clientnames=['IRIS'],
                                   savedat=False,
                                   detrend='demean')

    if st:
        print('%i stations within %i km of landslide.' %
              (len(st), int(radius)))

    return (st)
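
A short, hedged call of the function above; it relies on the external reviewData module being importable, and the landslide coordinates and times below are purely illustrative:

from obspy import UTCDateTime

# Made-up event time and location (roughly the Mount Rainier area)
starttime = UTCDateTime(2019, 8, 1, 12, 0, 0)
endtime = starttime + 10 * 60
lslat, lslon = 46.85, -121.75

st = getStreamObject(starttime, endtime, lslat, lslon, radius=100.)
print('%i traces retrieved' % len(st))
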
Exemplo n.º 20
0
# In[7]:

headerP = stP_og[0].stats
headerS = stS_og[0].stats

model_Pangles = [45.75]
model_Sangles = [48.27]

n = 0
for a in model_Pangles:
    print('P')
    stP = stP_og.copy()
    hhQ, hhL = rotate(stP[1].data, stP[2].data, a)
    t1, t2, t3 = Trace(stP[0].data, header=headerP), Trace(
        hhQ, header=headerP), Trace(hhL, header=headerP)
    stP_LQ = Stream(traces=[t1, t2, t3])
    stP_LQ[0].stats.component = 'T'
    stP_LQ[1].stats.component = 'Q'
    stP_LQ[2].stats.component = 'L'

    stP_LQ.plot(equal_scale=True)
    n += 1

#S-wave
n = 0
for a in model_Sangles:
    print('S')
    stS = stS_og.copy()
    hhQ, hhL = rotate(stS[1].data, stS[2].data, a)
    t1, t2, t3 = Trace(stS[0].data, header=headerS), Trace(
        hhQ, header=headerS), Trace(hhL, header=headerS)
Exemplo n.º 21
0
 n_chans = float(len(channels))
 axs = []
 fig = plt.figure(figsize=(8, 8))
 for i in range(int(n_chans)):
     if i == 0:
         ax2 = fig.add_axes(
             [0.1, 0.1 + (i * 0.8) / n_chans, 0.7, 0.8 / n_chans])
         axs.append(ax2)
     else:
         axs.append(
             fig.add_axes(
                 [0.1, 0.1 + (i * 0.8) / n_chans, 0.7, 0.8 / n_chans],
                 sharex=axs[i - 1]))
 axs.append(fig.add_axes([0.83, 0.1, 0.03, 0.8]))
 #fig, axs = plt.subplots(n_chans)
 st = Stream()
 for i, channel in enumerate(channels):
     channel_path = os.path.join(station_path, channel)
     chan = read(channel_path)
     st.append(chan[0])
     ax2 = chan.spectrogram(title='Channel: %s' % (channel),
                            show=False,
                            axes=axs[i],
                            cmap=j)
 #print(ax2[0].images[0])
 #canvas = FigureCanvas(fig2)
 #image = np.fromstring(canvas.tostring_rgb(), dtype='uint8')
 #print(list(axs[0]y.get_images()))
 mappable = ax2[0].images[0]
 plt.colorbar(mappable=mappable, cax=axs[-1])
 try:
Exemplo n.º 22
0
def rel_calib_stack(st1,
                    st2,
                    calib_file,
                    window_len,
                    overlap_frac=0.5,
                    smooth=0,
                    save_data=True):
    """
    Method for relative calibration of sensors using a sensor with known
    transfer function

    :param st1: Stream or Trace object, (known)
    :param st2: Stream or Trace object, (unknown)
    :type calib_file: str
    :param calib_file: file name of calibration file containing the PAZ of the
        known instrument in GSE2 standard.
    :type window_len: float
    :param window_len: length of sliding window in seconds
    :type overlap_frac: float
    :param overlap_frac: fraction of overlap, defaults to fifty percent (0.5)
    :type smooth: float
    :param smooth: variable that defines if the Konno-Ohmachi taper is used or
        not. Default = 0 -> no taper. Value generally used in geopsy: smooth = 40
    :type save_data: bool
    :param save_data: Whether or not to save the result to a file. If True, two
        output files will be created:
        * The new response in station_name.window_length.resp
        * The ref response in station_name.refResp
        Defaults to True
    :returns: frequency, amplitude and phase spectrum

    implemented after rel_calib_stack.c by M.Ohrnberger and J.Wassermann.
    """
    # transform given trace objects to streams
    if isinstance(st1, Trace):
        st1 = Stream([st1])
    if isinstance(st2, Trace):
        st2 = Stream([st2])
    # check if sampling rate and trace length is the same
    if st1[0].stats.npts != st2[0].stats.npts:
        msg = "Traces don't have the same length!"
        raise ValueError(msg)
    elif st1[0].stats.sampling_rate != st2[0].stats.sampling_rate:
        msg = "Traces don't have the same sampling rate!"
        raise ValueError(msg)
    else:
        ndat1 = st1[0].stats.npts
        sampfreq = st1[0].stats.sampling_rate

    # read waveforms
    tr1 = st1[0].data.astype(np.float64)
    tr2 = st2[0].data.astype(np.float64)

    # get window length, nfft and frequency step
    ndat = int(window_len * sampfreq)
    nfft = next_pow_2(ndat)

    # read calib file and calculate response function
    gg, _freq = _calc_resp(calib_file, nfft, sampfreq)

    # calculate number of windows and overlap
    nwin = int(np.floor((ndat1 - nfft) / (nfft / 2)) + 1)
    noverlap = nfft * overlap_frac

    auto, _freq, _t = \
        spectral_helper(tr1, tr1, NFFT=nfft, Fs=sampfreq, noverlap=noverlap)
    cross, freq, _t = \
        spectral_helper(tr2, tr1, NFFT=nfft, Fs=sampfreq, noverlap=noverlap)

    res = (cross / auto).sum(axis=1) * gg

    # The first item might be zero. Problems with phase calculations.
    res = res[1:]
    freq = freq[1:]
    gg = gg[1:]

    res /= nwin
    # apply Konno-Ohmachi smoothing taper if chosen
    if smooth > 0:
        # Write in one matrix for performance reasons.
        spectra = np.empty((2, len(res.real)))
        spectra[0] = res.real
        spectra[1] = res.imag
        new_spectra = \
            konno_ohmachi_smoothing(spectra, freq, bandwidth=smooth, count=1,
                                    max_memory_usage=1024, normalize=True)
        res.real = new_spectra[0]
        res.imag = new_spectra[1]

    amp = np.abs(res)
    # include phase unwrapping
    phase = np.unwrap(np.angle(res))  # + 2.0 * np.pi
    ra = np.abs(gg)
    rpha = np.unwrap(np.angle(gg))

    if save_data:
        trans_new = (st2[0].stats.station + "." + st2[0].stats.channel + "." +
                     str(window_len) + ".resp")
        trans_ref = st1[0].stats.station + ".refResp"
        # Create empty array for easy saving
        temp = np.empty((len(freq), 3))
        temp[:, 0] = freq
        temp[:, 1] = amp
        temp[:, 2] = phase
        np.savetxt(trans_new, temp, fmt=native_str('%.10f'))
        temp[:, 1] = ra
        temp[:, 2] = rpha
        np.savetxt(trans_ref, temp, fmt=native_str('%.10f'))

    return freq, amp, phase
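
A minimal sketch of calling the routine above; the file names are placeholders, the calibration file is assumed to hold the PAZ of the reference sensor in GSE2 format, and the window length is illustrative:

from obspy import read

st_known = read('reference_sensor.mseed')    # sensor with known response
st_unknown = read('test_sensor.mseed')       # sensor to be calibrated
freq, amp, phase = rel_calib_stack(st_known, st_unknown, 'reference_paz.gse2',
                                   window_len=3600, overlap_frac=0.5,
                                   smooth=40, save_data=False)
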
Exemplo n.º 23
0
    else:
        sts = st.select(channel=channels[i], location=locations[i])
    # Fix to remove overlaps, but not mask the data
    sts = sts.merge()
    sts = sts.split()
    sts.sort(keys=['starttime', 'endtime', 'channel'])

    print(sts)
    for j, tr in enumerate(sts):
        print("Working on trace {}".format(j))
        print(tr)
        length = tr.stats['endtime'] - tr.stats['starttime']
        cumlen = cumlen + length
        nevents_tr = nevents * length / secyear
        ppsd = PPSD(tr.stats, metadata=inv, ppsd_length=200.0)
        ppsd.add(Stream(tr))
        psdmean = 0
        for period in psdperiodrange:
            psds = ppsd.extract_psd_values(period)[0]
            psdmean = psdmean + math.pow(10.0, 0.05 * np.mean(psds))
        psdamp = psdmean / len(psdperiodrange)
        threshold = psdamp * snr
        print("{} Threshold: {}".format(j, threshold))
        nev_tr = np.zeros_like(nevents)
        for k, mag in enumerate(magarray):
            idx = next((x for x, v in enumerate(amp_mag_dist[k][::-1])
                        if v > threshold), None)
            if idx is not None:
                idx = len(distarray) - idx - 1
                nev_tr[:, :, k] = afrac[idx] * nevents_tr[:, :, k]
Exemplo n.º 24
0
def main():
    # hypo params
    win_size = 30  # in seconds
    step_len = 100  # length of each time step (frame size)
    step_stride = step_len / 2  # half overlap of time steps
    num_step = -(step_len / step_stride - 1) + win_size * 100 / step_stride

    out_class = 'test'
    stream_paths = '/data/WC_AItrain/finetune/Events/%s/*Z.SAC' % out_class
    stream_dir = '/data/WC_AItrain/finetune/Events/%s' % out_class
    output_dir = '/home/zhouyj/Documents/AIDP/data/%s/finetune/ppk_frame100_stride50' % out_class
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    stream_files = sorted(glob.glob(stream_paths))

    done_file = []
    for stream_file in stream_files:

        # one-day's data in a tfrecord file
        sta, time, aug_idx, chn, _ = stream_file.split('.')
        jday = time[0:7]  # events happened on one day
        if [jday, aug_idx] not in done_file: done_file.append([jday, aug_idx])
        else: continue

        # Write event waveforms and labels in .tfrecords
        output_name = 'frames_' + jday + '_' + aug_idx + ".tfrecords"
        output_path = os.path.join(output_dir, output_name)
        writer = DataWriter(output_path)

        # Load stream
        stz_paths = sorted(
            glob.glob(stream_dir + '/*{}*.{}.BHZ.SAC'.format(jday, aug_idx)))

        # for all streams:
        for i, stz_path in enumerate(stz_paths):
            sta, time, aug_idx, chn, _ = stz_path.split('.')
            stx = '.'.join([sta, time, aug_idx, 'BHE', 'SAC'])
            sty = '.'.join([sta, time, aug_idx, 'BHN', 'SAC'])
            stz = '.'.join([sta, time, aug_idx, 'BHZ', 'SAC'])
            if not (os.path.exists(stx) and os.path.exists(sty)
                    and os.path.exists(stz)):
                print 'missing trace!'
                continue
            stream = Stream(traces=[read(stx)[0], read(sty)[0], read(stz)[0]])
            stream = stream.detrend('constant').filter(
                'highpass', freq=1.0).normalize()  #TODO
            # drop bad data
            if stream.max()[0] == 0.0 or stream.max()[1] == 0.0 or stream.max(
            )[2] == 0.0:
                print 'broken trace!'
                continue

            # stream info
            n_traces = len(stream)
            n_samples = len(stream[0].data)
            n_pts = stream[0].stats.sampling_rate * win_size + 1
            label, p_time, s_time = 1, stream[0].stats.sac.t0, stream[
                0].stats.sac.t1
            # convert to time_steps and write to TFRecord
            if (n_traces == 3) and (n_pts == n_samples):
                # def input of RNN
                input_array = np.zeros((num_step, n_traces, step_len + 1),
                                       dtype=np.float32)
                # three chn data
                xdata = np.float32(stream[0].data)
                ydata = np.float32(stream[1].data)
                zdata = np.float32(stream[2].data)
                st_data = np.array([xdata, ydata, zdata])
                # convert to time steps
                for j in range(num_step):
                    idx_s = j * step_stride
                    idx_e = idx_s + step_len + 1
                    current_step = st_data[:, idx_s:idx_e]
                    input_array[j, :, :] = current_step

                # Write tfrecords
                writer.write(input_array, step_stride / 100., label, p_time,
                             s_time)
                print("+ Creating tfrecords for ppk time steps {}, idx = {}".
                      format(jday, i))
            else:
                print("Missing waveform for ppk time steps: %s" % (jday))
        writer.close()
Exemplo n.º 25
0
for s, array in enumerate(array_list):
  seislist = glob.glob(array + '/*PICKLE')
  array_name = os.path.split(array)[1]
  test = read(array+'/'+array_name+'*PICKLE',format='PICKLE')
  if test[0].stats['az']<az_min or test[0].stats['az']>az_max:
    continue
  if (test[0].stats['dist']< cut_distance_max1 and test[0].stats['dist']> cut_distance_min1) or \
  (test[0].stats['dist']< cut_distance_max2 and test[0].stats['dist']> cut_distance_min2) :
    if if_postcursor_cut:
      if test[0].stats['dist']< cut_distance_max1 and test[0].stats['dist']> cut_distance_min1:
        time_min = np.interp(test[0].stats['az'],cut_y1,cut_x1)
      elif test[0].stats['dist']< cut_distance_max2 and test[0].stats['dist']> cut_distance_min2:
        time_min = np.interp(test[0].stats['az'],cut_y2,cut_x2)
      else:
        continue  
    st = Stream()
    stime_list = []
    # Loop in the specific arrays
    for i, seisname in enumerate(seislist):
      print(seisname)
      seis = read(seisname,format='PICKLE')
      st += seis.select(channel=component)
      st[i].stats.coordinates = AttribDict({
          'latitude': seis[0].stats['stla'],
          'elevation': 0, #seis[0].stats['stelv']/1000,
          'longitude': seis[0].stats['stlo']})
      stime_list.append(seis[0].stats.traveltimes['Sdiff'])
    st.resample(10)
    stime_list = np.array(stime_list)
    print('Sdiff time deviation of %s is  %f' %(array_name,stime_list.max()-stime_list.min()))
    print(stime_list)
Exemplo n.º 26
0
def fill_data(D, orientation, station, channels, reference):
    """
    Return the data that must be cross correlated with the template

    Input:
        type D = obspy Stream
        D = Data downloaded
        type orientation = list of dictionaries
        orientation = azimuth, dip for 3 channels (for data)
        type station = string
        station = Name of station
        type channels = string
        channels = Names of channels
        type reference = list of dictionaries
        reference = azimuth, dip for 3 channels (for template)
    Output:
        type data = list of obspy Stream
        data = Data to be analyzed with correct azimuth
    """
    # East-West channel
    EW = Stream()
    if (channels == 'EH1,EH2,EHZ'):
        if (len(D.select(channel='EH1')) > 0):
            EW = D.select(channel='EH1')
    else:
        if (len(D.select(component='E')) > 0):
            EW = D.select(component='E')
    # North-South channel
    NS = Stream()
    if (channels == 'EH1,EH2,EHZ'):
        if (len(D.select(channel='EH2')) > 0):
            NS = D.select(channel='EH2')
    else:
        if (len(D.select(component='N')) > 0):
            NS = D.select(component='N')
    # Vertical channel
    UD = Stream()
    if (channels == 'EH1,EH2,EHZ'):
        if (len(D.select(channel='EHZ')) > 0):
            UD = D.select(channel='EHZ')
    else:
        if (len(D.select(component='Z')) > 0):
            UD = D.select(component='Z')
    # Rotation of the data
    data = []
    if ((len(EW) > 0) and (len(NS) > 0) and (len(EW) == len(NS))):
        # Orientation of the data
        dE = orientation[0]['azimuth'] * pi / 180.0
        dN = orientation[1]['azimuth'] * pi / 180.0
        # Orientation of the template
        tE = reference[0]['azimuth'] * pi / 180.0
        tN = reference[1]['azimuth'] * pi / 180.0
        EWrot = Stream()
        NSrot = Stream()
        for i in range(0, len(EW)):
            if (len(EW[i].data) == len(NS[i].data)):
                EWrot0 = EW[i].copy()
                NSrot0 = NS[i].copy()
                EWrot0.data = cos(dE - tE) * EW[i].data + \
                              cos(dN - tE) * NS[i].data
                NSrot0.data = cos(dE - tN) * EW[i].data + \
                              cos(dN - tN) * NS[i].data
                EWrot0.stats.station = station
                EWrot0.stats.channel = 'E'
                NSrot0.stats.station = station
                NSrot0.stats.channel = 'N'
                EWrot.append(EWrot0)
                NSrot.append(NSrot0)
        data.append(EWrot)
        data.append(NSrot)
    if (len(UD) > 0):
        for i in range(0, len(UD)):
            UD[i].stats.station = station
            UD[i].stats.channel = 'Z'
        data.append(UD)
    return (data)
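
The horizontal rotation above projects the recorded East/North traces onto the template's channel azimuths using cosines of the azimuth differences. A tiny numeric sketch of that projection, with invented azimuths and samples:

import numpy as np
from math import cos, pi

dE, dN = 95.0 * pi / 180.0, 5.0 * pi / 180.0  # data channel azimuths (radians)
tE, tN = 90.0 * pi / 180.0, 0.0               # template channel azimuths

ew = np.array([1.0, 0.5, -0.2])               # synthetic East-West samples
ns = np.array([0.3, -0.1, 0.4])               # synthetic North-South samples

ew_rot = cos(dE - tE) * ew + cos(dN - tE) * ns  # East in the template frame
ns_rot = cos(dE - tN) * ew + cos(dN - tN) * ns  # North in the template frame
print(ew_rot, ns_rot)
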
Exemplo n.º 27
0
def getData(tstart, tend, opt):
    """
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!

    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """

    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')

    st = Stream()

    if opt.server == 'SAC' or opt.server == 'miniSEED':

        # Generate list of files
        if opt.server == 'SAC':
            flist = glob.glob(opt.sacdir + '*.sac') + glob.glob(opt.sacdir +
                                                                '*.SAC')
        elif opt.server == 'miniSEED':
            flist = glob.glob(opt.mseeddir +
                              '*.mseed') + glob.glob(opt.mseeddir + '*.MSEED')

        # Load data from file
        stmp = Stream()
        for f in flist:
            tmp = obspy.read(f, starttime=tstart, endtime=tend)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)

        # Filter and merge
        stmp = stmp.filter('bandpass',
                           freqmin=opt.fmin,
                           freqmax=opt.fmax,
                           corners=2,
                           zerophase=True)
        stmp = stmp.taper(0.05, type='hann', max_length=opt.mintrig)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != opt.samprate:
                stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=0)

        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)

        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m]
                        and nets[n] in netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find " + stas[n] + '.' + chas[n] + '.' +
                      nets[n] + '.' + locs[n])
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())

    else:

        if '.' not in opt.server:
            client = Client(opt.server)
        else:
            client = EWClient(opt.server, opt.port)

        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                                            tstart, tend)
                stmp = stmp.filter('bandpass',
                                   freqmin=opt.fmin,
                                   freqmax=opt.fmax,
                                   corners=2,
                                   zerophase=True)
                stmp = stmp.taper(0.05, type='hann', max_length=opt.mintrig)
                for m in range(len(stmp)):
                    if stmp[m].stats.sampling_rate != opt.samprate:
                        stmp[m] = stmp[m].resample(opt.samprate)
                stmp = stmp.merge(method=1, fill_value=0)
            except (obspy.fdsn.header.FDSNException):
                try:  # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n],
                                                chas[n], tstart, tend)
                    stmp = stmp.filter('bandpass',
                                       freqmin=opt.fmin,
                                       freqmax=opt.fmax,
                                       corners=2,
                                       zerophase=True)
                    stmp = stmp.taper(0.05,
                                      type='hann',
                                      max_length=opt.mintrig)
                    for m in range(len(stmp)):
                        if stmp[m].stats.sampling_rate != opt.samprate:
                            stmp[m] = stmp[m].resample(opt.samprate)
                    stmp = stmp.merge(method=1, fill_value=0)
                except (obspy.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n], nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = opt.samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])
            st.extend(stmp.copy())

    st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
    stC = st.copy()

    return st, stC
Exemplo n.º 28
0
def keys_with_gaps(obspy_dic):
    keys = set()
    for key, trace in obspy_dic.items():
        if Stream(trace).get_gaps():
            keys.add(key)
    return keys
Exemplo n.º 29
0
def ProcessStream(stream):
    n_day = 0
    for trace in stream:
        if trace.stats.channel != channel:
            continue
        net = trace.stats.network
        sta = trace.stats.station
        station_path = sacPath + sta + '-' + net
        if not os.path.exists(station_path):
            os.makedirs(station_path)
        starttime = trace.stats.starttime
        endtime = trace.stats.endtime
        day_gap = datetime.date(
            endtime.year, endtime.month, endtime.day) - datetime.date(
                starttime.year, starttime.month, starttime.day)
        time_day = [starttime]
        for ii in range(day_gap.days):
            splittime = datetime.date(time_day[ii].year, time_day[ii].month,
                                      time_day[ii].day) + datetime.timedelta(1)
            splittime = str(splittime.year) + '-' + str(
                splittime.month) + '-' + str(splittime.day) + 'T00:00:00'
            splittime = UTCDateTime(splittime)
            time_day.append(splittime)
        time_day.append(endtime)

        for ii in range(len(time_day) - 1):
            starttime = time_day[ii]
            endtime = time_day[ii + 1]
            st_day = trace.slice(starttime, endtime, nearest_sample=False)
            if st_day.stats.npts < 10:
                continue
            day_name = sacPath + sta + '-' + net + '/' + sta + '-' + net + '-' + str(
                starttime.year) + '-' + str(
                    starttime.julday).zfill(3) + '-' + channel + '.SAC'
            if os.path.exists(day_name):
                st_tmp = read(day_name, format='SAC')
                if int(st_tmp[0].stats.sampling_rate) != int(resamplingRate):
                    resampling_rate = int(
                        round(st_tmp[0].stats.sampling_rate / resamplingRate))
                    st_tmp[0].decimate(resampling_rate)
                day_name_tmp = day_name + '.tmp'
                st_day.write(day_name_tmp, format='SAC')
                st_day = read(day_name_tmp, format='SAC')
                os.remove(day_name_tmp)
                if int(st_day[0].stats.sampling_rate) != int(resamplingRate):
                    resampling_rate = int(
                        round(st_day[0].stats.sampling_rate / resamplingRate))
                    if resampling_rate == 100:
                        st_day[0].decimate(10, no_filter=True)
                        st_day[0].decimate(10, no_filter=True)
                    else:
                        st_day[0].decimate(resampling_rate, no_filter=True)
                if np.abs(st_day[0].stats.sampling_rate -
                          resamplingRate) > 0.01:
                    st_day[0].resample(resamplingRate)
                else:
                    st_day[0].stats.sampling_rate = resamplingRate
                st_tmp[0].data = st_tmp[0].data.astype(np.float32)
                st_day[0].data = st_day[0].data.astype(np.float32)
                merge_data = Stream()
                merge_data.append(st_tmp[0])
                merge_data.append(st_day[0])
                merge_data.sort(['starttime'])
                merge_data.merge(method=1, fill_value='latest')
                os.remove(day_name)
                merge_data.write(day_name, format='SAC')
            else:
                if int(st_day.stats.sampling_rate) != int(resamplingRate):
                    resampling_rate = int(
                        round(st_day.stats.sampling_rate / resamplingRate))
                    if resampling_rate == 100:
                        st_day.decimate(10, no_filter=True)
                        st_day.decimate(10, no_filter=True)
                    else:
                        st_day.decimate(resampling_rate, no_filter=True)
                if np.abs(st_day.stats.sampling_rate - resamplingRate) > 0.01:
                    st_day.resample(resamplingRate)
                else:
                    st_day.stats.sampling_rate = resamplingRate
                st_day.write(day_name, format='SAC')
                n_day = n_day + 1

            print(sta + ' now julday is: ' + str(starttime.date), n_day)
Exemplo n.º 30
0
File: client.py Project: mbyt/obspy
    def get_preview(self,
                    trace_ids=[],
                    starttime=None,
                    endtime=None,
                    network=None,
                    station=None,
                    location=None,
                    channel=None,
                    pad=False):
        """
        Returns a Stream of preview traces matching the query.
        """
        # build up query
        session = self.session()
        query = session.query(WaveformChannel)
        # start time defaults to 20 minutes before now
        try:
            starttime = UTCDateTime(starttime)
        except Exception:
            starttime = UTCDateTime() - 60 * 20
        finally:
            query = query.filter(WaveformChannel.endtime > starttime.datetime)
        # end time defaults to now
        try:
            endtime = UTCDateTime(endtime)
        except Exception:
            endtime = UTCDateTime()
        finally:
            query = query.filter(WaveformChannel.starttime < endtime.datetime)
        # process arguments
        if trace_ids:
            # filter over trace id list
            trace_filter = or_()
            for trace_id in trace_ids:
                temp = trace_id.split('.')
                if len(temp) != 4:
                    continue
                trace_filter.append(
                    and_(WaveformChannel.network == temp[0],
                         WaveformChannel.station == temp[1],
                         WaveformChannel.location == temp[2],
                         WaveformChannel.channel == temp[3]))
            if trace_filter.clauses:
                query = query.filter(trace_filter)
        else:
            # filter over network/station/location/channel id
            kwargs = {
                'network': network,
                'station': station,
                'location': location,
                'channel': channel
            }
            for key, value in kwargs.items():
                if value is None:
                    continue
                col = getattr(WaveformChannel, key)
                if '*' in value or '?' in value:
                    # translate wildcards into SQL LIKE patterns
                    value = value.replace('?', '_')
                    value = value.replace('*', '%')
                    query = query.filter(col.like(value))
                else:
                    query = query.filter(col == value)
        # execute query
        results = query.all()
        session.close()
        # create Stream
        st = Stream()
        for result in results:
            preview = result.get_preview()
            st.append(preview)
        # merge and trim
        st = merge_previews(st)
        st.trim(starttime, endtime, pad=pad)
        return st
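
A minimal usage sketch for this method (an assumption for illustration, not part
of the original example): the WaveformChannel and merge_previews imports suggest
this is obspy.db's Client, so the construction below, the database URL, and the
trace id are all placeholder assumptions.

# Hypothetical usage sketch; the Client import, database URL and trace id are assumptions.
from obspy import UTCDateTime
from obspy.db.client import Client

client = Client('sqlite:///waveforms.sqlite')  # assumes a previously indexed waveform database
st = client.get_preview(trace_ids=['GE.APE..BHZ'],
                        starttime=UTCDateTime(2009, 1, 1),
                        endtime=UTCDateTime(2009, 1, 2),
                        pad=True)
print(st)  # one preview trace per matching channel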
Example no. 31
    def test_ppsd(self):
        """tests that the psd with the algorithm used in this module
        and the original evaluation algorithm (see _old class) produce the same
        results with scores that do not differ by more than 0.01 (roughly)
        """
        psd_periods_to_test = [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10,
                               15, 20]
        feats_rtol = 0.01  # 1e-2
        scores_rtol = 0.013  # 1.3e-2
        dataroot = join(dirname(__file__), 'data')
        for file, inv in (
            [
                join(dataroot, 'trace_GE.APE.mseed'),
                join(dataroot, 'inventory_GE.APE.xml')
            ],
            [
                join(dataroot, 'GE.FLT1..HH?.mseed'),
                join(dataroot, 'GE.FLT1.xml')
            ],
            [
                ('http://service.iris.edu/fdsnws/dataselect/1/query?'
                 '&net=TA&sta=A2*&start=2019-01-04T23:22:00&cha=BH?'
                 '&end=2019-01-04T23:24:00'),
                ('http://service.iris.edu/fdsnws/station/1/query?&net=TA'
                 '&sta=A2*&start=2019-01-04T23:22:00&cha=BH?'
                 '&end=2019-01-04T23:24:00&level=response')
            ],
        ):
            # trace, inv = 'GE.FLT1..HH?.mseed', 'GE.FLT1.xml'
            orig_stream = read(file)
            # print([_.get_id() for _ in orig_stream])
            metadata = read_inventory(inv)
            for multip_fact in [-1000, 1, 10000]:
                stream = Stream()
                for t in orig_stream:
                    t = t.copy()
                    t.data *= multip_fact
                    stream.append(t)
                # calculate features; stderr is not captured here because doing
                # so interferes with temporarily set output captures:
                feats = traces_features(stream, metadata)
                feats_old = np.asarray([obspyPSD.psd_values([5], _, metadata)
                                        for _ in stream])
                self.assertTrue(np.allclose(feats, feats_old, rtol=feats_rtol,
                                            atol=0, equal_nan=True))
                scores = aa_scores(feats)
                scores_old = aa_scores(feats_old)
                self.assertTrue(np.allclose(scores, scores_old, rtol=scores_rtol,
                                            atol=0, equal_nan=True))
                # test that the current version of psd is the same as our
                # first implementation (copied below in this module):
                feats_old2 = np.asarray([_old_psd_values([5], _, metadata)
                                        for _ in stream])
                assert np.allclose(feats, feats_old2, rtol=1.e-8)

                # test actually that more PSDs are the same (not only at the
                # feature(s) computed above)
                for _ in stream:
                    _psds_old = _old_psd_values(psd_periods_to_test, _,
                                                metadata)
                    _psds_new = trace_psd(_, metadata, psd_periods_to_test)[0]
                    assert np.allclose(_psds_old, _psds_new, equal_nan=True)
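
For reference, with atol=0 the numpy.allclose checks above reduce to a purely
relative comparison, abs(a - b) <= rtol * abs(b), which is what feats_rtol and
scores_rtol control; a small illustration with made-up numbers:

# Illustration of the relative-tolerance criterion used in the assertions above.
import numpy as np

a = np.array([100.0, 200.0, np.nan])
b = np.array([100.9, 201.5, np.nan])
print(np.allclose(a, b, rtol=0.01, atol=0, equal_nan=True))   # True:  both gaps within 1%
print(np.allclose(a, b, rtol=0.001, atol=0, equal_nan=True))  # False: gaps exceed 0.1%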