Example #1
def eventPicker(
        data,
        component='all',
        phase='P',
        window=(-100, 400),
        filter=(None, None),
        new_sampling_rate=100,
        write=True,  #@ReservedAssignment
        **kwargs):
    """
    Pick a window around the onset of events from mseed files.

    The resulting stream is written to separate files for each station and year.
    :param data: data object with stations property and getRawStream,
                 writeRFEvents methods; its events attribute (file with
                 events, Events object or None - in the last case kwargs
                 have to be defined) is passed to _getEvents
    :param component: 'Z', 'N', 'E' or 'all'
    :param phase: which onset is used: 'P', 'PP', 'S' or another phase;
        note that the events must show this phase for the stations
    :param window: window around the onset in seconds
    :param filter: filter stream between these frequencies
    :param new_sampling_rate: downsample stream to this sampling rate
    :param write: if True, everything is written to files;
        if False, a stream object is returned
    :kwargs: passed to _getEvents and, in the end, to the
        events.Events.load function if data.events is None
    """
    log.info('Start event picker: %s' % util.parameters())
    try:
        log.info('Data used %s' % data.raw)
    except AttributeError:
        log.info('Data regex used %s' % data.raw_regex)
    log.info('Extracted data for events will be saved in %s' % data.rf_events)
    if data.events is None and len(kwargs) == 0:
        raise Exception('No arguments to determine events!')
    failure_list = []
    if write:
        stream_return = None
    else:
        stream_return = Stream()
    stations = data.stations
    all_events = _getEvents(data.events, **kwargs)
    all_events.sort()
    log.info('Events between %s and %s' %
             (all_events[0].datetime.date, all_events[-1].datetime.date))
    first_year = all_events[0].datetime.year
    last_year = all_events[-1].datetime.year
    for station_name, station in stations.items():
        for year in range(first_year, last_year + 1):
            events = all_events.pick(after='%s-1-1' % year,
                                     before='%s-1-1' % (year + 1),
                                     replace=False)
            stream_year = Stream()
            for event in events:
                dist = util.gps2DistDegree(station.latitude, station.longitude,
                                           event.latitude, event.longitude)
                baz = gps2DistAzimuth(station.latitude, station.longitude,
                                      event.latitude, event.longitude)[1]
                arrival = util.ttt(dist, event.depth).findPhase(phase)
                if arrival is None:
                    log.warning(
                        'Phase %s not present at distance %s depth %s' %
                        (phase, dist, event.depth))
                    arrival = util.ttt(dist, event.depth)[0]
                onset = event.datetime + arrival.time
                t1 = onset + window[0]
                t2 = onset + window[1]
                try:
                    stream = data.getRawStream(t1, station_name, component, t2)
                except Exception as ex:
                    failure_list.append((station_name, event.id, str(ex)))
                    continue
                # write header entries into the trace stats; filtering and
                # downsampling are done per year further below
                stats = AttribDict({
                    'event': event,
                    'station': station_name,
                    'dist': dist,
                    'azi': baz,
                    'inci': arrival.inci,
                    phase.lower() + 'onset': onset,
                    'slowness': arrival.slow,
                    'filter': ''
                })
                for trace in stream:
                    trace.stats.update(stats)
                stream_year.extend(stream)
            if len(stream_year) > 0:
                stream_year.demean()
                stream_year.detrend()
                if filter[0] is not None or filter[1] is not None:
                    stream_year.filter2(freqmin=filter[0], freqmax=filter[1])
                if new_sampling_rate <= (
                        max(stream_year.getHI('sampling_rate')) / 2.):
                    stream_year.downsample2(new_sampling_rate)
                if write:
                    data.writeRFEvents(stream_year, station_name,
                                       event.datetime)
                else:
                    stream_return.extend(stream_year)
    if len(failure_list) > 0:
        log.warning('Failed to load the data for:\nstation     event.id     '
                    'reason\n' +
                    '\n'.join([' '.join(entry) for entry in failure_list]))
    if write:
        return failure_list
    else:
        return stream_return, failure_list
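
A hypothetical call, shown only as a sketch: data is assumed to be a project-specific object that provides the stations property, the getRawStream and writeRFEvents methods, and an events attribute readable by _getEvents, as described in the docstring.

# with write=True only the list of load failures is returned
failures = eventPicker(data, component='Z', phase='P', window=(-50, 300),
                       filter=(0.03, 2.0), new_sampling_rate=50, write=True)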
Example #2
 def getRawStream(self,
                  date,
                  station,
                  component='Z',
                  endtime=False,
                  checkfile=False):
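     """
     Return the raw stream for one station, date and component.

     component may be 'Z', 'N', 'E', a combination such as 'ZNE', or 'all'.
     If endtime is given, data is read from date until endtime, spanning
     day files if necessary.  If checkfile is True, only the existence of
     the underlying mseed file(s) is checked instead of returning data.
     """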
     if component == 'all':
         component = 'ZNE'
     NC = len(component)
     if NC > 1:
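         # more than one component requested: fetch each component
         # separately via recursion and combine the results below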
         stream = Stream()
         if checkfile:
             stream = []
         for comp in component:
             stream.extend(self.getRawStream(date, station, comp, endtime))
         if checkfile:
             import numpy as np
             return np.all(stream)
         #if None in stream:
         #    raise ValueError('One or more component is None')
         Ns = [stream[i].stats.npts for i in range(NC)]
         if max(Ns) - min(Ns) > 1:
             raise ValueError('Components have different length')
         elif max(Ns) - min(Ns) == 1:
             # trim the longer components by one sample so that all match
             for i in range(NC):
                 if Ns[i] > min(Ns):
                     stream[i].data = stream[i].data[:-1]
                     stream[i].stats.npts -= 1
         return stream
     if station == 'LVC':
         log.warning('Using BH channel for LVC')
         file_ = self.lookForMseed(date, station, 'BH' + component)
     else:
         file_ = self.lookForMseed(date, station, 'HH' + component)
     if file_ is None:
         raise ValueError('No IPOC file for %s %s %s' %
                          (station, component, date.date))
     elif checkfile:
         return True
     merge_later = False
     try:
         if endtime and date.julday == endtime.julday:
             ms = read(file_,
                       format='MSEED',
                       starttime=date,
                       endtime=endtime)
         elif endtime and date.julday != endtime.julday:
             border = date.__class__(date.date) + 24 * 3600
             ms1 = read(file_, starttime=date)  #, endtime=border)
             ms2 = self.getRawStream(border, station, component, endtime)
             ms = ms1 + ms2
             ms.merge()
             merge_later = True
         else:
             ms = read(file_)
     except (ValueError, TypeError) as ex:
         raise ValueError('Error reading IPOC file %s because:\n%s' %
                          (file_, str(ex)))
     if len(ms) == 0:
         raise ValueError('No traces in IPOC stream!')
     if station == 'LVC':
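         # LVC channel codes end in '1'/'2'; map them to 'N'/'E'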
         for tr in ms:
             if tr.stats.channel[-1] == '1':
                 tr.stats.channel = tr.stats.channel[:-1] + 'N'
             elif tr.stats.channel[-1] == '2':
                 tr.stats.channel = tr.stats.channel[:-1] + 'E'
     if any(network == 'NC' for network in ms.getHI('network')):
         # change network code to CX
         ms.setHI('network', 'CX')
         if merge_later:
             ms.merge()
     return ms
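
A minimal usage sketch, assuming data is an instance of the class that provides this method (eventPicker in Example #1 calls it the same way), that UTCDateTime comes from ObsPy (whose read and Stream are used above), and that 'PB01' is a placeholder station code.

from obspy import UTCDateTime

t0 = UTCDateTime('2010-05-03T12:00:00')
# read one hour of all three components; a ValueError is raised if no file is found
st = data.getRawStream(t0, 'PB01', component='all', endtime=t0 + 3600)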