Example #1
Votes: 0
File: data.py  Project: wangwu1991/sito
def eventPicker(
        data,
        component='all',
        phase='P',
        window=(-100, 400),
        filter=(None, None),  #@ReservedAssignment
        new_sampling_rate=100,
        write=True,  #@ReservedAssignment
        **kwargs):
    """
    Pick a time window around the onset of events from mseed files.

    The resulting stream is written in separate files for each station and
    year.

    :param data: data object with stations property and getRawStream,
                 writeRFEvents methods
    :param component: 'Z', 'N', 'E' or 'all'
    :param phase: which phase onset is used? 'P', 'PP' or 'S' or something
        else; consider that events must show this phase for the stations
    :param window: window around the phase onset in seconds
    :param filter: filter stream between these frequencies
    :param new_sampling_rate: downsample stream to this sampling rate
    :param write: if True everything is written to files,
        if False a stream object is returned
    :kwargs: passed to _getEvents - in the end they are passed to the
        events.Events.load function if data.events is None
    :return: failure_list if write is True,
        otherwise the tuple (stream_return, failure_list)
    :raises ValueError: if neither data.events nor kwargs determine events
    """
    log.info('Start event picker: %s' % util.parameters())
    try:
        log.info('Data used %s' % data.raw)
    except AttributeError:
        # data objects without a raw attribute carry a regex instead
        log.info('Data regex used %s' % data.raw_regex)
    log.info('Extracted data for events will be saved in %s' % data.rf_events)
    if data.events is None and len(kwargs) == 0:
        raise ValueError('No arguments to determine events!')
    failure_list = []
    # collect traces only when they are not written to disk
    stream_return = None if write else Stream()
    stations = data.stations
    all_events = _getEvents(data.events, **kwargs)
    all_events.sort()
    log.info('Events between %s and %s' %
             (all_events[0].datetime.date, all_events[-1].datetime.date))
    first_year = all_events[0].datetime.year
    last_year = all_events[-1].datetime.year
    for station_name, station in stations.items():
        for year in range(first_year, last_year + 1):
            # pick all events falling into this calendar year
            events = all_events.pick(after='%s-1-1' % year,
                                     before='%s-1-1' % (year + 1),
                                     replace=False)
            stream_year = Stream()
            for event in events:
                dist = util.gps2DistDegree(station.latitude, station.longitude,
                                           event.latitude, event.longitude)
                baz = gps2DistAzimuth(station.latitude, station.longitude,
                                      event.latitude, event.longitude)[1]
                arrival = util.ttt(dist, event.depth).findPhase(phase)
                if arrival is None:
                    log.warning(
                        'Phase %s not present at distance %s depth %s' %
                        (phase, dist, event.depth))
                    # fall back to the first available arrival
                    arrival = util.ttt(dist, event.depth)[0]
                onset = event.datetime + arrival.time
                t1 = onset + window[0]
                t2 = onset + window[1]
                try:
                    stream = data.getRawStream(t1, station_name, component, t2)
                except Exception as ex:
                    # best effort: record the failure and continue with the
                    # next event instead of aborting the whole run
                    failure_list.append((station_name, event.id, str(ex)))
                    continue
                # Write header entries consumed by later processing steps
                stats = AttribDict({
                    'event': event,
                    'station': station_name,
                    'dist': dist,
                    'azi': baz,
                    'inci': arrival.inci,
                    phase.lower() + 'onset': onset,
                    'slowness': arrival.slow,
                    'filter': ''
                })
                for trace in stream:
                    trace.stats.update(stats)
                stream_year.extend(stream)
            if len(stream_year) > 0:
                # basic data processing (filtering, downsampling)
                stream_year.demean()
                stream_year.detrend()
                if filter[0] is not None or filter[1] is not None:
                    stream_year.filter2(freqmin=filter[0], freqmax=filter[1])
                # only downsample when the Nyquist criterion stays satisfied
                if new_sampling_rate <= (
                        max(stream_year.getHI('sampling_rate')) / 2.):
                    stream_year.downsample2(new_sampling_rate)
                if write:
                    # NOTE(review): event is the last event of the inner loop
                    # here - presumably only its year matters for the output
                    # file name; confirm against data.writeRFEvents
                    data.writeRFEvents(stream_year, station_name,
                                       event.datetime)
                else:
                    stream_return.extend(stream_year)
    if len(failure_list) > 0:
        log.warning('Failed to load the data for:\nstation     event.id     '
                    'reason\n' +
                    '\n'.join([' '.join(entry) for entry in failure_list]))
    if write:
        return failure_list
    else:
        return stream_return, failure_list
Example #2
Votes: 0
File: data.py  Project: iceseismic/sito
def eventPicker(
    data,
    component="all",
    phase="P",
    window=(-100, 400),
    filter=(None, None),  # @ReservedAssignment
    new_sampling_rate=100,
    write=True,  # @ReservedAssignment
    **kwargs
):
    """
    Pick a time window around the onset of events from mseed files.

    The resulting stream is written in separate files for each station and year.

    :param data: data object with stations property and getRawStream,
                 writeRFEvents methods
    :param component: 'Z', 'N', 'E' or 'all'
    :param phase: which phase onset is used? 'P', 'PP' or 'S' or something else;
        consider that events must show this phase for the stations
    :param window: window around the phase onset in seconds
    :param filter: filter stream between these frequencies
    :param new_sampling_rate: downsample stream to this sampling rate
    :param write: if True everything is written to files,
        if False a stream object is returned
    :kwargs: passed to _getEvents - in the end they are passed to the
        events.Events.load function if data.events is None
    :return: failure_list if write is True,
        otherwise the tuple (stream_return, failure_list)
    :raises ValueError: if neither data.events nor kwargs determine events
    """
    log.info("Start event picker: %s" % util.parameters())
    try:
        log.info("Data used %s" % data.raw)
    except AttributeError:
        # data objects without a raw attribute carry a regex instead
        log.info("Data regex used %s" % data.raw_regex)
    log.info("Extracted data for events will be saved in %s" % data.rf_events)
    if data.events is None and len(kwargs) == 0:
        raise ValueError("No arguments to determine events!")
    failure_list = []
    # collect traces only when they are not written to disk
    stream_return = None if write else Stream()
    stations = data.stations
    all_events = _getEvents(data.events, **kwargs)
    all_events.sort()
    log.info("Events between %s and %s" % (all_events[0].datetime.date, all_events[-1].datetime.date))
    first_year = all_events[0].datetime.year
    last_year = all_events[-1].datetime.year
    for station_name, station in stations.items():
        for year in range(first_year, last_year + 1):
            # pick all events falling into this calendar year
            events = all_events.pick(after="%s-1-1" % year, before="%s-1-1" % (year + 1), replace=False)
            stream_year = Stream()
            for event in events:
                dist = util.gps2DistDegree(station.latitude, station.longitude, event.latitude, event.longitude)
                baz = gps2DistAzimuth(station.latitude, station.longitude, event.latitude, event.longitude)[1]
                arrival = util.ttt(dist, event.depth).findPhase(phase)
                if arrival is None:
                    log.warning("Phase %s not present at distance %s depth %s" % (phase, dist, event.depth))
                    # fall back to the first available arrival
                    arrival = util.ttt(dist, event.depth)[0]
                onset = event.datetime + arrival.time
                t1 = onset + window[0]
                t2 = onset + window[1]
                try:
                    stream = data.getRawStream(t1, station_name, component, t2)
                except Exception as ex:
                    # best effort: record the failure and continue with the
                    # next event instead of aborting the whole run
                    failure_list.append((station_name, event.id, str(ex)))
                    continue
                # Write header entries consumed by later processing steps
                stats = AttribDict(
                    {
                        "event": event,
                        "station": station_name,
                        "dist": dist,
                        "azi": baz,
                        "inci": arrival.inci,
                        phase.lower() + "onset": onset,
                        "slowness": arrival.slow,
                        "filter": "",
                    }
                )
                for trace in stream:
                    trace.stats.update(stats)
                stream_year.extend(stream)
            if len(stream_year) > 0:
                # basic data processing (filtering, downsampling)
                stream_year.demean()
                stream_year.detrend()
                if filter[0] is not None or filter[1] is not None:
                    stream_year.filter2(freqmin=filter[0], freqmax=filter[1])
                # only downsample when the Nyquist criterion stays satisfied
                if new_sampling_rate <= (max(stream_year.getHI("sampling_rate")) / 2.0):
                    stream_year.downsample2(new_sampling_rate)
                if write:
                    # NOTE(review): event is the last event of the inner loop
                    # here - presumably only its year matters for the output
                    # file name; confirm against data.writeRFEvents
                    data.writeRFEvents(stream_year, station_name, event.datetime)
                else:
                    stream_return.extend(stream_year)
    if len(failure_list) > 0:
        log.warning(
            "Failed to load the data for:\nstation     event.id     "
            "reason\n" + "\n".join([" ".join(entry) for entry in failure_list])
        )
    if write:
        return failure_list
    else:
        return stream_return, failure_list