Example #1
def _detect(detector, st, threshold, trig_int, moveout=0, min_trig=0,
            process=True, extract_detections=False, debug=0):
    """
    Detect within continuous data using the subspace method.

    Not to be called directly, use the detector.detect method.

    :type detector: eqcorrscan.core.subspace.Detector
    :param detector: Detector to use.
    :type st: obspy.core.stream.Stream
    :param st: Un-processed stream to detect within using the subspace \
        detector
    :type threshold: float
    :param threshold: Threshold value for detections between 0-1
    :type trig_int: float
    :param trig_int: Minimum trigger interval in seconds.
    :type moveout: float
    :param moveout: Maximum allowable moveout window for non-multiplexed,
        network detection.  See note.
    :type min_trig: int
    :param min_trig: Minimum number of stations exceeding threshold for \
        non-multiplexed, network detection. See note.
    :type process: bool
    :param process: Whether or not to process the stream according to the \
        parameters defined by the detector.  Default is to process the \
        data (True).
    :type extract_detections: bool
    :param extract_detections: Whether to extract waveforms for each \
        detection or not; if True, both detections and streams are returned.
    :type debug: int
    :param debug: Debug output level from 0-5.

    :return: list of detections
    :rtype: list of eqcorrscan.core.match_filter.DETECTION
    """
    from eqcorrscan.core import subspace_statistic
    detections = []
    # First process the stream
    if process:
        if debug > 0:
            print('Processing Stream')
        stream, stachans = _subspace_process(streams=[st.copy()],
                                             lowcut=detector.lowcut,
                                             highcut=detector.highcut,
                                             filt_order=detector.filt_order,
                                             sampling_rate=detector.sampling_rate,
                                             multiplex=detector.multiplex,
                                             stachans=detector.stachans,
                                             parallel=True,
                                             align=False,
                                             shift_len=None,
                                             reject=False)
    else:
        # Check the sampling rate at the very least
        for tr in st:
            if tr.stats.sampling_rate != detector.sampling_rate:
                raise ValueError('Sampling rates do not match.')
        stream = [st]
        stachans = detector.stachans
    outtic = time.perf_counter()
    if debug > 0:
        print('Computing detection statistics')
    stats = np.zeros((len(stream[0]),
                      len(stream[0][0]) - len(detector.data[0][0]) + 1),
                     dtype=np.float32)
    for det_channel, in_channel, i in zip(detector.data, stream[0],
                                          np.arange(len(stream[0]))):
        stats[i] = subspace_statistic.\
            det_statistic(detector=det_channel.astype(np.float32),
                          data=in_channel.data.astype(np.float32))
        if debug > 0:
            print(stats[i].shape)
        if debug > 3:
            plt.plot(stats[i])
            plt.show()
        # Hard typing in Cython loop requires float32 type.
    # Convert the trigger interval to samples in the detection statistics
    if detector.multiplex:
        trig_int_samples = (len(detector.stachans) *
                            detector.sampling_rate * trig_int)
    else:
        trig_int_samples = detector.sampling_rate * trig_int
    if debug > 0:
        print('Finding peaks')
    peaks = []
    for i in range(len(stream[0])):
        peaks.append(findpeaks.find_peaks2_short(arr=stats[i],
                                                 thresh=threshold,
                                                 trig_int=trig_int_samples,
                                                 debug=debug))
    if not detector.multiplex:
        # Conduct network coincidence triggering
        peaks = findpeaks.coin_trig(peaks=peaks,
                                    samp_rate=detector.sampling_rate,
                                    moveout=moveout, min_trig=min_trig,
                                    stachans=stachans, trig_int=trig_int)
    else:
        peaks = peaks[0]
    if len(peaks) > 0:
        for peak in peaks:
            if detector.multiplex:
                detecttime = st[0].stats.starttime + (peak[1] /
                                                      (detector.sampling_rate *
                                                       len(detector.stachans)))
            else:
                detecttime = st[0].stats.starttime + (peak[1] /
                                                      detector.sampling_rate)
            rid = ResourceIdentifier(id=detector.name + '_' +
                                     str(detecttime),
                                     prefix='smi:local')
            ev = Event(resource_id=rid)
            cr_i = CreationInfo(author='EQcorrscan',
                                creation_time=UTCDateTime())
            ev.creation_info = cr_i
            # All detection info in Comments for lack of a better idea
            thresh_str = 'threshold=' + str(threshold)
            ccc_str = 'detect_val=' + str(peak[0])
            used_chans = 'channels used: ' +\
                ' '.join([str(pair) for pair in detector.stachans])
            ev.comments.append(Comment(text=thresh_str))
            ev.comments.append(Comment(text=ccc_str))
            ev.comments.append(Comment(text=used_chans))
            for stachan in detector.stachans:
                tr = st.select(station=stachan[0], channel=stachan[1])
                if tr:
                    net_code = tr[0].stats.network
                else:
                    net_code = ''
                pick_tm = detecttime
                wv_id = WaveformStreamID(network_code=net_code,
                                         station_code=stachan[0],
                                         channel_code=stachan[1])
                ev.picks.append(Pick(time=pick_tm, waveform_id=wv_id))
            detections.append(DETECTION(detector.name,
                                        detecttime,
                                        len(detector.stachans),
                                        peak[0],
                                        threshold,
                                        'subspace', detector.stachans,
                                        event=ev))
    outtoc = time.perf_counter()
    print('Detection took %s seconds' % str(outtoc - outtic))
    if extract_detections:
        detection_streams = extract_from_stream(st, detections)
        return detections, detection_streams
    return detections
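A minimal usage sketch for the wrapper described in the docstring above (hedged: assumes EQcorrscan's subspace.Detector API; the file names are hypothetical):

from obspy import read
from eqcorrscan.core import subspace

# Load a previously-built detector and a day of continuous data
detector = subspace.Detector()
detector.read('my_detector.h5')  # hypothetical saved detector file
st = read('my_continuous_data.ms')  # hypothetical miniSEED file
# Detector.detect calls the _detect function above internally
detections = detector.detect(st=st, threshold=0.5, trig_int=10)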
Example #2
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2,
               latitude=45.0,
               longitude=25.0,
               depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.magnitudes.append(
        Magnitude(mag=0.1,
                  magnitude_type='ML',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=0.5,
                  magnitude_type='Mc',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(
        Magnitude(mag=1.3,
                  magnitude_type='Ms',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[0].resource_id))

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ',
                                      channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ',
                                      channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude - 0
    test_event.picks = [
        Pick(waveform_id=_waveform_id_1,
             phase_hint='IAML',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1,
             onset='impulsive',
             phase_hint='PN',
             polarity='positive',
             time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1,
             phase_hint='IAML',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2,
             onset='impulsive',
             phase_hint='SG',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2,
             onset='impulsive',
             phase_hint='PN',
             polarity='undecidable',
             time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic")
    ]
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes = [
        Amplitude(generic_amplitude=2.0,
                  period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id,
                  unit='m',
                  magnitude_hint='ML',
                  category='point',
                  type='AML'),
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id,
                  type='END',
                  category='duration',
                  unit='s',
                  magnitude_hint='Mc',
                  snr=2.3),
        Amplitude(generic_amplitude=5.0,
                  period=0.6,
                  pick_id=test_event.picks[2].resource_id,
                  waveform_id=test_event.picks[0].waveform_id,
                  unit='m',
                  category='point',
                  type='AML')
    ]
    test_event.origins[0].arrivals = [
        Arrival(time_weight=0,
                phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id),
        Arrival(time_weight=2,
                phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5,
                time_residual=0.2,
                distance=15,
                azimuth=25),
        Arrival(time_weight=2,
                phase=test_event.picks[4].phase_hint,
                pick_id=test_event.picks[4].resource_id,
                backazimuth_residual=5,
                time_residual=0.2,
                distance=15,
                azimuth=25)
    ]
    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(standard_error=0.01,
                                                  azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000,
            semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000,
            major_axis_plunge=20,
            major_axis_azimuth=100,
            major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F) - Note have to check program
    # used to determine which fields are filled....
    test_event.focal_mechanisms.append(
        FocalMechanism(nodal_planes=NodalPlanes(
            nodal_plane_1=NodalPlane(strike=180,
                                     dip=20,
                                     rake=30,
                                     strike_errors=QuantityError(10),
                                     dip_errors=QuantityError(10),
                                     rake_errors=QuantityError(20))),
                       method_id=ResourceIdentifier(
                           "smi:nc.anss.org/focalMechanism/FPFIT"),
                       creation_info=CreationInfo(agency_id="NC"),
                       misfit=0.5,
                       station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(
        Origin(time=UTCDateTime("2012-03-26") + 1.2,
               latitude=45.1,
               longitude=25.2,
               depth=14500))
    test_event.magnitudes.append(
        Magnitude(mag=0.1,
                  magnitude_type='MW',
                  creation_info=CreationInfo('TES'),
                  origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100,
            tensor=Tensor(
                m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
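Since this builds a fully-populated Event, a natural follow-up is a QuakeML round-trip; a short sketch using standard ObsPy I/O:

from obspy import read_events
from obspy.core.event import Catalog

event = full_test_event()
# Write the event out and read it back to check attributes survive I/O
Catalog(events=[event]).write('full_test_event.xml', format='QUAKEML')
event_back = read_events('full_test_event.xml')[0]
assert len(event_back.picks) == len(event.picks)
assert len(event_back.magnitudes) == len(event.magnitudes)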
Example #3
def readheader(sfile):
    """
    Read header information from a seisan nordic format S-file.
    Returns an obspy.core.event.Event type: note this changed for version \
    0.1.0 from the inbuilt class types.

    :type sfile: str
    :param sfile: Path to the s-file

    :returns: :class:`~obspy.core.event.Event`

    >>> event = readheader('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                    '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    """
    import warnings
    from obspy.core.event import Event, Origin, Magnitude, Comment
    from obspy.core.event import EventDescription, CreationInfo
    f = open(sfile, 'r')
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = f.readline()
    if len(topline.rstrip()) != 80:
        raise IOError('s-file has a corrupt header, not 80 char long')
    f.seek(0)
    for line in f:
        if line[79] in [' ', '1']:
            topline = line
            break
        if line[79] == '7':
            raise IOError('No header found, corrupt s-file?')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        warnings.warn("Couldn't read a date from sfile: " + sfile)
        new_event.origins.append(Origin(time=UTCDateTime(0)))
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    if _float_conv(topline[23:30]) != 999:
        new_event.origins[0].latitude = _float_conv(topline[23:30])
        new_event.origins[0].longitude = _float_conv(topline[31:38])
        new_event.origins[0].depth = _float_conv(topline[39:43]) * 1000
    else:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = float('NaN')
        new_event.origins[0].longitude = float('NaN')
        new_event.origins[0].depth = float('NaN')
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    # new_event.origins[0].nsta??? = _int_conv(topline[49:51])
    if _float_conv(topline[51:55]) != 999:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.
    if len(topline[59].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[0].mag = _float_conv(topline[56:59])
        new_event.magnitudes[0].magnitude_type = topline[59]
        new_event.magnitudes[0].creation_info = \
            CreationInfo(agency_id=topline[60:63].strip())
        new_event.magnitudes[0].origin_id = new_event.origins[0].\
            resource_id
    if len(topline[67].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[1].mag = _float_conv(topline[64:67])
        new_event.magnitudes[1].magnitude_type = topline[67]
        new_event.magnitudes[1].creation_info = \
            CreationInfo(agency_id=topline[68:71].strip())
        new_event.magnitudes[1].origin_id = new_event.origins[0].\
            resource_id
    if len(topline[75].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[2].mag = _float_conv(topline[72:75])
        new_event.magnitudes[2].magnitude_type = topline[75]
        new_event.magnitudes[2].creation_info = \
            CreationInfo(agency_id=topline[76:79].strip())
        new_event.magnitudes[2].origin_id = new_event.origins[0].\
            resource_id
    f.close()
    # convert the nordic notation of magnitude to more general notation
    for _magnitude in new_event.magnitudes:
        _magnitude.magnitude_type = _nortoevmag(_magnitude.magnitude_type)
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = str(new_event.origins[0].resource_id)
    if len(new_event.magnitudes) > 1:
        try:
            # Select moment magnitude first, then local, then the rest in order
            mag_filter = [
                'MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'Mc', 'MC'
            ]
            _magnitudes = [(m.magnitude_type, m.resource_id)
                           for m in new_event.magnitudes]
            preferred_magnitude = sorted(_magnitudes,
                                         key=lambda x: mag_filter.index(x[0]))
            new_event.preferred_magnitude_id = str(preferred_magnitude[0][1])
        except ValueError:
            # If there is a magnitude not specified in filter
            new_event.preferred_magnitude_id =\
                str(new_event.magnitudes[0].resource_id)
    elif len(new_event.magnitudes) == 1:
        new_event.preferred_magnitude_id =\
            str(new_event.magnitudes[0].resource_id)
    return new_event
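The preferred-magnitude selection above leans on list.index raising ValueError for types missing from the filter; a self-contained sketch of that pattern:

mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'Mc', 'MC']
magnitudes = [('ML', 'mag-id-1'), ('MW', 'mag-id-2')]
try:
    # The lowest index in mag_filter wins, so 'MW' sorts before 'ML'
    preferred = sorted(magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
except ValueError:
    # Fall back to the first magnitude when a type is not in the filter
    preferred = magnitudes[0]
print(preferred)  # ('MW', 'mag-id-2')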
Example #4
stationxml = "/home/baillard/Dropbox/_Moi/Projects/Axial/DATA/STATIONS/AXIAL_stations.xml"
time_delay = 20  # time in seconds
_network_code = 'OO'
_station_code = 'A*'
_location_code = '*'
_channel_code = '[E|H]H*'
pick_colormap = {'P': 'k', 'S': 'r'}

### Read Event

catalog = read_events(event_file)
single_event = catalog[8]

### Check if type is correct

if not isinstance(single_event, Event):
    raise Exception('Event given is not an obspy Event')

### Read Station xml

inv = read_inventory(stationxml, format="STATIONXML")

### Read SDS client

client = sds_client(sds_root, sds_type='D', format='MSEED')

### Read starttime from preferred origin

origin_pref = [
    x for x in single_event.origins
    if x.resource_id == single_event.preferred_origin_id
][0]
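ObsPy also ships a convenience method for this lookup; an equivalent sketch (the starttime line is an assumption based on the time_delay set above):

origin_pref = single_event.preferred_origin()  # resolves preferred_origin_id
starttime = origin_pref.time - time_delay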
Example #5
        from obspy.clients.fdsn import Client
        from obspy.core.event import Event, Origin, Magnitude
        if not user and not password:
            client = Client("IRIS")
        else:
            client = Client("IRIS", user=user, password=password)
        # will only work for events in the 'IRIS' catalog
        # (future: for Alaska events, read the AEC catalog)
        if use_catalog == 1:
            print("WARNING using event data from the IRIS catalog")
            cat = client.get_events(starttime=otime - sec_before_after_event,
                                    endtime=otime + sec_before_after_event)
            ev = cat[0]
        else:
            print("WARNING using event data from user-defined catalog")
            ev = Event()
            org = Origin()
            org.latitude = elat
            org.longitude = elon
            org.depth = edep
            org.time = otime
            mag = Magnitude()
            mag.mag = emag
            mag.magnitude_type = "Mw"
            ev.origins.append(org)
            ev.magnitudes.append(mag)

            ref_time_place = Event()
            ref_org = Origin()
            ref_org.latitude = rlat
            ref_org.longitude = rlon
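A compact, self-contained sketch of the same user-defined-catalog pattern (the coordinate, depth, time, and magnitude values here are placeholders):

from obspy import UTCDateTime
from obspy.core.event import Event, Origin, Magnitude

ev = Event()
ev.origins.append(Origin(latitude=61.45, longitude=-150.0,
                         depth=50000, time=UTCDateTime(2020, 1, 1)))
ev.magnitudes.append(Magnitude(mag=5.0, magnitude_type='Mw'))
ev.preferred_origin_id = ev.origins[0].resource_id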
Example #6
    def simulate_trace(
        self,
        max_amplitude: float = 1e-4,
        width: float = 0.2,
        noise: bool = False,
    ) -> Tuple[Trace, Event, float, float]:
        """ Make a wood-anderson trace and convolve it with a response. """
        from scipy.signal import ricker

        # Make dummy data in meters on Wood Anderson
        np.random.seed(42)
        if noise:
            data = np.random.randn(int(self.sampling_rate * self.length))
        else:
            data = np.zeros(int(self.sampling_rate * self.length))
        wavelet = ricker(
            int(self.sampling_rate * self.length),
            a=(width * self.sampling_rate / 4))
        wavelet /= wavelet.max()
        wavelet *= max_amplitude
        vals = sorted([(val, ind) for ind, val in enumerate(wavelet)],
                      key=lambda x: x[0])
        period = abs(vals[0][1] - vals[1][1]) / self.sampling_rate
        half_max = (wavelet.max() - wavelet.min()) / 2

        data += wavelet

        # Make dummy trace
        tr = Trace(data=data, header=dict(
            station=self.inv[0][0].code,
            network=self.inv[0].code,
            location=self.inv[0][0][0].location_code,
            channel=self.inv[0][0][0].code,
            sampling_rate=self.sampling_rate,
            starttime=UTCDateTime(2020, 1, 1)))
        tr = tr.detrend()

        # Remove Wood-Anderson response and simulate seismometer
        resp = self.inv[0][0][0].response
        paz = resp.get_paz()
        paz = {
            'poles': paz.poles,
            'zeros': paz.zeros,
            'gain': paz.normalization_factor,
            'sensitivity': resp.instrument_sensitivity.value,
        }
        tr = tr.simulate(
            paz_remove=PAZ_WA, paz_simulate=paz)

        # Make an event
        mid_point = tr.stats.starttime + 0.5 * (
                tr.stats.endtime - tr.stats.starttime)
        event = Event(picks=[
            Pick(phase_hint="P",
                 time=mid_point - 3.0,
                 waveform_id=WaveformStreamID(
                     network_code=tr.stats.network,
                     station_code=tr.stats.station,
                     location_code=tr.stats.location,
                     channel_code=tr.stats.channel)),
            Pick(phase_hint="S", time=mid_point,
                 waveform_id=WaveformStreamID(
                     network_code=tr.stats.network,
                     station_code=tr.stats.station,
                     location_code=tr.stats.location,
                     channel_code=tr.stats.channel))])
        return tr, event, period, half_max
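Note that scipy.signal.ricker has been deprecated and removed in recent SciPy releases; a drop-in NumPy implementation of the same wavelet (matching the old SciPy formula) would be:

import numpy as np

def ricker(points, a):
    # Mexican-hat (Ricker) wavelet, as formerly defined in scipy.signal
    amp = 2 / (np.sqrt(3 * a) * np.pi ** 0.25)
    vec = np.arange(points) - (points - 1.0) / 2
    wsq = a ** 2
    return amp * (1 - vec ** 2 / wsq) * np.exp(-vec ** 2 / (2 * wsq))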
Example #7
def readSeishubEventFile(filename):
    """
    Reads a Seishub event file and returns a ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.readEvents` function, call this instead.

    :type filename: str
    :param filename: Seishub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: A ObsPy Catalog object.

    .. rubric:: Example
    """
    global CURRENT_TYPE

    base_name = os.path.basename(filename)

    if base_name.lower().startswith("baynet"):
        CURRENT_TYPE = "baynet"
    elif base_name.lower().startswith("earthworm"):
        CURRENT_TYPE = "earthworm"
    elif base_name.lower().startswith("gof"):
        CURRENT_TYPE = "seiscomp3"
    elif base_name.lower().startswith("obspyck") or base_name == "5622":
        CURRENT_TYPE = "obspyck"
    elif base_name.lower().startswith("toni"):
        CURRENT_TYPE = "toni"
    else:
        print "AAAAAAAAAAAAAAAAAAAAAAAAAAHHHHHHHHHHHHHHHHHHH"
        raise Exception

    # Just init the parser, the SeisHub event file format has no namespaces.
    parser = XMLParser(filename)
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text

    # A Seishub event just specifies a single event so Catalog information is
    # not really given.
    catalog = Catalog()
    catalog.resource_id = "/".join([RESOURCE_ROOT, "catalog", public_id])

    # Read the event_type tag.
    account = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser, str)
    public = parser.xpath2obj('event_type/public', parser, str)
    public = {
        "True": True,
        "False": False,
        "true": True,
        "false": False
    }.get(public, None)
    if account is not None and account.lower() != "sysop":
        public = False
    # The author will be stored in the CreationInfo object. This will be the
    # creation info of the event as well as on all picks.
    author = user
    if CURRENT_TYPE in ["seiscomp3", "earthworm"]:
        public = False
        author = CURRENT_TYPE
        global_evaluation_mode = "automatic"
    elif CURRENT_TYPE in ["baynet", "toni"]:
        public = True
        author = CURRENT_TYPE
        global_evaluation_mode = "manual"
    creation_info = {
        "author": author,
        "agency_id": "Erdbebendienst Bayern",
        "agency_uri": "%s/agency" % RESOURCE_ROOT,
        "creation_time": NOW
    }

    # Create the event object.
    event = Event(resource_id="/".join([RESOURCE_ROOT, "event", public_id]),
                  creation_info=creation_info)
    # If account is None or 'sysop' and public is true, write 'public' in the
    # comment, 'private' otherwise.
    event.extra = AttribDict()
    event.extra.public = {'value': public, 'namespace': NAMESPACE}
    event.extra.evaluationMode = {
        'value': global_evaluation_mode,
        'namespace': NAMESPACE
    }

    event_type = parser.xpath2obj('type', parser, str)
    if event_type is not None:
        if event_type == "induced earthquake":
            event_type = "induced or triggered event"
        if event_type != "null":
            event.event_type = event_type

    # Parse the origins.
    origins = parser.xpath("origin")
    if len(origins) > 1:
        msg = "Only files with a single origin are currently supported"
        raise Exception(msg)
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el, origin)
        if magnitude.mag is None:
            continue
        event.magnitudes.append(magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic, manual)
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        if pick is None:
            continue
        event.picks.append(pick)
        # The arrival object gets the following things from the Seishub.pick
        # objects
        # arrival.time_weight = pick.phase_weight
        # arrival.time_residual = pick.phase_res
        # arrival.azimuth = pick.azimuth
        # arrival.take_off_angle = pick.incident
        # arrival.distance = hyp_dist
        arrival = __toArrival(parser, pick_el, global_evaluation_mode, pick)
        if event.origins:
            event.origins[0].arrivals.append(arrival)

    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)

    # Parse the amplitudes
    # we don't reference their id in the corresponding station magnitude,
    # because we use one amplitude measurement for each component
    for el in parser.xpath("stationMagnitude/amplitude"):
        event.amplitudes.append(__toAmplitude(parser, el))

    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id

    for _i, stat_mag in enumerate(event.station_magnitudes):
        contrib = StationMagnitudeContribution()
        weight = None
        # The order of station magnitude objects is the same as in the xml
        # file.
        weight = parser.xpath2obj("weight",
                                  parser.xpath("stationMagnitude")[_i], float)
        if weight is not None:
            contrib.weight = weight
        contrib.station_magnitude_id = stat_mag.resource_id
        event.magnitudes[0].station_magnitude_contributions.append(contrib)

    for foc_mec_el in parser.xpath("focalMechanism"):
        foc_mec = __toFocalMechanism(parser, foc_mec_el)
        if foc_mec is not None:
            event.focal_mechanisms.append(foc_mec)

    # Set the origin id for the focal mechanisms. There is only one origin per
    # SeisHub event file.
    for focmec in event.focal_mechanisms:
        focmec.triggering_origin_id = event.origins[0].resource_id

    # Add the event to the catalog
    catalog.append(event)

    return catalog
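The filename-prefix dispatch at the top of this reader can also be expressed as a mapping; a small equivalent sketch (same prefixes as above, with the '5622' special case omitted):

PREFIX_TO_TYPE = {
    "baynet": "baynet",
    "earthworm": "earthworm",
    "gof": "seiscomp3",
    "obspyck": "obspyck",
    "toni": "toni",
}

def _detect_file_type(base_name):
    for prefix, file_type in PREFIX_TO_TYPE.items():
        if base_name.lower().startswith(prefix):
            return file_type
    raise ValueError("Unknown SeisHub event file type: %s" % base_name)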
Example #8
def xcorr_pick_family(family,
                      stream,
                      shift_len=0.2,
                      min_cc=0.4,
                      min_cc_from_mean_cc_factor=None,
                      horizontal_chans=['E', 'N', '1', '2'],
                      vertical_chans=['Z'],
                      cores=1,
                      interpolate=False,
                      plot=False,
                      plotdir=None,
                      export_cc=False,
                      cc_dir=None):
    """
    Compute cross-correlation picks for detections in a family.

    :type family: `eqcorrscan.core.match_filter.family.Family`
    :param family: Family to calculate correlation picks for.
    :type stream: `obspy.core.stream.Stream`
    :param stream:
        Data stream containing data for all (or a subset of) detections in
        the Family
    :type shift_len: float
    :param shift_len:
        Shift length allowed for the pick in seconds, will be plus/minus this
        amount - default=0.2
    :type min_cc: float
    :param min_cc:
        Minimum cross-correlation value to be considered a pick, default=0.4.
    :type min_cc_from_mean_cc_factor: float
    :param min_cc_from_mean_cc_factor:
        If set to a value other than None, then the minimum cross-correlation
        value for a trace is set individually for each detection based on:
        min(detect_val / n_chans * min_cc_from_mean_cc_factor, min_cc).
    :type horizontal_chans: list
    :param horizontal_chans:
        List of channel endings for horizontal-channels, on which S-picks will
        be made.
    :type vertical_chans: list
    :param vertical_chans:
        List of channel endings for vertical-channels, on which P-picks will
        be made.
    :type cores: int
    :param cores:
        Number of cores to use in parallel processing, defaults to one.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type plot: bool
    :param plot:
        To generate a plot for every detection or not, defaults to False
    :type plotdir: str
    :param plotdir:
        Path to plotting folder, plots will be output here.
    :type export_cc: bool
    :param export_cc:
        To generate a binary file in NumPy for every detection or not,
        defaults to False
    :type cc_dir: str
    :param cc_dir:
        Path to saving folder, NumPy files will be output here.

    :return: Dictionary of picked events keyed by detection id.
    """
    picked_dict = {}
    delta = family.template.st[0].stats.delta
    detect_streams_dict = _prepare_data(family=family,
                                        detect_data=stream,
                                        shift_len=shift_len)
    detection_ids = list(detect_streams_dict.keys())
    detect_streams = [
        detect_streams_dict[detection_id] for detection_id in detection_ids
    ]
    if len(detect_streams) == 0:
        Logger.warning("No appropriate data found, check your family and "
                       "detections - make sure seed ids match")
        return picked_dict
    if len(detect_streams) != len(family):
        Logger.warning("Not all detections have matching data. "
                       "Proceeding anyway. HINT: Make sure SEED IDs match")
    # Correlation function needs a list of streams, we need to maintain order.
    ccc, chans = _concatenate_and_correlate(streams=detect_streams,
                                            template=family.template.st,
                                            cores=cores)
    for i, detection_id in enumerate(detection_ids):
        detection = [d for d in family.detections if d.id == detection_id][0]
        correlations = ccc[i]
        if export_cc:
            os.makedirs(cc_dir, exist_ok=True)
            fname = f"{detection_id}-cc.npy"
            np.save(os.path.join(cc_dir, fname), correlations)
            Logger.info(f"Saved correlation statistic to {fname} (lag_calc)")
        picked_chans = chans[i]
        detect_stream = detect_streams_dict[detection_id]
        checksum, cccsum, used_chans = 0.0, 0.0, 0
        event = Event()
        if min_cc_from_mean_cc_factor is not None:
            cc_thresh = min(
                detection.detect_val / detection.no_chans *
                min_cc_from_mean_cc_factor, min_cc)
            Logger.info('Setting minimum cc-threshold for detection %s to %s',
                        detection.id, str(cc_thresh))
        else:
            cc_thresh = min_cc
        for correlation, stachan in zip(correlations, picked_chans):
            if not stachan.used:
                continue
            tr = detect_stream.select(station=stachan.channel[0],
                                      channel=stachan.channel[1])[0]
            if interpolate:
                shift, cc_max = _xcorr_interp(correlation, dt=delta)
            else:
                cc_max = np.amax(correlation)
                shift = np.argmax(correlation) * delta
            if np.isnan(cc_max):  # pragma: no cover
                Logger.error(
                    'Problematic trace, no cross correlation possible')
                continue
            picktime = tr.stats.starttime + shift
            checksum += cc_max
            used_chans += 1
            if cc_max < cc_thresh:
                Logger.debug('Correlation of {0} is below threshold, not '
                             'using'.format(cc_max))
                continue
            cccsum += cc_max
            phase = None
            if stachan.channel[1][-1] in vertical_chans:
                phase = 'P'
            elif stachan.channel[1][-1] in horizontal_chans:
                phase = 'S'
            _waveform_id = WaveformStreamID(seed_string=tr.id)
            event.picks.append(
                Pick(waveform_id=_waveform_id,
                     time=picktime,
                     method_id=ResourceIdentifier('EQcorrscan'),
                     phase_hint=phase,
                     creation_info='eqcorrscan.core.lag_calc',
                     evaluation_mode='automatic',
                     comments=[Comment(text='cc_max={0}'.format(cc_max))]))
        event.resource_id = ResourceIdentifier(detection_id)
        event.comments.append(Comment(text="detect_val={0}".format(cccsum)))
        # Add template-name as comment to events
        event.comments.append(
            Comment(text="Detected using template: {0}".format(
                family.template.name)))
        if used_chans == detection.no_chans:  # pragma: no cover
            if detection.detect_val is not None and\
               checksum - detection.detect_val < -(0.3 * detection.detect_val):
                msg = ('lag-calc has decreased cccsum from %f to %f' %
                       (detection.detect_val, checksum))
                Logger.error(msg)
                continue
        else:
            Logger.warning(
                'Cannot check if cccsum is better, used {0} channels for '
                'detection, but {1} are used here'.format(
                    detection.no_chans, used_chans))
        picked_dict.update({detection_id: event})
    if plot:  # pragma: no cover
        for i, event in enumerate(picked_dict.values()):
            if len(event.picks) == 0:
                continue
            plot_stream = detect_streams[i].copy()
            template_plot = family.template.st.copy()
            pick_stachans = [(pick.waveform_id.station_code,
                              pick.waveform_id.channel_code)
                             for pick in event.picks]
            for tr in plot_stream:
                if (tr.stats.station, tr.stats.channel) \
                        not in pick_stachans:
                    plot_stream.remove(tr)
            for tr in template_plot:
                if (tr.stats.station, tr.stats.channel) \
                        not in pick_stachans:
                    template_plot.remove(tr)
            if plotdir is not None:
                if not os.path.isdir(plotdir):
                    os.makedirs(plotdir)
                savefile = "{plotdir}/{rid}.png".format(
                    plotdir=plotdir, rid=event.resource_id.id)
                plot_repicked(template=template_plot,
                              picks=event.picks,
                              det_stream=plot_stream,
                              show=False,
                              save=True,
                              savefile=savefile)
            else:
                plot_repicked(template=template_plot,
                              picks=event.picks,
                              det_stream=plot_stream,
                              show=True)
    return picked_dict
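A minimal usage sketch (hedged: assumes an EQcorrscan Party saved to disk and a matching continuous stream; file names are hypothetical):

from obspy import read
from eqcorrscan.core.match_filter import read_party

party = read_party('detections.tgz')  # hypothetical saved detections
st = read('continuous_day.ms')  # hypothetical continuous data
picked = xcorr_pick_family(family=party[0], stream=st,
                           shift_len=0.2, min_cc=0.5, interpolate=True)
for detection_id, event in picked.items():
    print(detection_id, len(event.picks))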
Example #9
def _dbs_associator(start_time,
                    end_time,
                    moving_window,
                    tbl,
                    pair_n,
                    save_dir,
                    station_list,
                    consider_combination=False):

    if consider_combination:

        Y2000_writer = open(os.path.join(save_dir, "Y2000.phs"), "w")
        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 0
        tt = st
        #pbar = tqdm(total= int(np.ceil(total_t.total_seconds()/moving_window)), ncols=100)
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]

            #    pbar.update()
            if len(detections) >= pair_n:
                evid += 1

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                if len(detections) / pair_n <= 2:
                    ch = pair_n
                else:
                    ch = int(len(detections) - pair_n)

                picks = []
                for ns in range(ch, len(detections) + 1):
                    comb = 0
                    for ind in list(combinations(detections.index, ns)):
                        comb += 1
                        selected_detections = detections.loc[ind, :]
                        sorted_detections = selected_detections.sort_values(
                            'p_arrival_time')

                        Y2000_writer.write(
                            "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                            % (int(yr), int(mo), int(dy), int(hr), int(mi),
                               float(sec), float(st_lat_DMS[0]),
                               str(st_lat_DMS[1]), float(st_lat_DMS[2]),
                               float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                               float(st_lon_DMS[2]), float(depth), float(mag)))

                        station_buffer = []
                        row_buffer = []
                        tr_names = []
                        tr_names2 = []
                        for _, row in sorted_detections.iterrows():

                            trace_name = row['traceID'] + '*' + row[
                                'station'] + '*' + str(row['event_start_time'])
                            p_unc = row['p_unc']
                            p_prob = row['p_prob']
                            s_unc = row['s_unc']
                            s_prob = row['s_prob']

                            station = "{:<5}".format(row['station'])
                            network = "{:<2}".format(row['network'])
                            try:
                                yrp = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mop = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dyp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mip = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_p = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['p_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="P")
                                picks.append(p)

                                if p_unc:
                                    Pweihgt = _weighcalculator_prob(
                                        p_prob * (1 - p_unc))
                                else:
                                    Pweihgt = _weighcalculator_prob(p_prob)
                                try:
                                    Pweihgt = int(Pweihgt)
                                except Exception:
                                    Pweihgt = 4

                            except Exception:
                                sec_p = None

                            try:
                                yrs = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mos = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dys = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrs = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mis = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_s = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['p_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="S")
                                picks.append(p)

                                if s_unc:
                                    Sweihgt = _weighcalculator_prob(
                                        s_prob * (1 - s_unc))
                                else:
                                    Sweihgt = _weighcalculator_prob(s_prob)
                                try:
                                    Sweihgt = int(Sweihgt)
                                except Exception:
                                    Sweihgt = 4

                            except Exception:
                                sec_s = None

                            if row['station'] not in station_buffer:
                                tr_names.append(trace_name)
                                station_buffer.append(row['station'])
                                if sec_s:
                                    Y2000_writer.write(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis),
                                         float(0.0), float(sec_s), Sweihgt))
                                if sec_p:
                                    Y2000_writer.write(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweihgt, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                            else:
                                tr_names2.append(trace_name)
                                if sec_s:
                                    row_buffer.append(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis), 0.0,
                                         float(sec_s), Sweihgt))
                                if sec_p:
                                    row_buffer.append(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweihgt, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                        Y2000_writer.write("{:<62}".format(' ') + "%10d" %
                                           (evid) + '\n')

                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

            tt += timedelta(seconds=moving_window)

    #   plt.scatter(LTTP, TTP, s=10, marker='o', c='b', alpha=0.4, label='P')
    #   plt.scatter(LTTS, TTS, s=10, marker='o', c='r', alpha=0.4, label='S')
    #   plt.legend('upper right')
    #   plt.show()

        print('The Number of Realizations: ' + str(evid) + '\n', flush=True)

        jj = json.dumps(traceNmae_dic)
        # os.path.join handles the separator on all platforms
        f = open(os.path.join(save_dir, "traceNmae_dic.json"), "w")
        f.write(jj)
        f.close()

    else:
        Y2000_writer = open(os.path.join(save_dir, "Y2000.phs"), "w")

        cat = Catalog()
        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 200000
        evidd = 100000
        tt = st
        #pbar = tqdm(total= int(np.ceil(total_t.total_seconds()/moving_window)))
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pair_nt = pair_n
            for _, row in detections.iterrows():
                station = "{:<5}".format(row['station'])
                if station[0] is "R":
                    pair_nt = 3
            #pbar.update()
            if len(detections) >= pair_nt:

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                Y2000_writer.write(
                    "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                    % (int(yr), int(mo), int(dy), int(hr), int(mi), float(sec),
                       float(st_lat_DMS[0]), str(st_lat_DMS[1]),
                       float(st_lat_DMS[2]), float(st_lon_DMS[0]),
                       str(st_lon_DMS[1]), float(
                           st_lon_DMS[2]), float(depth), float(mag)))
                event = Event()
                origin = Origin(time=UTCDateTime(
                    detections.iloc[0]['event_start_time']),
                                longitude=detections.iloc[0]['stlon'],
                                latitude=detections.iloc[0]['stlat'],
                                method="EqTransformer")
                event.origins.append(origin)

                station_buffer = []
                row_buffer = []
                sorted_detections = detections.sort_values('p_arrival_time')
                tr_names = []
                tr_names2 = []
                picks = []
                for _, row in sorted_detections.iterrows():
                    trace_name = row['traceID'] + '*' + row[
                        'station'] + '*' + str(row['event_start_time'])
                    p_unc = row['p_unc']
                    p_prob = row['p_prob']
                    s_unc = row['s_unc']
                    s_prob = row['s_prob']

                    station = "{:<5}".format(row['station'])
                    network = "{:<2}".format(row['network'])

                    try:
                        yrp = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mop = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dyp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mip = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_p = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['p_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="P",
                                 method_id="Transformer")
                        picks.append(p)

                        if p_unc:
                            Pweight = _weighcalculator_prob(
                                p_prob * (1 - p_unc))
                        else:
                            Pweight = _weighcalculator_prob(p_prob)
                        try:
                            Pweight = int(Pweight)
                        except Exception:
                            Pweight = 4

                    except Exception:
                        sec_p = None

                    try:
                        yrs = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mos = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dys = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrs = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mis = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_s = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['s_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="S",
                                 method_id="Transformer")
                        picks.append(p)

                        if s_unc:
                            Sweight = _weighcalculator_prob(
                                s_prob * (1 - s_unc))
                        else:
                            Sweight = _weighcalculator_prob(s_prob)
                        try:
                            Sweight = int(Sweight)
                        except Exception:
                            Sweight = 4

                    except Exception:
                        sec_s = None

                    if row['station'] not in station_buffer:
                        tr_names.append(trace_name)
                        station_buffer.append(row['station'])
                        if sec_s:
                            Y2000_writer.write(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), float(0.0),
                                   float(sec_s), Sweight))
                        if sec_p:
                            Y2000_writer.write(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweight, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                    else:
                        tr_names2.append(trace_name)
                        if sec_s:
                            row_buffer.append(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), 0.0,
                                   float(sec_s), Sweight))
                        if sec_p:
                            row_buffer.append(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweight, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                event.picks = picks
                event.preferred_origin_id = event.origins[0].resource_id
                cat.append(event)

                evid += 1
                Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                   '\n')
                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_nt:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    evid += 1
                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

                elif len(row_buffer) < pair_nt and len(row_buffer) != 0:
                    evidd += 1
                    traceNmae_dic[str(evidd)] = tr_names2

            elif len(detections) < pair_nt and len(detections) != 0:
                tr_names = []
                for _, row in detections.iterrows():
                    trace_name = row['traceID']
                    tr_names.append(trace_name)
                evidd += 1
                traceNmae_dic[str(evidd)] = tr_names

            tt += timedelta(seconds=moving_window)

        print('The Number of Associated Events: ' + str(evid - 200000) + '\n',
              flush=True)

        jj = json.dumps(traceNmae_dic)
        # os.path.join builds the correct separator on every platform
        with open(os.path.join(save_dir, "traceNmae_dic.json"), "w") as f:
            f.write(jj)
        cat.write(save_dir + "/associations.xml", format="QUAKEML")
        qml = quakeml.QuakeML.load_xml(filename=save_dir + "/associations.xml")
        events = qml.get_pyrocko_events()
        model.event.dump_events(events, filename=save_dir + '/events.pf')
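The helper _decimalDegrees2DMS used above is not part of this snippet. Judging by how the Y2000 writer consumes its return value (whole degrees, a hemisphere letter, decimal minutes), a minimal sketch might look like the following; the name and return convention are assumptions, not the library's actual implementation.

def _decimal_degrees_to_dms(value, kind):
    # Hypothetical reconstruction: convert decimal degrees to the
    # (degrees, hemisphere, decimal minutes) triple the Y2000 writer expects.
    if kind == "Latitude":
        hemisphere = "S" if value < 0 else "N"
    else:
        hemisphere = "W" if value < 0 else "E"
    value = abs(value)
    degrees = int(value)
    minutes = (value - degrees) * 60.0
    return degrees, hemisphere, minutes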
Example #10
0
def setEventData(eventParser, arrivals, count):
    global originCount
    global eventCount
    global pickCount
    creation_info = CreationInfo(
        author='niket_engdahl_parser',
        creation_time=UTCDateTime(),
        agency_uri=ResourceIdentifier(id='smi:engdahl.ga.gov.au/ga-engdahl'),
        agency_id='ga-engdahl')

    #   magnitudeSurface = Magnitude(resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/'+str(originCount)+'#netMag.Ms'),
    #                         mag=eventParser.ms,
    #                         magnitude_type='Ms',
    #                         origin_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/'+str(originCount)),
    #                         azimuthal_gap=eventParser.openaz2,
    #                         creation_info=creation_info)
    origin = Origin(
        resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                       str(originCount)),
        time=UTCDateTime(int(str(2000 + int(eventParser.iyr))),
                         int(eventParser.mon), int(eventParser.iday),
                         int(eventParser.ihr), int(eventParser.min),
                         int(eventParser.sec.split('.')[0]),
                         int(eventParser.sec.split('.')[1] + '0')),
        longitude=eventParser.glon,
        latitude=eventParser.glat,
        depth=float(eventParser.depth) *
        1000,  # engdahl files report kms, obspy expects m
        depth_errors=eventParser.sedep,
        method_id=ResourceIdentifier(id='EHB'),
        earth_model_id=ResourceIdentifier(id='ak135'),
        quality=OriginQuality(associated_phase_count=len(arrivals),
                              used_phase_count=len(arrivals),
                              standard_error=eventParser.se,
                              azimuthal_gap=eventParser.openaz2),
        evaluation_mode='automatic',
        creation_info=creation_info)

    magnitude = Magnitude(
        resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                       str(originCount) + '#netMag.Mb'),
        mag=eventParser.mb,
        magnitude_type='Mb',
        origin_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/origin/' +
                                     str(originCount)),
        azimuthal_gap=eventParser.openaz1,
        creation_info=creation_info)

    originCount += 1

    pickList = []
    arrivalList = []
    pPhaseArrival = None
    for arrParser in arrivals:
        pickOnset = None
        pol = None

        if arrParser.year and arrParser.month and arrParser.day and arrParser.station:
            pPhaseArrival = arrParser
        else:
            arrParser.year = pPhaseArrival.year
            arrParser.day = pPhaseArrival.day
            arrParser.month = pPhaseArrival.month
            arrParser.station = pPhaseArrival.station
            arrParser.delta = pPhaseArrival.delta
            arrParser.dtdd = pPhaseArrival.dtdd
            arrParser.backaz = pPhaseArrival.backaz
            arrParser.focalDip = pPhaseArrival.focalDip
            arrParser.angleAzimuth = pPhaseArrival.angleAzimuth

        if arrParser.phase1 == 'LR' or arrParser.phase2 == 'LR' or arrParser.hour == '24':
            continue

        if arrParser.phase1.startswith('i'):
            pickOnset = PickOnset.impulsive
            if arrParser.fm == '+':
                pol = PickPolarity.positive
            elif arrParser.fm == '-':
                pol = PickPolarity.negative
        elif arrParser.phase1.startswith('e'):
            pickOnset = PickOnset.emergent

        pick = Pick(
            resource_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/pick/' +
                                           str(pickCount)),
            time=UTCDateTime(int(str(2000 + int(arrParser.year))),
                             int(arrParser.month), int(arrParser.day),
                             int(arrParser.hour), int(arrParser.minute),
                             int(arrParser.second.split('.')[0]),
                             int(arrParser.second.split('.')[1] + '0')),
            waveform_id=WaveformStreamID(network_code='',
                                         station_code=arrParser.station,
                                         channel_code='BHZ'),
            method_id=ResourceIdentifier('STA/LTA'),
            backazimuth=arrParser.backaz if arrParser.backaz else None,
            onset=pickOnset,
            phase_hint=arrParser.phase,
            polarity=pol,
            evaluation_mode='automatic',
            # TO-DO: populate all the remaining fields as key/value pairs
            comments=[Comment(
                text='populate all the remaining fields here as key value')],
            creation_info=creation_info)
        if not arrParser.backaz:
            print "arrParser.backaz is empty. printing the arrParser for debugging"
        pickCount += 1
        pickList.append(pick)

        arrival = Arrival(
            pick_id=ResourceIdentifier(id='smi:engdahl.ga.gov.au/pick/' +
                                       str(pickCount - 1)),
            phase=arrParser.phase if arrParser.phase else None,
            azimuth=arrParser.backaz if arrParser.backaz else None,
            distance=arrParser.delta if arrParser.delta else None,
            # if the * has some significance, it should be accounted for. ignoring for now.
            time_residual=arrParser.residual.rstrip('*'),
            time_weight=arrParser.wgt if arrParser.wgt else None,
            backazimuth_weight=arrParser.wgt if arrParser.wgt else None)
        arrivalList.append(arrival)
        if not arrParser.wgt:
            print "arrParser.wgt is empty. printing the arrParser for debugging"


#          pprint.pprint(arrParser)

    origin.arrivals = arrivalList

    event = Event(resource_id=ResourceIdentifier(
        id='smi:engdahl.ga.gov.au/event/' + str(eventCount)),
                  creation_info=creation_info,
                  event_type='earthquake')

    eventCount += 1

    event.picks = pickList
    event.origins = [
        origin,
    ]
    event.magnitudes = [
        magnitude,
    ]
    event.preferred_origin_id = origin.resource_id
    event.preferred_magnitude_id = magnitude.resource_id
    return event
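A usage sketch for the parser above: collecting the returned events into an ObsPy Catalog and writing QuakeML. The iterable parsed_blocks is hypothetical; the real driver code is not shown here.

from obspy.core.event import Catalog

catalog = Catalog()
for event_parser, arrivals in parsed_blocks:  # hypothetical iterable
    catalog.append(setEventData(event_parser, arrivals, len(arrivals)))
catalog.write("engdahl_events.xml", format="QUAKEML")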
Example #11
0
    def test_creating_minimal_QuakeML_with_MT(self):
        """
        Tests the creation of a minimal QuakeML containing origin, magnitude
        and moment tensor.
        """
        # Rotate into physical domain
        lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1)
        mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18
        scalar_moment = math.sqrt(
            mrr ** 2 + mtt ** 2 + mpp ** 2 + mtr ** 2 + mpr ** 2 + mtp ** 2)
        moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1)

        # Initialise event
        ev = Event(event_type="earthquake")

        ev_origin = Origin(time=org_time, latitude=lat, longitude=lon,
                           depth=depth, resource_id=ResourceIdentifier())
        ev.origins.append(ev_origin)

        # populate event moment tensor
        ev_tensor = Tensor(m_rr=mrr, m_tt=mtt, m_pp=mpp, m_rt=mtr, m_rp=mpr,
                           m_tp=mtp)

        ev_momenttensor = MomentTensor(tensor=ev_tensor)
        ev_momenttensor.scalar_moment = scalar_moment
        ev_momenttensor.derived_origin_id = ev_origin.resource_id

        ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor)
        ev.focal_mechanisms.append(ev_focalmechanism)

        # populate event magnitude
        ev_magnitude = Magnitude()
        ev_magnitude.mag = moment_magnitude
        ev_magnitude.magnitude_type = 'Mw'
        ev_magnitude.evaluation_mode = 'automatic'
        ev.magnitudes.append(ev_magnitude)

        # write QuakeML file
        cat = Catalog(events=[ev])
        memfile = io.BytesIO()
        cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML)

        memfile.seek(0, 0)
        new_cat = _read_quakeml(memfile)
        self.assertEqual(len(new_cat), 1)
        event = new_cat[0]
        self.assertEqual(len(event.origins), 1)
        self.assertEqual(len(event.magnitudes), 1)
        self.assertEqual(len(event.focal_mechanisms), 1)
        org = event.origins[0]
        mag = event.magnitudes[0]
        fm = event.focal_mechanisms[0]
        self.assertEqual(org.latitude, lat)
        self.assertEqual(org.longitude, lon)
        self.assertEqual(org.depth, depth)
        self.assertEqual(org.time, org_time)
        # Moment tensor.
        mt = fm.moment_tensor.tensor
        self.assertTrue(abs(fm.moment_tensor.scalar_moment - scalar_moment) /
                        scalar_moment < 1E-10)
        self.assertEqual(mt.m_rr, mrr)
        self.assertEqual(mt.m_pp, mpp)
        self.assertEqual(mt.m_tt, mtt)
        self.assertEqual(mt.m_rt, mtr)
        self.assertEqual(mt.m_rp, mpr)
        self.assertEqual(mt.m_tp, mtp)
        # Mag
        self.assertAlmostEqual(mag.mag, moment_magnitude)
        self.assertEqual(mag.magnitude_type, "Mw")
        self.assertEqual(mag.evaluation_mode, "automatic")
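The factor 0.667 in the test above approximates the 2/3 in the Hanks & Kanamori (1979) moment magnitude relation. A small standalone sketch of the exact relation and its inverse, as a round-trip check (the toy moment value is made up):

import math

def moment_to_mw(scalar_moment):
    # Hanks & Kanamori (1979); scalar_moment in N*m
    return (2.0 / 3.0) * (math.log10(scalar_moment) - 9.1)

def mw_to_moment(mw):
    return 10.0 ** (1.5 * mw + 9.1)

m0 = 1.1e18
assert abs(mw_to_moment(moment_to_mw(m0)) - m0) / m0 < 1e-9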
Example #12
0
def plot_event(single_event,
               sds_root,
               time_delay,
               time_before,
               _network_code='*',
               _station_code='*',
               _location_code='*',
               _channel_code='*Z'):

    ### Define pick colors

    pick_colormap = {'P': 'k', 'S': 'r'}

    if not isinstance(single_event, Event):
        raise Exception('Event given is not an ObsPy Event')

    ### Read SDS client

    client = sds_client(sds_root, sds_type='D', format='MSEED')

    ### Read starttime from preferred origin

    origin_pref = [
        x for x in single_event.origins
        if x.resource_id == single_event.preferred_origin_id
    ][0]
    _starttime = origin_pref.time - time_before
    _endtime = _starttime + time_delay

    st = client.get_waveforms(_network_code, _station_code, _location_code,
                              _channel_code, _starttime, _endtime)

    ### Pre-process streams

    st.sort(keys=['network', 'station', 'channel'])
    st.taper(type="cosine", max_percentage=0.1)
    st.filter("bandpass", freqmin=3, freqmax=30, corners=4)
    st.taper(type="cosine", max_percentage=0.1)

    #st.plot(starttime=origin_pref.time, endtime=origin_pref.time+3, fig=fig2)

    ### Start plotting waveforms

    fig = plt.figure(figsize=(20, 12))

    st_num = len(st)
    axs = []

    for i, tr in enumerate(st):
        net, sta, loc, cha = tr.id.split(".")
        if loc == '':
            loc = None

        starttime_relative = tr.stats.starttime - _starttime
        sampletimes = np.arange(
            starttime_relative,
            starttime_relative + (tr.stats.delta * tr.stats.npts),
            tr.stats.delta)
        # np.arange with a float step can overshoot by one sample
        if len(sampletimes) == tr.stats.npts + 1:
            sampletimes = sampletimes[:-1]

        if i == 0:
            ax = fig.add_subplot(st_num, 1, i + 1)
        else:
            ax = fig.add_subplot(st_num, 1, i + 1, sharex=axs[0])

        axs.append(ax)
        ax.plot(sampletimes, tr.data)
        ### Add label
        ax.text(0.1, 0.90, tr.id, transform=ax.transAxes)

    fig.subplots_adjust(hspace=0)
    axs[0].set_xlim(0, sampletimes[-1])

    ### Plot Picks on top of the waveforms

    picks = single_event.picks
    arrivals = single_event.origins[0].arrivals

    # Select picks that are in arrivals

    picks_sel = [getPickForArrival(picks, arrival) for arrival in arrivals]

    for i, tr in enumerate(st):
        ax = axs[i]
        net, sta, loc, chan = tr.id.split('.')
        if net == '':
            net = None
        if sta == '':
            continue
        if loc == '':
            loc = None
        print(sta)
        picks_tr = getPicks(picks_sel, net, sta, loc)
        for pick_tr in picks_tr:
            phase_pick = pick_tr.phase_hint
            rel_time_pick = pick_tr.time - _starttime
            ax.axvline(x=rel_time_pick,
                       ymin=0,
                       ymax=1,
                       color=pick_colormap[phase_pick],
                       lw=2)
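A hypothetical driver for plot_event: read a QuakeML catalog and plot the first event from a local SDS archive. The file path, archive root, and window lengths are made-up values.

from obspy import read_events
import matplotlib.pyplot as plt

catalog = read_events("associations.xml")  # assumed QuakeML file
plot_event(catalog[0],
           sds_root="/data/SDS",           # assumed SDS archive root
           time_delay=120.0,               # seconds of data to plot
           time_before=10.0)               # seconds before origin time
plt.show()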
Example #13
0
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.nlloc` submodule front
        page in the documentation pages.

    :type filename: str or file-like object
    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates
        (longitude, latitude, depth in kilometers).
        If left `None` NonLinLoc (x, y, z) output is left unchanged (e.g. if
        it is in geographical coordinates already like for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z and return a
        tuple of three values (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        `pick_id` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume its a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except:
            try:
                data = filename.decode()
            except:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_XX = float(line.split()[7])
    covariance_YY = float(line.split()[13])
    covariance_ZZ = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    event.creation_info.version = version
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_XX))
    except ValueError:
        if covariance_XX < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_YY))
    except ValueError:
        if covariance_YY < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_ZZ) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
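The covariance handling above is the core of the uncertainty conversion: NonLinLoc reports variances in km**2, which become 1-sigma errors in great-circle degrees (horizontal) and metres (depth). A self-contained sketch with toy covariance values standing in for the STATISTICS line:

from math import sqrt
from obspy.geodetics import kilometer2degrees

# Toy variances in km**2 (made-up values)
covariance_xx, covariance_yy, covariance_zz = 0.25, 0.36, 1.00

lon_err_deg = kilometer2degrees(sqrt(covariance_xx))  # longitude, degrees
lat_err_deg = kilometer2degrees(sqrt(covariance_yy))  # latitude, degrees
depth_err_m = sqrt(covariance_zz) * 1e3               # depth, metres (68%)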
Example #14
0
def match_filter(template_names, template_list, st, threshold,
                 threshold_type, trig_int, plotvar, plotdir='.', cores=1,
                 debug=0, plot_format='png', output_cat=False,
                 extract_detections=False, arg_check=True):
    """
    Main matched-filter detection function.

    Over-arching code to run the correlations of given templates with a \
    day of seismic data and output the detections based on a given threshold.
    For a functional example see the tutorials.

    :type template_names: list
    :param template_names: List of template names in the same order as \
        template_list
    :type template_list: list
    :param template_list: A list of templates of which each template is a \
        Stream of obspy traces containing seismic data and header information.
    :type st: obspy.core.stream.Stream
    :param st: A Stream object containing all the data available and \
        required for the correlations with templates given.  For efficiency \
        this should contain no excess traces which are not in one or more of \
        the templates.  This will now remove excess traces internally, but \
        will copy the stream and work on the copy, leaving your input stream \
        untouched.
    :type threshold: float
    :param threshold: A threshold value set based on the threshold_type
    :type threshold_type: str
    :param threshold_type: The type of threshold to be used, can be MAD, \
        absolute or av_chan_corr.  See Note on thresholding below.
    :type trig_int: float
    :param trig_int: Minimum gap between detections in seconds.
    :type plotvar: bool
    :param plotvar: Turn plotting on or off
    :type plotdir: str
    :param plotdir: Path to plotting folder, plots will be output here, \
        defaults to run location.
    :type cores: int
    :param cores: Number of cores to use
    :type debug: int
    :param debug: Debug output level, the bigger the number, the more the \
        output.
    :type plot_format: str
    :param plot_format: Specify format of output plots if saved
    :type output_cat: bool
    :param output_cat: Specifies if matched_filter will output an \
        obspy.Catalog class containing events for each detection. Default \
        is False, in which case matched_filter will output a list of \
        detection classes, as normal.
    :type extract_detections: bool
    :param extract_detections: Specifies whether or not to return a list of \
        streams, one stream per detection.
    :type arg_check: bool
    :param arg_check: Check arguments, defaults to True, but if running in \
        bulk, and you are certain of your arguments, then set to False.\n

    .. rubric::
        If neither `output_cat` or `extract_detections` are set to `True`,
        then only the list of :class:`eqcorrscan.core.match_filter.DETECTION`'s
        will be output:
    :return: :class:`eqcorrscan.core.match_filter.DETECTION`'s detections for
        each detection made.
    :rtype: list
    .. rubric::
        If `output_cat` is set to `True`, then the
        :class:`obspy.core.event.Catalog` will also be output:
    :return: Catalog containing events for each detection, see above.
    :rtype: :class:`obspy.core.event.Catalog`
    .. rubric::
        If `extract_detections` is set to `True` then the list of
        :class:`obspy.core.stream.Stream`'s will also be output.
    :return:
        list of :class:`obspy.core.stream.Stream`'s for each detection, see
        above.
    :rtype: list

    .. warning::
        Plotting within the match-filter routine uses the Agg backend
        with interactive plotting turned off.  This is because the function
        is designed to work in bulk.  If you wish to turn interactive
        plotting on you must import matplotlib in your script first; when you
        then import match_filter you will get a warning that the call to
        matplotlib has no effect, which means that match_filter has not
        changed the plotting behaviour.

    .. note::
        **Thresholding:**

        **MAD** threshold is calculated as the:

        .. math::

            threshold {\\times} (median(abs(cccsum)))

        where :math:`cccsum` is the cross-correlation sum for a given template.

        **absolute** threshold is a true absolute threshold based on the
        cccsum value.

        **av_chan_corr** is based on the mean values of single-channel
        cross-correlations assuming all data are present as required for the
        template, e.g:

        .. math::

            av\_chan\_corr\_thresh=threshold \\times (cccsum / len(template))

        where :math:`template` is a single template from the input and the
        length is the number of channels within this template.

    .. note::
        The output_cat flag will create an :class:`obspy.core.event.Catalog`
        containing one event for each
        :class:`eqcorrscan.core.match_filter.DETECTION`'s generated by
        match_filter. Each event will contain a number of comments dealing
        with correlation values and channels used for the detection. Each
        channel used for the detection will have a corresponding
        :class:`obspy.core.event.Pick` which will contain time and
        waveform information. **HOWEVER**, the user should note that, at
        present, the pick times do not account for the
        prepick times inherent in each template. For example, if a template
        trace starts 0.1 seconds before the actual arrival of that phase,
        then the pick time generated by match_filter for that phase will be
        0.1 seconds early. We are working on a solution that will involve
        saving templates alongside associated metadata.
    """
    import matplotlib
    matplotlib.use('Agg')
    if arg_check:
        # Check the arguments to be nice - if arguments wrong type the parallel
        # output for the error won't be useful
        if not type(template_names) == list:
            raise MatchFilterError('template_names must be of type: list')
        if not type(template_list) == list:
            raise MatchFilterError('templates must be of type: list')
        if not len(template_list) == len(template_names):
            raise MatchFilterError('Not the same number of templates as names')
        for template in template_list:
            if not type(template) == Stream:
                msg = 'template in template_list must be of type: ' +\
                      'obspy.core.stream.Stream'
                raise MatchFilterError(msg)
        if not type(st) == Stream:
            msg = 'st must be of type: obspy.core.stream.Stream'
            raise MatchFilterError(msg)
        if str(threshold_type) not in [str('MAD'), str('absolute'),
                                       str('av_chan_corr')]:
            msg = 'threshold_type must be one of: MAD, absolute, av_chan_corr'
            raise MatchFilterError(msg)

    # Copy the stream here because we will muck about with it
    stream = st.copy()
    templates = copy.deepcopy(template_list)
    _template_names = copy.deepcopy(template_names)
    # Debug option to confirm that the channel names match those in the
    # templates
    if debug >= 2:
        template_stachan = []
        data_stachan = []
        for template in templates:
            for tr in template:
                if isinstance(tr.data, np.ma.core.MaskedArray):
                    raise MatchFilterError('Template contains masked array,'
                                           ' split first')
                template_stachan.append(tr.stats.station + '.' +
                                        tr.stats.channel)
        for tr in stream:
            data_stachan.append(tr.stats.station + '.' + tr.stats.channel)
        template_stachan = list(set(template_stachan))
        data_stachan = list(set(data_stachan))
        if debug >= 3:
            print('I have template info for these stations:')
            print(template_stachan)
            print('I have daylong data for these stations:')
            print(data_stachan)
    # Perform a check that the continuous data are all the same length
    min_start_time = min([tr.stats.starttime for tr in stream])
    max_end_time = max([tr.stats.endtime for tr in stream])
    longest_trace_length = stream[0].stats.sampling_rate * (max_end_time -
                                                            min_start_time)
    for tr in stream:
        if not tr.stats.npts == longest_trace_length:
            msg = 'Data are not equal length, padding short traces'
            warnings.warn(msg)
            start_pad = np.zeros(int(tr.stats.sampling_rate *
                                     (tr.stats.starttime - min_start_time)))
            end_pad = np.zeros(int(tr.stats.sampling_rate *
                                   (max_end_time - tr.stats.endtime)))
            tr.data = np.concatenate([start_pad, tr.data, end_pad])
    # Perform check that all template lengths are internally consistent
    for i, temp in enumerate(template_list):
        if len(set([tr.stats.npts for tr in temp])) > 1:
            msg = ('Template %s contains traces of differing length, this is '
                   'not currently supported' % _template_names[i])
            raise MatchFilterError(msg)
    outtic = time.perf_counter()  # time.clock() was removed in Python 3.8
    if debug >= 2:
        print('Ensuring all template channels have matches in long data')
    template_stachan = {}
    # Work out what station-channel pairs are in the templates, including
    # duplicate station-channel pairs.  We will use this information to fill
    # all templates with the same station-channel pairs as required by
    # _template_loop.
    for template in templates:
        stachans_in_template = []
        for tr in template:
            stachans_in_template.append((tr.stats.network, tr.stats.station,
                                         tr.stats.location, tr.stats.channel))
        stachans_in_template = dict(Counter(stachans_in_template))
        for stachan in stachans_in_template.keys():
            if stachan not in template_stachan.keys():
                template_stachan.update({stachan:
                                         stachans_in_template[stachan]})
            elif stachans_in_template[stachan] > template_stachan[stachan]:
                template_stachan.update({stachan:
                                         stachans_in_template[stachan]})
    # Remove un-matched channels from templates.
    _template_stachan = copy.deepcopy(template_stachan)
    for stachan in template_stachan.keys():
        if not stream.select(network=stachan[0], station=stachan[1],
                             location=stachan[2], channel=stachan[3]):
            # Remove stachan from list of dictionary of template_stachans
            _template_stachan.pop(stachan)
            # Remove template traces rather than adding NaN data
            for template in templates:
                if template.select(network=stachan[0], station=stachan[1],
                                   location=stachan[2], channel=stachan[3]):
                    for tr in template.select(network=stachan[0],
                                              station=stachan[1],
                                              location=stachan[2],
                                              channel=stachan[3]):
                        template.remove(tr)
    template_stachan = _template_stachan
    # Remove un-needed channels from continuous data.
    for tr in stream:
        if not (tr.stats.network, tr.stats.station,
                tr.stats.location, tr.stats.channel) in \
                template_stachan.keys():
            stream.remove(tr)
    # Check for duplicate channels
    stachans = [(tr.stats.network, tr.stats.station,
                 tr.stats.location, tr.stats.channel) for tr in stream]
    c_stachans = Counter(stachans)
    for key in c_stachans.keys():
        if c_stachans[key] > 1:
            msg = ('Multiple channels for %s.%s.%s.%s, likely a data issue'
                   % (key[0], key[1], key[2], key[3]))
            raise MatchFilterError(msg)
    # Pad out templates to have all channels
    for template, template_name in zip(templates, _template_names):
        if len(template) == 0:
            msg = ('No channels matching in continuous data for '
                   'template: ' + template_name)
            warnings.warn(msg)
            templates.remove(template)
            _template_names.remove(template_name)
            continue
        for stachan in template_stachan.keys():
            number_of_channels = len(template.select(network=stachan[0],
                                                     station=stachan[1],
                                                     location=stachan[2],
                                                     channel=stachan[3]))
            if number_of_channels < template_stachan[stachan]:
                missed_channels = template_stachan[stachan] -\
                                  number_of_channels
                nulltrace = Trace()
                nulltrace.stats.update(
                    {'network': stachan[0], 'station': stachan[1],
                     'location': stachan[2], 'channel': stachan[3],
                     'sampling_rate': template[0].stats.sampling_rate,
                     'starttime': template[0].stats.starttime})
                nulltrace.data = np.array([np.nan] * len(template[0].data),
                                          dtype=np.float32)
                for dummy in range(missed_channels):
                    template += nulltrace
        template.sort()
        # Quick check that this has all worked
        if len(template) != max([len(t) for t in templates]):
            raise MatchFilterError('Internal error forcing same template '
                                   'lengths, report this error.')
    if debug >= 2:
        print('Starting the correlation run for this day')
    if debug >= 4:
        for template in templates:
            print(template)
        print(stream)
    [cccsums, no_chans, chans] = _channel_loop(templates=templates,
                                               stream=stream,
                                               cores=cores,
                                               debug=debug)
    if len(cccsums[0]) == 0:
        raise MatchFilterError('Correlation has not run, zero length cccsum')
    outtoc = time.perf_counter()
    print(' '.join(['Looping over templates and streams took:',
                    str(outtoc - outtic), 's']))
    if debug >= 2:
        print(' '.join(['The shape of the returned cccsums is:',
                        str(np.shape(cccsums))]))
        print(' '.join(['This is from', str(len(templates)), 'templates']))
        print(' '.join(['Correlated with', str(len(stream)),
                        'channels of data']))
    detections = []
    if output_cat:
        det_cat = Catalog()
    for i, cccsum in enumerate(cccsums):
        template = templates[i]
        if str(threshold_type) == str('MAD'):
            rawthresh = threshold * np.median(np.abs(cccsum))
        elif str(threshold_type) == str('absolute'):
            rawthresh = threshold
        elif str(threshold_type) == str('av_chan_corr'):
            rawthresh = threshold * no_chans[i]
        # Findpeaks returns a list of tuples in the form [(cccsum, sample)]
        print(' '.join(['Threshold is set at:', str(rawthresh)]))
        print(' '.join(['Max of data is:', str(max(cccsum))]))
        print(' '.join(['Mean of data is:', str(np.mean(cccsum))]))
        if np.abs(np.mean(cccsum)) > 0.05:
            warnings.warn('Mean is not zero!  Check this!')
        # Set up a trace object for the cccsum as this is easier to plot and
        # maintains timing
        if plotvar:
            _match_filter_plot(stream=stream, cccsum=cccsum,
                               template_names=_template_names,
                               rawthresh=rawthresh, plotdir=plotdir,
                               plot_format=plot_format, i=i)
        if debug >= 4:
            print(' '.join(['Saved the cccsum to:', _template_names[i],
                            stream[0].stats.starttime.datetime.
                           strftime('%Y%j')]))
            np.save(_template_names[i] +
                    stream[0].stats.starttime.datetime.strftime('%Y%j'),
                    cccsum)
        tic = time.perf_counter()
        if max(cccsum) > rawthresh:
            peaks = findpeaks.find_peaks2_short(
                arr=cccsum, thresh=rawthresh,
                trig_int=trig_int * stream[0].stats.sampling_rate, debug=debug,
                starttime=stream[0].stats.starttime,
                samp_rate=stream[0].stats.sampling_rate)
        else:
            print('No peaks found above threshold')
            peaks = False
        toc = time.perf_counter()
        if debug >= 1:
            print(' '.join(['Finding peaks took:', str(toc - tic), 's']))
        if peaks:
            for peak in peaks:
                detecttime = stream[0].stats.starttime +\
                    peak[1] / stream[0].stats.sampling_rate
                # Detect time must be valid QuakeML uri within resource_id.
                # This will write a formatted string which is still
                # readable by UTCDateTime
                rid = ResourceIdentifier(id=_template_names[i] + '_' +
                                         str(detecttime.
                                             strftime('%Y%m%dT%H%M%S.%f')),
                                         prefix='smi:local')
                ev = Event(resource_id=rid)
                cr_i = CreationInfo(author='EQcorrscan',
                                    creation_time=UTCDateTime())
                ev.creation_info = cr_i
                # All detection info in Comments for lack of a better idea
                thresh_str = 'threshold=' + str(rawthresh)
                ccc_str = 'detect_val=' + str(peak[0])
                used_chans = 'channels used: ' +\
                             ' '.join([str(pair) for pair in chans[i]])
                ev.comments.append(Comment(text=thresh_str))
                ev.comments.append(Comment(text=ccc_str))
                ev.comments.append(Comment(text=used_chans))
                min_template_tm = min([tr.stats.starttime for tr in template])
                for tr in template:
                    if (tr.stats.station, tr.stats.channel) not in chans[i]:
                        continue
                    else:
                        pick_tm = detecttime + (tr.stats.starttime -
                                                min_template_tm)
                        wv_id = WaveformStreamID(network_code=tr.stats.network,
                                                 station_code=tr.stats.station,
                                                 channel_code=tr.stats.channel)
                        ev.picks.append(Pick(time=pick_tm, waveform_id=wv_id))
                detections.append(DETECTION(_template_names[i],
                                            detecttime,
                                            no_chans[i], peak[0], rawthresh,
                                            'corr', chans[i], event=ev))
                if output_cat:
                    det_cat.append(ev)
        if extract_detections:
            detection_streams = extract_from_stream(stream, detections)
    del stream, templates
    if output_cat and not extract_detections:
        return detections, det_cat
    elif not extract_detections:
        return detections
    elif extract_detections and not output_cat:
        return detections, detection_streams
    else:
        return detections, det_cat, detection_streams
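To make the thresholding note in the docstring above concrete, here is a short sketch computing the raw threshold for each threshold_type on a toy cross-correlation sum; the array and parameter values are made up.

import numpy as np

cccsum = (np.random.randn(864000) * 0.02).astype(np.float32)  # toy, zero-mean
no_chans = 6  # channels that contributed to this cccsum

mad_thresh = 8.0 * np.median(np.abs(cccsum))  # threshold_type='MAD'
abs_thresh = 1.0                              # threshold_type='absolute'
avc_thresh = 0.3 * no_chans                   # threshold_type='av_chan_corr'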
Example #15
0
    def __init__(self):
        """
        ---------------- copied from old getwaveform.py ----------------------
        min_dist - minimum station distance (default = 20)
        max_dist - maximum station distance (default = 300)
        before -   time window length before the event time (default = 100)
        after  -   time window length after the event time (default = 300)
        network -  network codes of potential stations (default=*)
        channel -  component(s) to get, accepts comma separated (default='BH*')
        ifresample_TF   - Boolean. Request resample or not. Default = False
        resample_freq   - sampling frequency to resample waveforms (default 20.0)
        ifrotate - Boolean, if true will output sac files rotated to baz;
        unrotated sac files will also be written
        ifCapInp - Boolean, make weight files for CAP
        ifEvInfo - Boolean, output 'ev_info.dat' containing event info (True)
        ifRemoveResponse - Boolean, will remove response (True)
        ifDetrend - Boolean, will remove linear trend from data (True)
        ifDemean  - Boolean, will remove the mean from the data (True)
        scale_factor - scale all data by one value (10.0**2);
        this usually puts the data in the units required by CAP
        (from m/s to cm/s)
        pre_filt  - list, corner frequencies of filter to apply before deconv;
        a good idea when deconvolving (ifRemoveResponse=True)
        """
        # DEFAULT SETTINGS
        self.ifph5 = False  # PH5 data format from PASSCAL (Denali nodal data)
        self.client_name = 'IRIS'  # IRIS, LLNL, NCEDC
        # idb = database index: OBSOLETE -- use client_name instead
        self.idb = None  # =1-IRIS (default); =2-AEC; =3-LLNL; =4-Geoscope

        # event parameters
        self.use_catalog = 1  # =1: use an existing catalog (=1); =0: specify your own event parameters (see iex=9)
        self.sec_before_after_event = 10  # time window to search for a target event in a catalog
        self.tbefore_sec = 100
        self.tafter_sec = 300
        # These are used only if self.use_catalog = 0
        self.evname = None
        self.otime = None
        self.elat = None
        self.elon = None
        self.edep = None
        self.emag = None
        # Reference origin (dummy values)
        self.rlat = None
        self.rlon = None
        self.rtime = None
        # event objects
        self.ev = Event()
        self.ref_time_place = Event()

        # station parameters
        self.network = '*'  # all networks
        self.station = '*'  # all stations
        #self.station = '*,-PURD,-NV33,-GPO'  # all stations except -(these)
        self.channel = '*'  # all channels
        self.location = '*'  # all locations
        self.min_dist = 0
        self.max_dist = 20000
        self.min_az = 0
        self.max_az = 360
        self.min_lat = None
        self.max_lat = None
        self.min_lon = None
        self.max_lon = None
        self.overwrite_ddir = 1  # 1 = delete data directory if it already exists
        self.icreateNull = 0  # create Null traces so that rotation can work (obspy stream.rotate require 3 traces)
        # this might be helpful if missing a vertical component only
        self.phase_window = False  # Grab waveforms using phases #WARNING this will cut the waveform to be near the phase arrival
        self.phases = ["P",
                       "P"]  # Phases to write to sac files or grab data from
        self.write_sac_phase = False  # put phase information in sac files
        self.taupmodel = "ak135"
        # Filter parameters
        self.ifFilter = False
        #------Filter--------------
        # for 'bandpass' both f1 and f2 are used
        # for 'lowpass' only f2 is used
        # for 'highpass' only f1 is used
        #
        # EXAMPLES
        #                                   ifFilter  zerophase  remove_response  ipre_filt
        # A. CAP-ready waveforms [DEFAULT]: False     NA         True             1
        # B. plot-ready waveforms, acausal: True      True       True             2
        # C. plot-ready, causal waveforms:  True      False      True             0
        # D. possible sensor issues:        True      False      False            NA
        #
        self.filter_type = 'bandpass'
        # f1 should consider the requested length of the time series
        # f2 should consider the sampling rate for the desired channels
        self.f1 = 1 / 40  # fmin - highpass will keep frequencies larger than fmin
        self.f2 = 1 / 10  # fmax - lowpass will keep frequencies lower than fmax
        self.zerophase = True  # = False (causal/one-pass), = True (acausal/two-pass)
        # A 4-pole filter is sharper at the edges than a 2-pole filter
        self.corners = 4  # Is corner in Obspy same as Pole in SAC?

        # Pre-filter parameters
        self.ipre_filt = 1  # =0 No pre_filter
        # =1 default pre_filter (see getwaveform_iris.py)
        # =2 user-defined pre_filter (use this if you are using bandpass filter)
        # For tapering down the pre-filter
        # Perhaps you want to set ipre_filt = 0 to prevent extra filtering
        # pre-filter for deconvolution
        # https://ds.iris.edu/files/sac-manual/commands/transfer.html
        # Pre-filter will not be applied if remove_response = False
        self.f0 = 0.5 * self.f1
        self.f3 = 2.0 * self.f2
        # applies for ipre_filt = 2 only
        self.pre_filt = (self.f0, self.f1, self.f2, self.f3)
        # self.pre_filt = (0.005, 0.006, 10.0, 15.0) # BH default
        self.water_level = 60

        # For CAP
        self.resample_TF = False  # if False then resample_freq is taken from SAC files
        self.resample_freq = 50  # 0 causes errors. Use resample_TF instead
        self.scale_factor = 1  # for CAP use 10**2  (to convert m/s to cm/s)

        # Pre-processing (mainly for CAP)
        self.output_cap_weight_file = True  # output cap weight files
        self.detrend = True  # detrend waveforms
        self.demean = True  # demean waveforms
        self.taper = False  # this could also be a fraction between 0 and 1 (fraction to be tapered from both sides)
        self.output_event_info = True  # output event info file
        self.outformat = 'VEL'  # Instrument-response-removed waveforms saved as VEL, DISP, or ACC
        self.ifsave_sacpaz = False  # save sac pole zero (needed as input for MouseTrap module)
        self.remove_response = True  # remove instrument response
        self.iplot_response = False  # plot response function
        self.ifplot_spectrogram = False  # plot spectrograms
        self.ifsave_stationxml = True  # save station xml file (for adjoint tomo)

        # options for rotation and for writing sac files
        self.rotateRTZ = True  # Rotate and save the RTZ components
        self.rotateUVW = False  # Rotate and save the UVW components
        self.isave_raw = False  # save raw waveforms
        self.isave_raw_processed = True  # save processed waveforms just before rotation to ENZ
        self.rotateENZ = True  # rotate extracted waveforms to ENZ
        self.isave_ENZ = True  # save ENZ

        # username and password for embargoed IRIS data
        # Register here: http://ds.iris.edu/ds/nodes/dmc/forms/restricted-data-registration/
        self.user = None
        self.password = None

        # To output lots of processing info
        self.ifverbose = True

        # save RTZ as asdf files
        self.ifsave_asdf = False

        # Use mass downloader instead
        self.ifmass_downloader = False

        self.remove_clipped = False
        if self.remove_clipped:
            self.isave_raw = True
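A hypothetical usage of the options object defined above; the class name GetWaveformConfig is assumed, since the enclosing class is not visible in this snippet, and the event parameters are made-up values.

from obspy import UTCDateTime

ev_info = GetWaveformConfig()        # assumed class name
ev_info.use_catalog = 0              # specify event parameters manually
ev_info.otime = UTCDateTime('2009-04-07T20:12:55')
ev_info.elat, ev_info.elon = 61.45, -149.74
ev_info.edep, ev_info.emag = 33000.0, 4.6
ev_info.tbefore_sec, ev_info.tafter_sec = 100, 600
ev_info.ifFilter = True
ev_info.f1, ev_info.f2 = 1 / 50, 1 / 20  # bandpass corners in Hz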
Example #16
0
def _internal_read_single_scardec(buf):
    """
    Reads a single SCARDEC file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the origin time and epicenter
    line = buf.readline()

    origin_time = line.strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[6:]
    latitude, longitude = map(float, line[:2])

    # The second line encodes depth and the two focal mechanisms
    line = buf.readline()
    line = line.split()

    # First three values are depth, scalar moment (in Nm) and moment magnitude
    depth, scalar_moment, moment_mag = map(float, line[0:3])

    # depth is in km in SCARDEC files
    depth *= 1e3

    # Next six values are strike, dip, rake for both planes
    strike1, dip1, rake1 = map(float, line[3:6])
    strike2, dip2, rake2 = map(float, line[6:9])

    # The rest of the file is the moment rate function
    # In each line: time (sec), moment rate (Nm/sec)
    stf_time = []
    stf_mr = []
    for line in buf:
        t, mr = line.split()[:2]
        stf_time.append(float(t))
        stf_mr.append(float(mr))

    # Normalize the source time function
    stf_mr = np.array(stf_mr)
    stf_mr /= scalar_moment

    # Calculate the time step
    dt = np.mean(np.diff(stf_time))

    # Calculate the stf offset (time of first sample w.r.t. origin time)
    offset = stf_time[0]

    # event name is set to generic value for now
    event_name = 'SCARDEC_event'

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        depth=depth,
        origin_type="centroid",
        region=_fe.get_region(longitude=longitude,
                              latitude=latitude)
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        mag=moment_mag,
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    nod1 = NodalPlane(strike=strike1, dip=dip1, rake=rake1)
    nod2 = NodalPlane(strike=strike2, dip=dip2, rake=rake2)
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        nodal_planes=nod
    )

    dip1 *= np.pi / 180.
    rake1 *= np.pi / 180.
    strike1 *= np.pi / 180.

    mxx = - scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                              np.sin(2 * strike1)) +
                             (np.sin(2 * dip1) * np.sin(rake1) *
                              np.sin(2 * strike1)))
    mxy = scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                            np.cos(2 * strike1)) +
                           (np.sin(2 * dip1) * np.sin(rake1) *
                            np.sin(2 * strike1) * 0.5))
    myy = scalar_moment * ((np.sin(dip1) * np.cos(rake1) *
                            np.sin(2 * strike1)) -
                           (np.sin(2 * dip1) * np.sin(rake1) *
                            np.cos(2 * strike1)))
    mxz = - scalar_moment * ((np.cos(dip1) * np.cos(rake1) *
                              np.cos(strike1)) +
                             (np.cos(2 * dip1) * np.sin(rake1) *
                              np.sin(strike1)))
    myz = - scalar_moment * ((np.cos(dip1) * np.cos(rake1) *
                             np.sin(strike1)) -
                             (np.cos(2 * dip1) * np.sin(rake1) *
                              np.cos(strike1)))
    mzz = scalar_moment * (np.sin(2 * dip1) * np.sin(rake1))

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)

    cm = [Comment(text="Basis system: North,East,Down "
                       "(Jost and Herrmann 1989)")]
    cm[0].resource_id = _get_resource_id(event_name, 'comment', 'mt')
    cm.append(Comment(text="MT derived from focal mechanism, therefore "
                           "constrained to pure double couple.",
                      force_resource_id=False))

    # Write moment rate function
    extra = {'moment_rate': {'value': stf_mr,
                             'namespace': r"http://test.org/xmlns/0.1"},
             'dt': {'value': dt,
                    'namespace': r"http://test.org/xmlns/0.1"},
             'offset': {'value': offset,
                        'namespace': r"http://test.org/xmlns/0.1"}
             }

    # Source time function
    stf = SourceTimeFunction(type="unknown")
    stf.extra = extra

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        scalar_moment=scalar_moment,
        tensor=tensor,
        source_time_function=stf,
        comments=cm
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: SCARDEC",
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.magnitudes.append(cmt_mag)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
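
SCARDEC reading is registered with ObsPy's read_events(), so the helper above is not called directly. A minimal usage sketch (the file path is hypothetical):

from obspy import read_events

cat = read_events("FCTs_example_file", format="SCARDEC")  # hypothetical path
print(cat[0].preferred_focal_mechanism().moment_tensor.scalar_moment)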
Example #17
0
def par2quakeml(Par_filename,
                QuakeML_filename,
                rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5,
                origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    """
    Convert a Par file (in rotated model coordinates) to a QuakeML file,
    rotating the event back into the physical domain.
    """

    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    # skip the first four lines of the Par file
    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data

    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr**2 + Mtt**2 + Mpp**2 + Mtr**2 +
                                            Mpr**2 + Mtp**2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes()

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude

    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file

    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
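
A hypothetical call of the converter above (file names, rotation settings and origin time are illustrative):

par2quakeml("Par_file", "event.xml",
            rotation_axis=[0.0, 1.0, 0.0],
            rotation_angle=-57.5,
            origin_time="2014-01-01 00:00:00.0",
            event_type="other event")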
Example #18
0
def request_gcmt(starttime,
                 endtime,
                 minmagnitude=None,
                 mindepth=None,
                 maxdepth=None,
                 minlatitude=None,
                 maxlatitude=None,
                 minlongitude=None,
                 maxlongitude=None):
    """
    Request GCMT solutions from www.globalcmt.org.

    This uses mechanize and is a preliminary implementation tied to the
    current globalcmt.org search form. Moment tensor information is
    stored with each event in the returned catalog.
    """
    from mechanize import Browser
    import re

    # Split numbers and text
    r = re.compile("([a-zA-Z]+)([0-9]+)")
    br = Browser()
    br.open('http://www.globalcmt.org/CMTsearch.html')
    # Site has just one form
    br.select_form(nr=0)

    br.form['yr'] = str(starttime.year)
    br.form['mo'] = str(starttime.month)
    br.form['day'] = str(starttime.day)
    br.form['oyr'] = str(endtime.year)
    br.form['omo'] = str(endtime.month)
    br.form['oday'] = str(endtime.day)
    br.form['list'] = ['4']
    br.form['itype'] = ['ymd']
    br.form['otype'] = ['ymd']

    if minmagnitude:
        br.form['lmw'] = str(minmagnitude)
    if minlatitude:
        br.form['llat'] = str(minlatitude)
    if maxlatitude:
        br.form['ulat'] = str(maxlatitude)
    if minlongitude:
        br.form['llon'] = str(minlongitude)
    if maxlongitude:
        br.form['ulon'] = str(maxlongitude)
    if mindepth:
        br.form['lhd'] = str(mindepth)
    if maxdepth:
        br.form['uhd'] = str(maxdepth)

    print("Submitting parameters to globalcmt.org ")
    req = br.submit()
    print("Retrieving data, creating catalog.")

    data = []
    for line in req:
        data.append(line)

    data_chunked = _chunking_list(keyword='\n', list=data)
    origins = []
    magnitudes = []
    tensor = []

    for line in data_chunked:
        for element in line:
            if 'event name' in element:
                try:
                    org = line[1].split()
                    year = int(r.match(org[0]).groups()[1])
                    mon = int(org[1])
                    day = int(org[2])
                    hour = int(org[3])
                    minute = int(org[4])
                    sec_temp = int(org[5].split('.')[0])
                    msec_temp = int(org[5].split('.')[1])

                except Exception:
                    org = line[1].split()
                    year = int(org[1])
                    mon = int(org[2])
                    day = int(org[3])
                    hour = int(org[4])
                    minute = int(org[5])
                    sec_temp = int(org[6].split('.')[0])
                    msec_temp = int(org[6].split('.')[1])

                origins_temp = UTCDateTime(year, mon, day, hour, minute,
                                           sec_temp, msec_temp)
                # adding time shift located in line[3]
                origin = origins_temp + float(line[3].split()[2])
                magnitude = float(line[1].split()[10])
                latitude = float(line[5].split()[1])
                longitude = float(line[6].split()[1])
                depth = 1000. * float(line[7].split()[1])
                m_rr = float(line[8].split()[1])
                m_tt = float(line[9].split()[1])
                m_pp = float(line[10].split()[1])
                m_rt = float(line[11].split()[1])
                m_rp = float(line[12].split()[1])
                m_tp = float(line[13].split()[1])

                magnitudes.append(("Mw", magnitude))
                origins.append((latitude, longitude, depth, origin))
                tensor.append((m_rr, m_tt, m_pp, m_rt, m_rp, m_tp))

    cat = Catalog()

    for mag, org, ten in zip(magnitudes, origins, tensor):
        # Create magnitude object.
        magnitude = Magnitude()
        magnitude.magnitude_type = mag[0]
        magnitude.mag = mag[1]
        # Write origin object.
        origin = Origin()
        origin.latitude = org[0]
        origin.longitude = org[1]
        origin.depth = org[2]
        origin.time = org[3]
        # Create event object and append to catalog object.
        event = Event()
        event.magnitudes.append(magnitude)
        event.origins.append(origin)

        event.MomentTensor = MomentTensor()
        event.MomentTensor.m_rr = ten[0]
        event.MomentTensor.m_tt = ten[1]
        event.MomentTensor.m_pp = ten[2]
        event.MomentTensor.m_rt = ten[3]
        event.MomentTensor.m_rp = ten[4]
        event.MomentTensor.m_tp = ten[5]

        cat.append(event)

    return cat
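
A usage sketch for the scraper above (the date window and magnitude cutoff are illustrative; the mechanize package must be installed):

from obspy import UTCDateTime

cat = request_gcmt(starttime=UTCDateTime(2011, 3, 1),
                   endtime=UTCDateTime(2011, 3, 31),
                   minmagnitude=7.0)
print(len(cat), "events retrieved")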
Example #19
0
    def test_real_time_plotting(self):
        """Test the real-time plotter - must be run interactively."""

        seed_list = [
            "NZ.INZ.10.HHZ", "NZ.JCZ.10.HHZ", "NZ.FOZ.11.HHZ", "NZ.MSZ.10.HHZ",
            "NZ.PYZ.10.HHZ", "NZ.DCZ.10.HHZ", "NZ.WVZ.10.HHZ"
        ]
        client = Client("GEONET")
        inv = client.get_stations(network=seed_list[0].split(".")[0],
                                  station=seed_list[0].split(".")[1],
                                  location=seed_list[0].split(".")[2],
                                  channel=seed_list[0].split(".")[3])
        for seed_id in seed_list[1:]:
            net, sta, loc, chan = seed_id.split('.')
            inv += client.get_stations(network=net,
                                       station=sta,
                                       channel=chan,
                                       location=loc)

        template_cat = client.get_events(starttime=UTCDateTime(2020, 3, 25),
                                         endtime=UTCDateTime(2020, 3, 26))
        tribe = Tribe(templates=[
            Template(event=event, name=event.resource_id.id.split("/")[-1])
            for event in template_cat
        ])
        template_names = cycle([t.name for t in tribe])

        buffer_capacity = 1200
        rt_client = RealTimeClient(server_url="link.geonet.org.nz",
                                   buffer_capacity=buffer_capacity)
        for seed_id in seed_list:
            net, station, _, selector = seed_id.split(".")
            rt_client.select_stream(net=net,
                                    station=station,
                                    selector=selector)

        rt_client.background_run()
        while len(rt_client.stream) < len(seed_list):
            # Wait until we have some data
            time.sleep(SLEEP_STEP)

        detections = []
        plotter = EQcorrscanPlot(rt_client=rt_client,
                                 plot_length=60,
                                 tribe=tribe,
                                 inventory=inv,
                                 update_interval=1000,
                                 detections=detections)
        plotter.background_run()

        duration = 0
        step = 5
        while duration < MAX_DURATION:
            detections.append(
                Detection(
                    template_name=next(template_names),
                    detect_time=UTCDateTime.now(),
                    no_chans=999,
                    detect_val=999,
                    threshold=999,
                    threshold_type="MAD",
                    threshold_input=999,
                    typeofdet="unreal",
                    event=Event(picks=[
                        Pick(time=UTCDateTime.now(),
                             waveform_id=WaveformStreamID(seed_string=seed_id))
                        for seed_id in seed_list
                    ])))
            time.sleep(step)
            duration += step
        rt_client.background_stop()
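
The test relies on module-level constants defined elsewhere; plausible values would be (illustrative assumption only):

SLEEP_STEP = 1.0    # seconds to sleep between buffer-length checks
MAX_DURATION = 60   # total seconds to run the interactive plot test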
Example #20
0
def _internal_read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a :class:`~obspy.core.event.Event`
    object.

    :param buf: File to read.
    :type buf: Open file or open file-like object.
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    hypocenter_catalog = line[:4].strip().decode()

    origin_time = line[4:].strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[7:]
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        # Depth is in meters.
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary"
    )

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_bw"),
        mag=body_wave_mag, magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_sw"),
        mag=surface_wave_mag, magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    values = ["time_shift", "half_duration", "latitude", "longitude",
              "depth", "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    cmt_values = {_i: float(buf.readline().strip().split()[-1])
                  for _i in values}

    # Moment magnitude calculation in dyne * cm.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"] ** 2 +
        cmt_values["m_tt"] ** 2 +
        cmt_values["m_pp"] ** 2 +
        2.0 * cmt_values["m_rt"] ** 2 +
        2.0 * cmt_values["m_rp"] ** 2 +
        2.0 * cmt_values["m_tp"] ** 2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could rarely be different than the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id
    )

    tensor = Tensor(
        m_rr=cmt_values["m_rr"],
        m_pp=cmt_values["m_pp"],
        m_tt=cmt_values["m_tt"],
        m_rt=cmt_values["m_rt"],
        m_rp=cmt_values["m_rp"],
        m_tp=cmt_values["m_tp"]
    )

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"]
    )

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: %s" % hypocenter_catalog,
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    return ev
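
CMTSOLUTION reading is likewise registered with read_events(). A minimal sketch with a hypothetical path, plus a standalone check of the Mw conversion used above:

import math
from obspy import read_events

cat = read_events("CMTSOLUTION", format="CMTSOLUTION")  # hypothetical path
m_0 = 1.1e27                                 # illustrative moment, dyne*cm
m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)
print(round(m_w, 2))                         # -> 7.29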
Example #21
0
    def event(self):
        origin = Origin(time=self.time, latitude=47, longitude=-111.7)
        return Event(origins=[origin])
Example #22
0
def _read_evt(filename, inventory=None, id_map=None, id_default='.{}..{}',
              encoding='utf-8'):
    """
    Read a SeismicHandler EVT file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.read_events` function, call this
        instead.

    :type filename: str
    :param filename: File or file-like object in text mode.
    :type inventory: :class:`~obspy.core.inventory.inventory.Inventory`
    :param inventory: Inventory used to retrieve network code, location code
        and channel code of stations (SEED id).
    :type id_map: dict
    :param id_map: If channel information was not found in inventory,
        it will be looked up in this dictionary
        (example: `id_map={'MOX': 'GR.{}..HH{}'}`).
        The values must contain three dots and two `{}` which are
        substituted by station code and component.
    :type id_default: str
    :param id_default: Default SEED id expression.
        The value must contain three dots and two `{}` which are
        substituted by station code and component.
    :param str encoding: encoding used (default: utf-8)

    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. note::
        The following fields are supported by this function: %s.

        Compare with http://www.seismic-handler.org/wiki/ShmDocFileEvt
    """
    seed_map = _seed_id_map(inventory, id_map)
    with io.open(filename, 'r', encoding=encoding) as f:
        temp = f.read()
    # first create phases and phases_o dictionaries for different phases
    # and phases with origin information
    phases = defaultdict(list)
    phases_o = {}
    phase = {}
    evid = None
    for line in temp.splitlines():
        if 'End of Phase' in line:
            if 'origin time' in phase.keys():
                if evid in phases_o:
                    # found more than one origin
                    pass
                phases_o[evid] = phase
            phases[evid].append(phase)
            phase = {}
            evid = None
        elif line.strip() != '':
            try:
                key, value = line.split(':', 1)
            except ValueError:
                continue
            key = key.strip().lower()
            value = value.strip()
            if key == 'event id':
                evid = value
            elif value != '':
                phase[key] = value
    assert evid is None

    # now create obspy Events from phases and phases_o dictionaries
    events = []
    for evid in phases:
        picks = []
        arrivals = []
        stamags = []
        origins = []
        po = None
        magnitudes = []
        pm = None
        for p in phases[evid]:
            sta = p.get('station code', '')
            comp = p.get('component', '')
            wid = seed_map.get(sta, id_default)
            wid = WaveformStreamID(seed_string=wid.format(sta, comp))
            pick = Pick(waveform_id=wid, **_kw(p, 'pick'))
            arrival = Arrival(pick_id=pick.resource_id, **_kw(p, 'arrival'))
            picks.append(pick)
            arrivals.append(arrival)
            stamags_temp, _ = _mags(p, evid, stamag=True, wid=wid)
            stamags.extend(stamags_temp)
        if evid in phases_o:
            o = phases_o[evid]
            uncertainty = OriginUncertainty(**_kw(o, 'origin_uncertainty'))
            origin = Origin(arrivals=arrivals, origin_uncertainty=uncertainty,
                            **_kw(o, 'origin'))
            if origin.latitude is None or origin.longitude is None:
                warn('latitude or longitude not set for event %s' % evid)
            else:
                if origin.longitude_errors.uncertainty is not None:
                    origin.longitude_errors.uncertainty *= cos(
                        origin.latitude / 180 * pi)
                origins = [origin]
                po = origin.resource_id
            magnitudes, pm = _mags(o, evid)
        else:
            o = p
        event = Event(resource_id=ResourceIdentifier(evid),
                      picks=picks,
                      origins=origins,
                      magnitudes=magnitudes,
                      station_magnitudes=stamags,
                      preferred_origin_id=po,
                      preferred_magnitude_id=pm,
                      **_kw(o, 'event')
                      )
        events.append(event)
    return Catalog(events,
                   description='Created from SeismicHandler EVT format')
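
As the warning in the docstring notes, this reader is invoked through read_events() rather than directly; a minimal sketch with a hypothetical file name (ObsPy auto-detects the format):

from obspy import read_events

cat = read_events("shm_events.evt")  # hypothetical SeismicHandler EVT file
print(cat)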
# - a (modified) Catalog can be output to file (currently there is write support for QuakeML only)

largest_magnitude_events.write("/tmp/large_events.xml", format="QUAKEML")
# !ls -l /tmp/large_events.xml

# - the event type classes can be used to build up Events/Catalogs/Picks/...
#   from scratch in custom processing workflows and to share them with other
#   researchers in the de facto standard format QuakeML

# +
from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin, Magnitude
from obspy.geodetics import FlinnEngdahl

cat = Catalog()
cat.description = "Just a fictitious toy example catalog built from scratch"

e = Event()
e.event_type = "not existing"

o = Origin()
o.time = UTCDateTime(2014, 2, 23, 18, 0, 0)
o.latitude = 47.6
o.longitude = 12.0
o.depth = 10000
o.depth_type = "operator assigned"
o.evaluation_mode = "manual"
o.evaluation_status = "preliminary"
o.region = FlinnEngdahl().get_region(o.longitude, o.latitude)

m = Magnitude()
m.mag = 7.2
m.magnitude_type = "Mw"
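
The snippet breaks off here; a minimal completion, assuming (as the surrounding text suggests) that the origin and magnitude belong to the event and the event to the catalog:

m.origin_id = o.resource_id  # tie the magnitude to the origin

e.origins.append(o)
e.magnitudes.append(m)
cat.append(e)
print(cat)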
Example #24
0
def sactoevent(st, debug=0):
    """
    Convert SAC headers (picks only) to obspy event class.

    Picks are taken from header values a, t[0-9].

    :type st: obspy.core.stream.Stream
    :param st: Stream of waveforms including SAC headers.
    :type debug: int
    :param debug: Debug level, larger number = more output.

    :returns: Event with picks taken from SAC headers.
    :rtype: :class:`obspy.core.event.event.Event`

    .. note::
        This functionality is not supported for obspy versions below 1.0.0 as
        reference times are not read in by SACIO, which are needed for defining
        pick times.

    .. note::
        Takes the event origin information from the first trace in the
        stream - to ensure this works as you expect, please populate the
        evla, evlo, evdp and nzyear, nzjday, nzhour, nzmin, nzsec, nzmsec
        for all traces with the same values.

    >>> from obspy import read
    >>> # Get the path to the test data
    >>> import eqcorrscan
    >>> import os
    >>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
    >>> st = read(TEST_PATH + '/SAC/2014p611252/*')
    >>> event = sactoevent(st)
    >>> print(event.origins[0].time)
    2014-08-15T03:55:21.057000Z
    >>> print(event.picks[0].phase_hint)
    S
    """
    # Check the version
    _version_check()
    # Set the default SAC null (undefined) value
    float_nan = -12345.0

    if not isinstance(st, Stream):
        msg = ('st must be a stream object, if you have just read in ' +
               'multiple SAC files you may have a list of streams, convert ' +
               'using: st = Stream([tr[0] for tr in st])')
        raise ValueError(msg)
        # Note, don't do this internally as we need to ensure that we are not
        # taking traces from other events, the user should check this.
    for tr in st:
        if not tr.stats._format == 'SAC':
            msg = ('%s.%s is not SAC formatted.' %
                   (tr.stats.station, tr.stats.channel))
            raise ValueError(msg)
    # Now we need to create an event!
    event = Event()
    event.origins.append(Origin())
    # print(st[0].stats.sac.keys())
    event.origins[0].time = UTCDateTime(year=st[0].stats.sac.nzyear,
                                        julday=st[0].stats.sac.nzjday,
                                        hour=st[0].stats.sac.nzhour,
                                        minute=st[0].stats.sac.nzmin,
                                        second=st[0].stats.sac.nzsec,
                                        microsecond=st[0].stats.sac.nzmsec *
                                        1000)
    try:
        event.origins[0].latitude = st[0].stats.sac.evla
        event.origins[0].longitude = st[0].stats.sac.evlo
        event.origins[0].depth = st[0].stats.sac.evdp
        # Catch headers filled with the SAC null value (-12345.0)
        if event.origins[0].latitude == float_nan:
            event.origins[0].latitude = None
        if event.origins[0].longitude == float_nan:
            event.origins[0].longitude = None
        if event.origins[0].depth == float_nan:
            event.origins[0].depth = None
    except KeyError:
        event.origins[0].latitude = None
        event.origins[0].longitude = None
        event.origins[0].depth = None
    except AttributeError:
        event.origins[0].latitude = None
        event.origins[0].longitude = None
        event.origins[0].depth = None

    # Add in the picks
    for tr in st:
        reference_time = UTCDateTime(year=tr.stats.sac.nzyear,
                                     julday=tr.stats.sac.nzjday,
                                     hour=tr.stats.sac.nzhour,
                                     minute=tr.stats.sac.nzmin,
                                     second=tr.stats.sac.nzsec,
                                     microsecond=tr.stats.sac.nzmsec * 1000)
        # Possible pick locations are in the t[0-9] slot
        for pick_number in range(10):
            pick_key = 't' + str(pick_number)
            phase_key = 'kt' + str(pick_number)
            try:
                if tr.stats.sac[pick_key] == float_nan:
                    # In obspy 0.10.2 and before, rather than omitting the
                    # keys, the variables are filled with SAC null values.
                    if debug > 1:
                        msg = 'No pick in position ' + pick_key + \
                            ' for trace: ' + tr.stats.station + '.' + \
                            tr.stats.channel
                        warnings.warn(msg)
                    continue
                pick_time = reference_time + tr.stats.sac[pick_key]
                phase_hint = tr.stats.sac[phase_key].split()[0]
            except KeyError:
                if debug > 1:
                    msg = 'No pick in position ' + pick_key + ' for trace: ' +\
                        tr.stats.station + '.' + tr.stats.channel
                    warnings.warn(msg)
                continue
            if debug > 0:
                msg = 'Found pick in position ' + pick_key + ' for trace: ' +\
                    tr.stats.station + '.' + tr.stats.channel
                print(msg)
            waveform_id = WaveformStreamID(station_code=tr.stats.station,
                                           network_code=tr.stats.network,
                                           channel_code=tr.stats.channel)
            pick = Pick(waveform_id=waveform_id,
                        phase_hint=phase_hint,
                        time=pick_time)
            event.picks.append(pick)
        # Also check header slots 'a' and 'ka'
        try:
            if tr.stats.sac['a'] == float_nan:
                if debug > 1:
                    msg = 'No pick in position a for trace: ' + \
                        tr.stats.station + '.' + tr.stats.channel
                    warnings.warn(msg)
                continue
            pick_time = reference_time + tr.stats.sac['a']
            phase_hint = tr.stats.sac['ka'].split()[0]
        except KeyError:
            if debug > 1:
                msg = 'No pick in position a for trace: ' + \
                    tr.stats.station + '.' + tr.stats.channel
                warnings.warn(msg)
            continue
        if debug > 0:
            msg = 'Found pick in position a for trace: ' +\
                tr.stats.station + '.' + tr.stats.channel
            print(msg)
        waveform_id = WaveformStreamID(station_code=tr.stats.station,
                                       network_code=tr.stats.network,
                                       channel_code=tr.stats.channel)
        pick = Pick(waveform_id=waveform_id,
                    phase_hint=phase_hint,
                    time=pick_time)
        event.picks.append(pick)

    return event
Example #25
0
def _channel_loop(detection,
                  template,
                  min_cc,
                  detection_id,
                  interpolate,
                  i,
                  pre_lag_ccsum=None,
                  detect_chans=0,
                  horizontal_chans=['E', 'N', '1', '2'],
                  vertical_chans=['Z'],
                  debug=0):
    """
    Inner loop for correlating and assigning picks.

    Utility function to take a stream of data for the detected event and write
    maximum correlation to absolute time as picks in an obspy.core.event.Event
    object.
    Only outputs picks for picks above min_cc.

    :type detection: obspy.core.stream.Stream
    :param detection:
        Stream of data for the slave event detected using template.
    :type template: obspy.core.stream.Stream
    :param template: Stream of data as the template for the detection.
    :type min_cc: float
    :param min_cc: Minimum cross-correlation value to allow a pick to be made.
    :type detection_id: str
    :param detection_id: Detection ID to associate the event with.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type i: int
    :param i:
        Used to track which process has occurred when running in parallel.
    :type pre_lag_ccsum: float
    :param pre_lag_ccsum:
        Cross-correlation sum before lag-calc, will check that the
        cross-correlation sum is increased by lag-calc (using all channels,
        ignoring min_cc)
    :type detect_chans: int
    :param detect_chans:
        Number of channels originally used in detections, must match the number
        used here to allow for cccsum checking.
    :type horizontal_chans: list
    :param horizontal_chans:
        List of channel endings for horizontal-channels, on which S-picks will
        be made.
    :type vertical_chans: list
    :param vertical_chans:
        List of channel endings for vertical-channels, on which P-picks will
        be made.
    :type debug: int
    :param debug: Debug output level 0-5.

    :returns:
        Event object containing network, station, channel and pick information.
    :rtype: :class:`obspy.core.event.Event`
    """
    from eqcorrscan.core.match_filter import normxcorr2
    import math
    event = Event()
    s_stachans = {}
    cccsum = 0
    checksum = 0
    used_chans = 0
    for tr in template:
        temp_net = tr.stats.network
        temp_sta = tr.stats.station
        temp_chan = tr.stats.channel
        debug_print('Working on: %s.%s.%s' % (temp_net, temp_sta, temp_chan),
                    3, debug)
        image = detection.select(station=temp_sta, channel=temp_chan)
        if len(image) == 0 or sum(image[0].data) == 0:
            print('No match in image.')
            continue
        if interpolate:
            try:
                ccc = normxcorr2(tr.data, image[0].data)
            except Exception:
                print('Could not calculate cc')
                print('Image is %i long' % len(image[0].data))
                print('Template is %i long' % len(tr.data))
                continue
            try:
                shift, cc_max = _xcorr_interp(ccc=ccc, dt=image[0].stats.delta)
            except IndexError:
                print('Could not interpolate ccc, not smooth')
                ccc = normxcorr2(tr.data, image[0].data)
                cc_max = np.amax(ccc)
                shift = np.argmax(ccc) * image[0].stats.delta
            # Convert the maximum cross-correlation time to an actual time
            if math.isnan(cc_max):
                print('Problematic trace, no cross correlation possible')
                continue
            else:
                picktime = image[0].stats.starttime + shift
        else:
            # Convert the maximum cross-correlation time to an actual time
            try:
                ccc = normxcorr2(tr.data, image[0].data)
            except Exception:
                print('Could not calculate cc')
                print('Image is %i long' % len(image[0].data))
                print('Template is %i long' % len(tr.data))
                continue
            cc_max = np.amax(ccc)
            if math.isnan(cc_max):
                print('Problematic trace, no cross correlation possible')
                continue
            else:
                picktime = image[0].stats.starttime + (np.argmax(ccc) *
                                                       image[0].stats.delta)
        debug_print('Maximum cross-corr=%s' % cc_max, 3, debug)
        checksum += cc_max
        used_chans += 1
        if cc_max < min_cc:
            debug_print('Correlation below threshold, not used', 3, debug)
            continue
        cccsum += cc_max
        # Perhaps weight each pick by the cc val or cc val^2?
        # weight = np.amax(ccc) ** 2
        if temp_chan[-1] in vertical_chans:
            phase = 'P'
        # Only take the S-pick with the best correlation
        elif temp_chan[-1] in horizontal_chans:
            phase = 'S'
            debug_print(
                'Making S-pick on: %s.%s.%s' % (temp_net, temp_sta, temp_chan),
                4, debug)
            if temp_sta not in s_stachans.keys():
                s_stachans[temp_sta] = ((temp_chan, np.amax(ccc), picktime))
            elif temp_sta in s_stachans.keys():
                if np.amax(ccc) > s_stachans[temp_sta][1]:
                    # This channel correlates better; keep its pick time.
                    picktime = picktime
                else:
                    # A better S-pick already exists for this station; skip.
                    continue
        else:
            phase = None
        _waveform_id = WaveformStreamID(network_code=temp_net,
                                        station_code=temp_sta,
                                        channel_code=temp_chan)
        event.picks.append(
            Pick(waveform_id=_waveform_id,
                 time=picktime,
                 method_id=ResourceIdentifier('EQcorrscan'),
                 phase_hint=phase,
                 creation_info='eqcorrscan.core.lag_calc',
                 evaluation_mode='automatic',
                 comments=[Comment(text='cc_max=%s' % cc_max)]))
        event.resource_id = detection_id
    ccc_str = ("detect_val=%s" % cccsum)
    event.comments.append(Comment(text=ccc_str))
    if used_chans == detect_chans:
        if pre_lag_ccsum is not None and\
           checksum - pre_lag_ccsum < -(0.3 * pre_lag_ccsum):
            msg = ('lag-calc has decreased cccsum from %f to %f' %
                   (pre_lag_ccsum, checksum))
            raise LagCalcError(msg)
    else:
        warnings.warn('Cannot check if cccsum is better, used %i channels '
                      'for detection, but %i are used here' %
                      (detect_chans, used_chans))
    return i, event
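
A quick illustration of the channel classification used above, with the default channel-ending lists from the signature:

horizontal_chans, vertical_chans = ['E', 'N', '1', '2'], ['Z']
for chan in ['HHZ', 'HHN', 'HHE', 'HH1']:
    if chan[-1] in vertical_chans:
        phase = 'P'          # P-picks go on vertical channels
    elif chan[-1] in horizontal_chans:
        phase = 'S'          # S-picks go on horizontal channels
    else:
        phase = None
    print(chan, '->', phase)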
Example #26
0
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier("smi:hash/Origin/{0}".format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier("smi:hash/Pick/{0}".format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(
                network_code=hp.snet[_i],
                station_code=hp.sname[_i],
                channel_code=hp.scomp[_i],
            )
            if hp.p_pol[_i] > 0:
                p.polarity = "positive"
            else:
                p.polarity = "negative"
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier("smi:hash/Arrival/{0}".format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180.0 - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.get_referred_object()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the DoubleCouple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            "smi:hash/FocalMechanism/{0}/{1}".format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier("HASH")
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax["T"]["azimuth"],
                                                plunge=ax["T"]["dip"])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax["P"]["azimuth"],
                                                plunge=ax["P"]["dip"])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(
                hp.qual[s],
                resource_id=ResourceIdentifier(
                    str(focal_mech.resource_id) + "/comment/quality"),
            ))
        # ----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
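
A hypothetical driver for the function above; the hashpy import path and driver steps are assumptions, not part of the original code:

from hashpy import HashPype  # assumed import path

hp = HashPype()
# ... load inputs and run the HASH computation (driver steps omitted) ...
ev = outputOBSPY(hp)  # new Event carrying the HASH solution
ev = outputOBSPY(hp, event=ev, only_fm_picks=True)  # or update an event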
Example #27
0
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, minute, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they cannot be known
        # when reading the .hyp file. To conform with the QuakeML standard,
        # set an empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
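
NonLinLoc hypocenter-phase files are read through read_events() with the NLLOC_HYP format key; a minimal sketch (the path is hypothetical):

from obspy import read_events

cat = read_events("loc/last.hyp", format="NLLOC_HYP")  # hypothetical path
print(cat[0].preferred_origin().quality.standard_error)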
Example #28
0
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
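A minimal usage sketch for this reader; the file name below is hypothetical. In practice the function is reached indirectly through obspy.read_events, which dispatches on the NDK format:

# Hedged usage sketch -- the file name is a placeholder.
from obspy import read_events

cat = read_events("gcmt_catalog.ndk", format="NDK")
print(len(cat), "events")
print(cat[0].preferred_origin().time)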
Example #29
def brightness(stations,
               nodes,
               lags,
               stream,
               threshold,
               thresh_type,
               template_length,
               template_saveloc,
               coherence_thresh,
               coherence_stations=['all'],
               coherence_clip=False,
               gap=2.0,
               clip_level=100,
               instance=0,
               pre_pick=0.2,
               plotsave=True,
               cores=1):
    r"""Function to calculate the brightness function in terms of energy for \
    a day of data over the entire network for a given grid of nodes.

    Note data in stream must be all of the same length and have the same
    sampling rates.

    :type stations: list
    :param stations: List of station names, in the form where stations[i] \
        refers to nodes[i][:] and lags[i][:]
    :type nodes: list, tuple
    :param nodes: List of node points where nodes[i] refers to stations[i] \
        and nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is \
        longitude in degrees, nodes[:][:][2] is depth in km.
    :type lags: :class:`numpy.ndarray`
    :param lags: Array of arrays where lags[i][:] refers to stations[i]. \
        lags[i][j] should be the delay to the nodes[i][j] for stations[i] in \
        seconds.
    :type stream: :class:`obspy.Stream`
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold: Threshold value for detection of template within the \
        brightness function
    :type thresh_type: str
    :param thresh_type: Either MAD or abs where MAD is the Median Absolute \
        Deviation and abs is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple of floats
    :param coherence_thresh: Threshold for removing incoherent peaks in the \
            network response, those below this will not be used as templates. \
            Must be in the form of (a,b) where the coherence is given by: \
            a-kchan/b where kchan is the number of channels used to compute \
            the coherence
    :type coherence_stations: list
    :param coherence_stations: List of stations to use in the coherence \
            thresholding - defaults to 'all' which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip: Start and end in seconds of data to window around, \
            defaults to False, which uses all the data given.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotsave: bool
    :param plotsave: Save or show plots, if False will try and show the plots \
            on screen - as this is designed for bulk use this is set to \
            True to save any plots rather than show them if you create \
            them - changes the backend of matplotlib, so if is set to \
            False you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type clip_level: float
    :param clip_level: Multiplier applied to the mean deviation of the energy \
                    as an upper limit, used to remove spikes (earthquakes, \
                    lightning, electrical spikes) from the energy stack.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections

    :return: list of templates as :class: `obspy.Stream` objects
    """
    from eqcorrscan.core.template_gen import _template_gen
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    # from joblib import Parallel, delayed
    from multiprocessing import Pool, cpu_count
    from copy import deepcopy
    from obspy import read as obsread
    from obspy.core.event import Catalog, Event, Pick, WaveformStreamID, Origin
    from obspy.core.event import EventDescription, CreationInfo, Comment
    import obspy  # obspy.Stream is used below
    import matplotlib.pyplot as plt
    from eqcorrscan.utils import plotting
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped; if they are high gain,
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too, to conserve
    # memory; to do this it forces the maximum of a single energy trace to be
    # 500 and normalises to this level - this only works for fewer than 65
    # channels of data
    if len(stream_copy) > 130:
        raise OverflowError('Too many streams: either re-code to cope with '
                            'more memory usage or less precision, or '
                            'reduce the data volume')
    detections = []
    detect_lags = []
    parallel = True
    plotvar = True
    mem_issue = False
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    if not parallel:
        for i in range(0, len(nodes)):
            print(i)
            if not mem_issue:
                j, a = _node_loop(stations, lags[:, i], stream, plot=True)
                if 'energy' not in locals():
                    energy = a
                else:
                    energy = np.concatenate((energy, a), axis=0)
                print('energy: ' + str(np.shape(energy)))
            else:
                j, filename = _node_loop(stations, lags[:, i], stream, i,
                                         mem_issue)
        energy = np.array(energy)
        print(np.shape(energy))
    else:
        # Parallel run
        num_cores = cores
        if num_cores > len(nodes):
            num_cores = len(nodes)
        if num_cores > cpu_count():
            num_cores = cpu_count()
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_node_loop,
                             args=(stations, lags[:, i], stream, i, clip_level,
                                   mem_issue, instance))
            for i in range(len(nodes))
        ]
        pool.close()
        if not mem_issue:
            print('Computing the cumulative network response from memory')
            energy = [p.get() for p in results]
            pool.join()
            energy.sort(key=lambda tup: tup[0])
            energy = [node[1] for node in energy]
            energy = np.concatenate(energy, axis=0)
            print(energy.shape)
        else:
            pool.join()
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indeces = np.argmax(energy, axis=0)  # Indices of maximum energy
        print(indeces.shape)
        cum_net_resp = np.array([np.nan] * len(indeces))
        cum_net_resp[0] = energy[indeces[0]][0]
        peak_nodes = [nodes[indeces[0]]]
        for i in range(1, len(indeces)):
            cum_net_resp[i] = energy[indeces[i]][i]
            peak_nodes.append(nodes[indeces[i]])
        del energy, indeces
    else:
        print('Reading the temp files and computing network response')
        node_splits = int(len(nodes) // num_cores)
        indeces = [range(node_splits)]
        for i in range(1, num_cores - 1):
            indeces.append(range(node_splits * i, node_splits * (i + 1)))
        indeces.append(range(node_splits * (i + 1), len(nodes)))
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_cum_net_resp, args=(indeces[i], instance))
            for i in range(num_cores)
        ]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indeces = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indeces = np.argmax(cum_net_resp, axis=0)
        print(indeces.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indeces[i]][i] for i in range(len(indeces))])
        peak_nodes = [
            nodes[node_indeces[indeces[i]][i]] for i in range(len(indeces))
        ]
        del indeces, node_indeces
    if plotvar:
        cum_net_trace = deepcopy(stream[0])
        cum_net_trace.data = cum_net_resp
        cum_net_trace.stats.station = 'NR'
        cum_net_trace.stats.channel = ''
        cum_net_trace.stats.network = 'Z'
        cum_net_trace.stats.location = ''
        cum_net_trace.stats.starttime = stream[0].stats.starttime
        cum_net_trace = obspy.Stream(cum_net_trace)
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
        # np.save('cum_net_resp.npy',cum_net_resp)
        #     cum_net_trace.plot(size=(800,600), equal_scale=False,\
        #                        outfile='NR_timeseries.eps')

    # Find detection within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections into templates')
        # Generate a catalog of detections
        detections_cat = Catalog()
        for j, detection in enumerate(detections):
            print('Converting for detection ' + str(j) + ' of ' +
                  str(len(detections)))
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            print(node)
            # Look up node in nodes and find the associated lags
            index = nodes.index(node)
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code='NA')
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            print('Generating template for detection: ' + str(j))
            template = (_template_gen(event.picks, copy_of_stream,
                                      template_length, 'all'))
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
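            # Illustrative example (values assumed, not from the source):
            # with coherence_thresh=(0.5, 200) and kchan=20 used channels,
            # coh_thresh = 0.5 - 20/200 = 0.4.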
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('Coherence level: ' + str(temp_coher))
                coherent = True
            else:
                print('Template was incoherent, coherence level: ' +
                      str(temp_coher))
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                print('No template for you')
    if plotvar:
        all_detections = [(cum_net_trace[-1].stats.starttime +
                           detection.detect_time).datetime
                          for detection in detections]
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             save=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
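A hedged usage sketch of brightness; every value below (the waveform file, station list, grid nodes, lags, and thresholds) is an illustrative placeholder, not a tested configuration:

# Hedged usage sketch -- all inputs below are toy placeholders.
import numpy as np
from obspy import read

stream = read("day_long.ms")            # hypothetical pre-processed day of data
stations = ["STA1", "STA2"]             # placeholder station names
nodes = [(lat, lon, depth)
         for lat in (-43.50, -43.55)
         for lon in (170.10, 170.15)
         for depth in (5.0, 10.0)]      # 8-node toy grid
lags = np.zeros((len(stations), len(nodes)))  # travel-time delays in seconds

templates, used_nodes = brightness(
    stations, nodes, lags, stream, threshold=8.0, thresh_type='MAD',
    template_length=4.0, template_saveloc='templates',
    coherence_thresh=(0.5, 200), cores=4)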
Example #30
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc. directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths and
        uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event locations
        and uncertainties, picks, and amplitudes and magnitudes if available.

    """

    # Parse information from event file
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection units).
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"], "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"], "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"], "namespace": ns}
    event.extra.dec_coa_norm = {
        "value": event_info["DEC_COA_NORM"],
        "namespace": ns
    }

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
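        # GAU_Err* values are in grid units: ``* factor`` converts to
        # metres, ``/ 1e3`` to kilometres, and kilometer2degrees() to the
        # degrees that QuakeML expects for horizontal uncertainties.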
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {
                    "value": float(pickline["SNR"]),
                    "namespace": ns
                }
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                if phase[0] == local_mag_ph and not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }

                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}

        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
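A hedged usage sketch; the QuakeMigrate run directory and event UID below are hypothetical. Note that the function returns None when no picks file exists for the event:

# Hedged usage sketch -- paths and UID are placeholders.
from pathlib import Path

locate_dir = Path("qm_run/locate")
event_file = locate_dir / "events" / "20180101000000.event"
event = _read_single_event(event_file, locate_dir,
                           units="km", local_mag_ph="S")
if event is not None:  # None is returned when the picks file is missing
    print(event.preferred_origin().time)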