Example #1
def read_cat_ref(cat_file):
    """
    Parses a given refrence catalogue (in ascii format,see the header for details)
    output is Obspy catalogue object
    """
    cat_ref = np.loadtxt(cat_file,delimiter=',',skiprows=1)
    cat = Catalog()
    for i,e in enumerate(cat_ref):
        event = Event(resource_id='smi:local/='+str(i),creation_info='HG')
        origin = Origin()
        origin.time = UTCDateTime(int(e[2]),int(e[3]),int(e[4]),
                                  int(e[7]),int(e[8]),e[9])
        origin.longitude = e[0]
        origin.latitude = e[1]
        origin.depth = e[6] * 1000. #in meters
        event.origins.append(origin)
        if ~(np.isnan(e[10])):
            mag = Magnitude(creation_info='HER')
            mag.mag = e[10]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        if ~(np.isnan(e[11])):
            mag = Magnitude(creation_info='MAR')
            mag.mag = e[11]
            mag.magnitude_type = 'Mw' 
            event.magnitudes.append(mag)
        if ~(np.isnan(e[12])):
            mag = Magnitude(creation_info='SIP')
            mag.mag = e[12]
            mag.magnitude_type = 'Mw' 
            event.magnitudes.append(mag)
        cat.append(event)
    return cat
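A minimal usage sketch for the reader above. The file name is a placeholder, and the column layout described in the comment is inferred from the indexing in the loop, not taken from an actual header:

# Hypothetical call; 'reference_catalog.csv' is a placeholder whose comma-separated
# columns are assumed (from the indexing above) to be:
# lon, lat, year, month, day, <unused>, depth_km, hour, minute, second,
# Mw_HER, Mw_MAR, Mw_SIP (NaN where a magnitude is missing)
cat = read_cat_ref('reference_catalog.csv')
print(cat)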
Example #2
    def _parseRecordAE(self, line, event):
        """
        Parses the 'additional hypocenter error and magnitude record' AE
        """
        orig_time_stderr = self._floatUnused(line[2:7])
        latitude_stderr = self._floatUnused(line[8:14])
        longitude_stderr = self._floatUnused(line[15:21])
        depth_stderr = self._floatUnused(line[22:27])
        gap = self._floatUnused(line[28:33])
        mag1 = self._float(line[33:36])
        mag1_type = line[36:38]
        mag2 = self._float(line[43:46])
        mag2_type = line[46:48]

        evid = event.resource_id.id.split('/')[-1]
        # this record is to be associated with the latest origin
        origin = event.origins[-1]
        self._storeUncertainty(origin.time_errors, orig_time_stderr)
        self._storeUncertainty(origin.latitude_errors,
                               self._latErrToDeg(latitude_stderr))
        self._storeUncertainty(
            origin.longitude_errors,
            self._lonErrToDeg(longitude_stderr, origin.latitude))
        self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
        origin.quality.azimuthal_gap = gap
        if mag1 > 0:
            mag = Magnitude()
            mag1_id = mag1_type.lower()
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(
                agency_id=origin.creation_info.agency_id)
            mag.mag = mag1
            mag.magnitude_type = mag1_type
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
        if mag2 > 0:
            mag = Magnitude()
            mag2_id = mag2_type.lower()
            if mag2_id == mag1_id:
                mag2_id += '2'
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(
                agency_id=origin.creation_info.agency_id)
            mag.mag = mag2
            mag.magnitude_type = mag2_type
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
Example #3
def plot_some_events():
    from obspy.core.event import Catalog, Event, Origin, Magnitude
    from obspy.core import UTCDateTime as UTC

    eqs = """2008-09-10T16:12:03    6.0    -20.40    -69.40     40
    2008-03-24T20:39:06    5.9    -20.10    -69.20     85
    2008-03-01T19:51:59    5.7    -20.10    -69.60     15
    2008-02-15T16:54:04    5.5    -23.00    -70.20     32
    2008-02-04T17:01:30    6.6    -20.20    -70.00     36
    2007-12-16T08:09:16    7.1    -22.80    -70.00     14
    2007-11-14T15:40:51    7.8    -22.34    -70.06     37"""  #GEOFON:-22.30    -69.80
    events = []
    for eq in eqs.split('\n'):
        time, mag, lat, lon, depth = eq.split()
        ev = Event(event_type='earthquake', creation_info='GEOFON',
                    origins=[Origin(time=UTC(time), latitude=float(lat),
                                    longitude=float(lon), depth=float(depth))],
                    magnitudes=[Magnitude(mag=float(mag), magnitude_type='M')])
        events.append(ev)
    cat = Catalog(events[::-1])
    #print cat
    #cat.plot(projection='local')
    lons = [ev.origins[0].longitude for ev in cat]
    lats = [ev.origins[0].latitude for ev in cat]
    dates = [ev.origins[0].time for ev in cat]
    mags = [ev.magnitudes[0].mag for ev in cat]
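The function above collects lons, lats, dates and mags but stops before drawing anything; a minimal sketch of how the collected lists might be plotted with matplotlib (the marker scaling by magnitude is an arbitrary choice, not taken from the original):

import matplotlib.pyplot as plt

def plot_event_lists(lons, lats, mags):
    # simple lon/lat scatter with marker area scaled by magnitude
    fig, ax = plt.subplots()
    ax.scatter(lons, lats, s=[10 * m ** 2 for m in mags], alpha=0.6)
    ax.set_xlabel('longitude')
    ax.set_ylabel('latitude')
    return fig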
Example #4
    def get_event_object(self):
        '''
        Update the event otime, lat, lon and mag from the IRIS (or any other
        client's) catalog.
        '''

        # get parameters from the catalog
        if self.use_catalog == 1:
            print("WARNING using event data from the IRIS catalog")
            cat = self.client.get_events(
                starttime=self.otime - self.sec_before_after_event,
                endtime=self.otime + self.sec_before_after_event)
            self.ev = cat[0]

            # use catalog parameters
            self.otime = self.ev.origins[0].time
            self.elat = self.ev.origins[0].latitude
            self.elon = self.ev.origins[0].longitude
            self.edep = self.ev.origins[0].depth
            self.emag = self.ev.magnitudes[0].mag

        # use parameters from the input file
        else:
            print("WARNING using event data from user-defined catalog")
            #self.ev = Event()
            org = Origin()
            org.latitude = self.elat
            org.longitude = self.elon
            org.depth = self.edep
            org.time = self.otime
            mag = Magnitude()
            mag.mag = self.emag
            mag.magnitude_type = "Mw"
            self.ev.origins.append(org)
            self.ev.magnitudes.append(mag)
Example #5
def __toMagnitude(parser, magnitude_el, origin):
    """
    Parses a given magnitude etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type magnitude_el: etree.element
    :param magnitude_el: magnitude element to be parsed.
    :return: An ObsPy :class:`~obspy.core.event.Magnitude` object.
    """
    global CURRENT_TYPE
    mag = Magnitude()
    mag.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "magnitude"]))
    mag.origin_id = origin.resource_id
    mag.mag, mag.mag_errors = __toFloatQuantity(parser, magnitude_el, "mag")
    # obspyck used to write variance (instead of std) in magnitude error fields
    if CURRENT_TYPE == "obspyck":
        if mag.mag_errors.uncertainty is not None:
            mag.mag_errors.uncertainty = math.sqrt(mag.mag_errors.uncertainty)
            mag.mag_errors.confidence_level = 68.3
    mag.magnitude_type = parser.xpath2obj("type", magnitude_el)
    mag.station_count = parser.xpath2obj("stationCount", magnitude_el, int)
    mag.method_id = "%s/magnitude_method/%s/1" % (
        RESOURCE_ROOT, parser.xpath2obj('program', magnitude_el))
    if str(mag.method_id).lower().endswith("none"):
        mag.method_id = None

    return mag
Example #6
def _block2event(block, seed_map, id_default, ph2comp, eventid_map):
    """
    Read HypoDD event block
    """
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = lines[0].split()
    if eventid_map is not None and id_ in eventid_map:
        id_ = eventid_map[id_]
    time = UTCDateTime(int(yr),
                       int(mo),
                       int(dy),
                       int(hr),
                       int(mn),
                       float(sc),
                       strict=False)
    laterr = None if float(eh) == 0 else float(eh) / DEG2KM
    lonerr = (None if laterr is None or float(la) > 89 else laterr /
              cos(deg2rad(float(la))))
    ez = None if float(ez) == 0 else float(ez) * 1000
    rms = None if float(rms) == 0 else float(rms)
    picks = []
    arrivals = []
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id,
                    phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase,
                          pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    qu = OriginQuality(associated_phase_count=len(picks), standard_error=rms)
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    latitude_errors=laterr,
                    longitude_errors=lonerr,
                    depth_errors=ez,
                    time=time)
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg, resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
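A hedged usage sketch for the block parser above, assuming the module-level imports and constants it relies on (e.g. DEG2KM) are available. The block text, SEED id templates and phase-to-component mapping are invented for illustration; only the field order (yr mo dy hr mn sc lat lon dep mag eh ez rms id on the header line, then station, relative time, weight and phase per pick line) matches what the function splits on:

block = ("2019 01 02 03 04 05.60 -45.0000 178.0000 10.00 3.2 0.5 1.0 0.10 1001\n"
         "WEL 1.23 1.0 P\n"
         "WEL 2.34 0.8 S\n")
event = _block2event(block,
                     seed_map={'WEL': 'NZ.{}.10.HH{}'},  # hypothetical SEED template
                     id_default='XX.{}..HH{}',
                     ph2comp={'P': 'Z', 'S': 'N'},
                     eventid_map=None)
print(event.origins[0].time, len(event.picks), event.magnitudes[0].mag)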
Example #7
 def setUpClass(cls) -> None:
     cls.event = Event(origins=[
         Origin(time=UTCDateTime(2019, 1, 1),
                latitude=-45.,
                longitude=90.0,
                depth=10000.)
     ],
                       magnitudes=[Magnitude(mag=7.4)])
     cls.event.preferred_origin_id = cls.event.origins[0].resource_id
Example #8
def CUSP_to_SC3_rel_mags(det_cat, temp_cat, selfs):
    """
    Take a catalog with relative magnitudes calculated using the Ristau 2009
    CUSP equation and correct them using the updated SeisComP3 scale

    :param det_cat: Catalog of detections and templates with magnitudes
    :param temp_cat: Catalog of template events with magnitudes
    :param selfs: List of strings for self detection ids
    :return:
    """
    # Make a dictionary of the CUSP-derived moment, SeisComP M0 for templates
    temp_mag_dict = {ev.resource_id.id.split('/')[-1]:
                        {'Old M0':
                              local_to_moment(ev.magnitudes[0].mag,
                                              m=0.88, c=0.73),
                         'New M0':
                              local_to_moment(ev.magnitudes[0].mag,
                                              m=0.97, c=0.14)}
                     for ev in temp_cat}
    # Now loop the catalog and redo the calculations
    for det in det_cat:
        # First determine the relative moment (I didn't save these anywhere...)
        eid = det.resource_id.id.split('/')[-1]
        if eid in selfs:
            print('Template event: Adding a Mw magnitude')
            det.magnitudes.append(
                Magnitude(mag=ML_to_Mw(det.magnitudes[0].mag, m=0.97, c=0.14),
                          magnitude_type='Mw',
                          comments=[Comment(text='Ristau et al., 2016 BSSA')]))
            continue
        tid = det.resource_id.id.split('/')[-1].split('_')[0]
        det_mo = Mw_to_M0([m.mag for m in det.magnitudes
                           if m.magnitude_type == 'Mw'][0])
        rel_mo = det_mo / temp_mag_dict[tid]['Old M0']
        new_det_mo = rel_mo * temp_mag_dict[tid]['New M0']
        new_det_Mw = (2. / 3. * np.log10(new_det_mo)) - 9.
        new_det_ML = (0.97 * new_det_Mw) + 0.14
        det.magnitudes.append(
            Magnitude(mag=new_det_Mw, magnitude_type='Mw',
                      comments=[Comment(text='rel_mo={}'.format(rel_mo))]))
        det.magnitudes.append(
            Magnitude(mag=new_det_ML, magnitude_type='ML',
                      comments=[Comment(text='rel_mo={}'.format(rel_mo))]))
        det.preferred_magnitude_id = det.magnitudes[-2].resource_id.id
    return
Example #9
File: core.py Project: zurgeg/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [
                         int(v) if v is not None else 0 for v in comps[:-1]
                     ]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
Example #10
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        oq.standard_error = rms * 1e3
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
Example #11
 def setUpClass(cls) -> None:
     cls.client = Client("GEONET")
     cls.tribe = read_tribe(
         os.path.join(
             os.path.abspath(os.path.dirname(os.path.dirname(__file__))),
             "test_data", "test_tribe.tgz"))
     cls.trigger_event = Event(origins=[
         Origin(time=UTCDateTime(2019, 1, 1),
                latitude=-45.,
                longitude=178.0,
                depth=10000.)
     ],
                               magnitudes=[Magnitude(mag=7.4)])
Example #12
def _block2event(block, seed_map, id_default, ph2comp):
    """
    Read HypoDD event block
    """
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = lines[0].split()
    time = UTCDateTime(int(yr),
                       int(mo),
                       int(dy),
                       int(hr),
                       int(mn),
                       float(sc),
                       strict=False)
    picks = []
    arrivals = []
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id,
                    phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase,
                          pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    qu = None if rms == '0.0' else OriginQuality(standard_error=float(rms))
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    time=time)
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg, resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
Example #13
File: core.py Project: znamy/obspy
    def _parse_magnitude(self, line):
        #    1-5  a5   magnitude type (mb, Ms, ML, mbmle, msmle)
        magnitude_type = line[0:5].strip()
        #      6  a1   min max indicator (<, >, or blank)
        # TODO figure out the meaning of this min max indicator
        min_max_indicator = line[5:6].strip()
        #   7-10  f4.1 magnitude value
        mag = float_or_none(line[6:10])
        #  12-14  f3.1 standard magnitude error
        mag_errors = float_or_none(line[11:14])
        #  16-19  i4   number of stations used to calculate magnitude
        station_count = int_or_none(line[15:19])
        #  21-29  a9   author of the origin
        author = line[20:29].strip()
        #  31-38  a8   origin identification
        origin_id = line[30:38].strip()

        # process items
        if author:
            creation_info = CreationInfo(author=author)
        else:
            creation_info = None
        mag_errors = mag_errors and QuantityError(uncertainty=mag_errors)
        if origin_id:
            origin_id = self._construct_id(['origin', origin_id])
        else:
            origin_id = None
        if not magnitude_type:
            magnitude_type = None
        # magnitudes have no id field, so construct a unique one at least
        resource_id = self._construct_id(['magnitude'], add_hash=True)

        if min_max_indicator:
            msg = 'Magnitude min/max indicator field not yet implemented'
            warnings.warn(msg)

        # combine and return
        mag = Magnitude(magnitude_type=magnitude_type,
                        mag=mag,
                        station_count=station_count,
                        creation_info=creation_info,
                        mag_errors=mag_errors,
                        origin_id=origin_id,
                        resource_id=resource_id)
        # event init always sets an empty QuantityError, even when specifying
        # None, which is strange
        for key in ['mag_errors']:
            setattr(mag, key, None)
        return mag
Example #14
    def create_simple_magnitudes(
        self,
        events: Union[Event, Catalog],
        waveforms: Optional[Union[WaveformClient, Stream]] = None,
        restrict_to_arrivals: bool = False,
        author: Optional[str] = None,
        agency_id: Optional[str] = None,
        evaluation_mode: EvaluationModeType = "automatic",
        evaluation_status: EvaluationStatusType = "preliminary",
    ) -> Dict[Tuple[str, str], Magnitude]:
        """
        Create magnitude objects from calculated source params.

        Returns a dict of {(event_id, magnitude_type): Magnitude}

        Parameters
        ----------
        {ew_params}
        restrict_to_arrivals
            If True, restrict calculations to only include phases associated
            with the arrivals on the origin
        author
            Name of the person generating the magnitudes
        agency_id
            Name of the agency generating the magnitudes
        evaluation_mode
            Evaluation mode of the magnitudes
        evaluation_status
            Evaluation status of the magnitudes
        """
        df = self.calc_source_parameters(
            events, waveforms, restrict_to_arrivals=restrict_to_arrivals)
        eids = set(df.index)
        out = {}
        for eid in eids:
            for col_name, mag_type in self._mag_type_map.items():
                log = "log" in mag_type
                value = df.loc[eid, col_name]
                if not pd.isnull(value):
                    mag = Magnitude(
                        magnitude_type=mag_type,
                        method_id=self._method_uri,
                        creation_info=self._create_info(author, agency_id),
                        mag=np.log10(value) if log else value,
                        evaluation_mode=evaluation_mode,
                        evaluation_status=evaluation_status,
                    )
                    out[(eid, mag_type)] = mag
        return out
Example #15
def read_header_line(string_line):

    new_event = Event()
    line = string_line

    param_event = line.split()[1:]

    ### check if line has the required number of arguments

    if len(param_event) != 14:
        return new_event

    ### Get parameters

    year, month, day = [int(x) for x in param_event[0:3]]
    hour, minu = [int(x) for x in param_event[3:5]]
    sec = float(param_event[5])
    if sec >= 60:
        sec = 59.999
    lat, lon, z = [float(x) for x in param_event[6:9]]
    mag = float(param_event[9])
    errh, errz, rms = [float(x) for x in param_event[10:13]]

    _time = UTCDateTime(year, month, day, hour, minu, sec)
    _origin_quality = OriginQuality(standard_error=rms)

    # change what's next to handle origins with no error estimates

    origin = Origin(time=_time,
                    longitude=lon,
                    latitude=lat,
                    depth=z,
                    longitude_errors=QuantityError(uncertainty=errh),
                    latitude_errors=QuantityError(uncertainty=errh),
                    depth_errors=QuantityError(uncertainty=errz),
                    quality=_origin_quality)

    magnitude = Magnitude(mag=mag, origin_id=origin.resource_id)

    ### Return

    new_event.origins.append(origin)
    new_event.magnitudes.append(magnitude)
    new_event.preferred_origin_id = origin.resource_id
    new_event.preferred_magnitude_id = magnitude.resource_id

    return new_event
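A hedged usage sketch for read_header_line. The leading token and all values are invented; only the count and order of the 14 fields after it (year month day hour minute second lat lon depth mag errh errz rms plus one trailing field) match what the parser unpacks:

header = "HYP 2020 06 15 12 34 56.70 45.10 6.20 8.50 2.30 0.40 0.60 0.12 1"
ev = read_header_line(header)
print(ev.origins[0].time, ev.magnitudes[0].mag)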
Example #16
def _phase_to_event(event_text):
    """
    Function to convert the text for one event in hypoDD phase format to an \
    Event object.

    :type event_text: dict
    :param event_text: dict of two elements, header and picks, header is a \
        str, picks is a list of str.

    :returns: obspy.core.event.Event
    """
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import Pick, WaveformStreamID, Arrival
    from obspy import UTCDateTime
    ph_event = Event()
    # Extract info from header line
    # YR, MO, DY, HR, MN, SC, LAT, LON, DEP, MAG, EH, EZ, RMS, ID
    header = event_text['header'].split()
    ph_event.origins.append(Origin())
    ph_event.origins[0].time = UTCDateTime(
        year=int(header[1]),
        month=int(header[2]),
        day=int(header[3]),
        hour=int(header[4]),
        minute=int(header[5]),
        second=int(header[6].split('.')[0]),
        microsecond=int(float(('0.' + header[6].split('.')[1])) * 1000000))
    ph_event.origins[0].latitude = float(header[7])
    ph_event.origins[0].longitude = float(header[8])
    ph_event.origins[0].depth = float(header[9]) * 1000
    ph_event.origins[0].time_errors['Time_Residual_RMS'] = float(header[13])
    ph_event.magnitudes.append(Magnitude())
    ph_event.magnitudes[0].mag = float(header[10])
    ph_event.magnitudes[0].magnitude_type = 'M'
    # Extract arrival info from picks!
    for i, pick_line in enumerate(event_text['picks']):
        pick = pick_line.split()
        _waveform_id = WaveformStreamID(station_code=pick[0])
        pick_time = ph_event.origins[0].time + float(pick[1])
        ph_event.picks.append(
            Pick(waveform_id=_waveform_id, phase_hint=pick[3], time=pick_time))
        ph_event.origins[0].arrivals.append(
            Arrival(phase=ph_event.picks[i].phase_hint,
                    pick_id=ph_event.picks[i].resource_id))
        ph_event.origins[0].arrivals[i].time_weight = float(pick[2])
    return ph_event
Example #17
 def test_reactor_spin_up(self):
     reactor = Reactor(client=Client("GEONET"),
                       listener=self.listener,
                       trigger_func=self.trigger_func,
                       template_database=self.template_bank,
                       config=self.config)
     trigger_event = Event(origins=[
         Origin(time=UTCDateTime(2019, 1, 1),
                latitude=-45.,
                longitude=178.0,
                depth=10000.)
     ],
                           magnitudes=[Magnitude(mag=7.4)])
     reactor.spin_up(triggering_event=trigger_event)
     time.sleep(10)
     with self.assertRaises(SystemExit):
         reactor.stop()
Example #18
def read_regex(event_file, regex=regex_GEOFON, creation_info='GEOFON'):
    """
    Read events from event_file with the help of given regular expression.
    """
    with open(event_file, 'r') as f:
        filedata = f.read()
    event_matches = re.finditer(regex, filedata, re.VERBOSE + re.MULTILINE)
    list_ = [i.groupdict() for i in event_matches]
    events = []
    for event in list_:
        # convert numbers to float and int types
        for key, item in event.items():
            if util.isint(item):
                event[key] = int(item)
            elif util.isfloat(item):
                event[key] = float(item)
            else:
                event[key] = item.strip()
        if 'latitude_sign' in event and event['latitude_sign'] == 'S':
            event['latitude'] = -event['latitude']
        if 'longitude_sign' in event and event['longitude_sign'] == 'W':
            event['longitude'] = -event['longitude']
        if 'AM' in event:
            ci = creation_info + (' automatic'
                                  if event['AM'] == 'A' else ' manual')
        else:
            ci = creation_info
        ev = Event(
            event_type='earthquake',
            creation_info=ci,
            origins=[
                Origin(time=UTC(event['time']),
                       latitude=event['latitude'],
                       longitude=event['longitude'],
                       depth=event['depth'])
            ],
            magnitudes=[Magnitude(mag=event['magnitude'], magnitude_type='M')],
            event_descriptions=[
                EventDescription(event['flinn'], 'flinn-engdahl region')
            ] if 'flinn' in event else None)
        events.append(ev)
    events.sort(key=lambda x: x.origins[0].time)
    return Catalog(events)
Example #19
def ncedc_dd_to_cat(path):
    """
    Read csv of ncedc dd locations to catalog

    :param path:
    :return:
    """
    cat = Catalog()
    with open(path, 'r') as f:
        next(f)
        for ln in f:
            ot, lat, lon, dp, mag, mt, _, _, _, _, _, eid = ln.split(',')
            ot = UTCDateTime('T'.join(ot.split()))
            o = Origin(latitude=lat, longitude=lon,
                       depth=float(dp) * 1000, time=ot)
            m = Magnitude(mag=mag, magnitude_type=mt)
            ev = Event(origins=[o], magnitudes=[m],
                       resource_id=ResourceIdentifier(id=eid))
            cat.events.append(ev)
    return cat
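A hedged usage sketch that writes a two-line sample in the 12-column layout the parser splits on (origin time with a space between date and time, lat, lon, depth in km, mag, magnitude type, five unused columns, event id); all values and the file name are invented:

sample = ("DateTime,Latitude,Longitude,Depth,Magnitude,MagType,a,b,c,d,e,EventID\n"
          "2020-01-02 03:04:05.67,38.8000,-122.8200,2.10,1.50,Md,x,x,x,x,x,12345\n")
with open('ncedc_dd_sample.csv', 'w') as f:
    f.write(sample)
print(ncedc_dd_to_cat('ncedc_dd_sample.csv'))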
Example #20
def parse_eq_Canada(file_path):
    """
    Parse eqcanada text file to an obspy Catalog

    :param file_path: Path to downloaded file
    :return:
    """
    cat = Catalog()
    with open(file_path, 'r') as f:
        next(f)
        for ln in f:
            line = ln.split('|')
            rid = ResourceIdentifier(id='smi:local/{}'.format(line[0]))
            o = Origin(time=UTCDateTime(line[1]), latitude=float(line[2]),
                       longitude=float(line[3]), depth=float(line[4]))
            m = Magnitude(magnitude_type=line[5], mag=float(line[6]))
            e = Event(resource_id=rid, force_resource_id=False,
                      origins=[o], magnitudes=[m])
            cat.events.append(e)
    return cat
Example #21
def _mags(obj, evid, stamag=False, wid=None):
    mags = []
    pm = None
    for magtype1, magtype2 in MAG_MAP.items():
        magkey = 'mean ' * (not stamag) + 'magnitude ' + magtype1
        if magkey in obj:
            magv = obj[magkey]
            if 'inf' in magv:
                warn('invalid value for magnitude: %s (event id %s)'
                     % (magv, evid))
            else:
                magv = float(magv)
                mag = (StationMagnitude(station_magnitude_type=magtype2,
                                        mag=magv, waveform_id=wid)
                       if stamag else
                       Magnitude(magnitude_type=magtype2, mag=magv))
                mags.append(mag)
    if len(mags) == 1:
        pm = mags[0].resource_id
    return mags, pm
Example #22
 def test_more_than_three_mags(self):
     cat = Catalog()
     cat += full_test_event()
     cat[0].magnitudes.append(
         Magnitude(mag=0.9,
                   magnitude_type='MS',
                   creation_info=CreationInfo('TES'),
                   origin_id=cat[0].origins[0].resource_id))
     with NamedTemporaryFile(suffix='.out') as tf:
         # raises UserWarning: mb is not convertible
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             cat.write(tf.name, format='nordic')
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             cat_back = read_events(tf.name)
         for event_1, event_2 in zip(cat, cat_back):
             self.assertTrue(
                 len(event_1.magnitudes) == len(event_2.magnitudes))
             _assert_similarity(event_1, event_2)
Example #23
def read_origin(event_str):
    """
    Read the origin information from the REST file string

    :param event_str: Contents of file as list of str
    :type event_str: list

    :returns: :class:`obspy.core.event.Event`
    """
    event = Event()

    head = event_str[0].split()
    try:
        gap = float(head[17])
    except IndexError:
        gap = None
    origin = Origin(
        time=UTCDateTime(
            year=int(head[0]), julday=int(head[1]), hour=int(head[2]),
            minute=int(head[3])) + float(head[4]),
        latitude=float(head[5]), longitude=float(head[6]),
        depth=float(head[7]) * 1000, quality=OriginQuality(
            standard_error=float(head[9]),
            azimuthal_gap=gap,
            used_phase_count=int(head[17])),
        longitude_errors=QuantityError(
            uncertainty=kilometer2degrees(float(head[12]))),
        latitude_errors=QuantityError(
            uncertainty=kilometer2degrees(float(head[11]))),
        depth_errors=QuantityError(uncertainty=float(head[13]) * 1000),
        method_id=ResourceIdentifier("smi:local/REST"),
        evaluation_mode="automatic")
    event.origins.append(origin)
    try:
        event.magnitudes.append(Magnitude(
            mag=float(head[19]), magnitude_type="M"))
    except IndexError:
        pass
    return event
Example #24
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
Example #25
 def test_reactor_spin_up(self):
     rt_client = RealTimeClient(server_url="link.geonet.org.nz")
     reactor = Reactor(client=Client("GEONET"),
                       rt_client=rt_client,
                       listener=self.listener,
                       trigger_func=self.trigger_func,
                       template_database=self.template_bank,
                       template_lookup_kwargs=dict(),
                       real_time_tribe_kwargs=dict(threshold=8,
                                                   threshold_type="MAD",
                                                   trig_int=2),
                       plot_kwargs=dict(),
                       listener_kwargs=dict(make_templates=False))
     trigger_event = Event(origins=[
         Origin(time=UTCDateTime(2019, 1, 1),
                latitude=-45.,
                longitude=178.0,
                depth=10000.)
     ],
                           magnitudes=[Magnitude(mag=7.4)])
     reactor.background_spin_up(triggering_event=trigger_event)
     time.sleep(10)
     reactor.stop()
Example #26
    def _on_file_save(self):
        """
        Creates a new obspy.core.event.Magnitude object and writes the moment
        magnitude to it.
        """
        # Get the save filename.
        filename = QtGui.QFileDialog.getSaveFileName(caption="Save as...")
        filename = os.path.abspath(str(filename))
        mag = Magnitude()
        mag.mag = self.final_result["moment_magnitude"]
        mag.magnitude_type = "Mw"
        mag.station_count = self.final_result["station_count"]
        mag.evaluation_mode = "manual"
        # Link to the used origin.
        mag.origin_id = self.current_state["event"].origins[0].resource_id
        mag.method_id = "Magnitude picker Krischer"
        # XXX: Potentially change once this program gets more stable.
        mag.evaluation_status = "preliminary"
        # Write the other results as Comments.
        mag.comments.append( \
            Comment("Seismic moment in Nm: %g" % \
            self.final_result["seismic_moment"]))
        mag.comments.append( \
            Comment("Circular source radius in m: %.2f" % \
            self.final_result["source_radius"]))
        mag.comments.append( \
            Comment("Stress drop in Pa: %.2f" % \
            self.final_result["stress_drop"]))
        mag.comments.append( \
                Comment("Very rough Q estimation: %.1f" % \
            self.final_result["quality_factor"]))

        event = copy.deepcopy(self.current_state["event"])
        event.magnitudes.append(mag)
        cat = Catalog()
        cat.events.append(event)
        cat.write(filename, format="quakeml")
Example #27
    def test_creating_minimal_quakeml_with_mt(self):
        """
        Tests the creation of a minimal QuakeML containing origin, magnitude
        and moment tensor.
        """
        # Rotate into physical domain
        lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1)
        mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18
        scalar_moment = math.sqrt(
            mrr ** 2 + mtt ** 2 + mpp ** 2 + mtr ** 2 + mpr ** 2 + mtp ** 2)
        moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1)

        # Initialise event
        ev = Event(event_type="earthquake")

        ev_origin = Origin(time=org_time, latitude=lat, longitude=lon,
                           depth=depth, resource_id=ResourceIdentifier())
        ev.origins.append(ev_origin)

        # populate event moment tensor
        ev_tensor = Tensor(m_rr=mrr, m_tt=mtt, m_pp=mpp, m_rt=mtr, m_rp=mpr,
                           m_tp=mtp)

        ev_momenttensor = MomentTensor(tensor=ev_tensor)
        ev_momenttensor.scalar_moment = scalar_moment
        ev_momenttensor.derived_origin_id = ev_origin.resource_id

        ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor)
        ev.focal_mechanisms.append(ev_focalmechanism)

        # populate event magnitude
        ev_magnitude = Magnitude()
        ev_magnitude.mag = moment_magnitude
        ev_magnitude.magnitude_type = 'Mw'
        ev_magnitude.evaluation_mode = 'automatic'
        ev.magnitudes.append(ev_magnitude)

        # write QuakeML file
        cat = Catalog(events=[ev])
        memfile = io.BytesIO()
        cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML)

        memfile.seek(0, 0)
        new_cat = _read_quakeml(memfile)
        self.assertEqual(len(new_cat), 1)
        event = new_cat[0]
        self.assertEqual(len(event.origins), 1)
        self.assertEqual(len(event.magnitudes), 1)
        self.assertEqual(len(event.focal_mechanisms), 1)
        org = event.origins[0]
        mag = event.magnitudes[0]
        fm = event.focal_mechanisms[0]
        self.assertEqual(org.latitude, lat)
        self.assertEqual(org.longitude, lon)
        self.assertEqual(org.depth, depth)
        self.assertEqual(org.time, org_time)
        # Moment tensor.
        mt = fm.moment_tensor.tensor
        self.assertTrue((fm.moment_tensor.scalar_moment - scalar_moment) /
                        scalar_moment < scalar_moment * 1E-10)
        self.assertEqual(mt.m_rr, mrr)
        self.assertEqual(mt.m_pp, mpp)
        self.assertEqual(mt.m_tt, mtt)
        self.assertEqual(mt.m_rt, mtr)
        self.assertEqual(mt.m_rp, mpr)
        self.assertEqual(mt.m_tp, mtp)
        # Mag
        self.assertAlmostEqual(mag.mag, moment_magnitude)
        self.assertEqual(mag.magnitude_type, "Mw")
        self.assertEqual(mag.evaluation_mode, "automatic")
Example #28
for line in lines:
    line_elements = line.split()
    eid = line_elements[-1]
    otime = uh.eid2otime(eid)
    elon = line_elements[6]
    elat = line_elements[7]
    edep = float(line_elements[8]) * 1000.0  # meters
    emag = line_elements[16]

    # create event object
    orig = Origin()
    orig.longitude = elon
    orig.latitude = elat
    orig.depth = edep
    orig.time = otime
    mag = Magnitude()
    mag.mag = emag
    mag.magnitude_type = "Mw"
    ev = Event()
    ev.origins.append(orig)
    ev.magnitudes.append(mag)

    if send_request:
        # get waveforms
        client = Client("IRIS")
        getwaveform_iris.run_get_waveform(c=client,
                                          event=ev,
                                          min_dist=min_dist,
                                          max_dist=max_dist,
                                          before=tbefore_sec,
                                          after=tafter_sec,
Example #29
    lat = out_3[i][6::33]
    lon = out_3[i][7::33]
    orid = out_3[i][8::33]
    dep = out_3[i][9::33]
    ml = out_3[i][10::33]
    for x, times in enumerate(pick_time):
        event.resource_id = str(out_3[i][0])  # assign evid
        rd = str(out_3[i][0])
        picks = Pick(resource_id=rd, time=UTCDateTime(pick_time[x]),
                     waveform_id=WaveformStreamID(network_code="CI",
                                                  station_code=str(sta[x]),
                                                  channel_code=str(cha[x])),
                     phase_hint=str(phase[x]))
        origin = Origin(resource_id=str(orid[x]),
                        time=UTCDateTime(e_time[x]),
                        longitude=str(lon[x]),
                        latitude=str(lat[x]),
                        depth=str(dep[x]))
        magnitude = Magnitude(mag=ml[x], magnitude_type="M",
                              origin_id=str(orid[x]))
        arrival = Arrival(pick_id=rd, phase=str(phase[x]))
        event.picks.append(picks)
        event.origins.append(origin)
        origin.arrivals.append(arrival)
        event.magnitudes.append(magnitude)
    db_catalog.append(event)


#Only include picks for stations used 
all_picks=[]
for event in db_catalog:
    stations = ['KCT', 'KMPB', 'KCR', 'KHMB', 'KCS', 'KCO', 'KMR', 'KPP']                
    event.picks = [pick for pick in event.picks if pick.waveform_id.station_code in stations]
    all_picks+= [(pick.waveform_id.station_code, pick.waveform_id.channel_code) for pick in event.picks]
Example #30
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
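In ObsPy this NDK reader is normally reached through the generic read_events() entry point rather than called directly; a minimal sketch, with a placeholder file name:

from obspy import read_events

cat = read_events('gcmt_catalog.ndk', format='NDK')  # placeholder file name
print(cat)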