Example #1
 def test_clear_method_resets_objects(self):
     """
     Tests that the clear() method properly resets all objects. Test for
     #449.
     """
     # Test with basic event object.
     e = Event(force_resource_id=False)
     e.comments.append(Comment(text="test"))
     e.event_type = "explosion"
     self.assertEqual(len(e.comments), 1)
     self.assertEqual(e.event_type, "explosion")
     e.clear()
     self.assertEqual(e, Event(force_resource_id=False))
     self.assertEqual(len(e.comments), 0)
     self.assertEqual(e.event_type, None)
     # Test with pick object. Does not really fit in the event test case but
     # it tests the same thing...
     p = Pick()
     p.comments.append(Comment(text="test"))
     p.phase_hint = "p"
     self.assertEqual(len(p.comments), 1)
     self.assertEqual(p.phase_hint, "p")
     # Add some more random attributes. These should disappear upon
     # cleaning.
     p.test_1 = "a"
     p.test_2 = "b"
     self.assertEqual(p.test_1, "a")
     self.assertEqual(p.test_2, "b")
     p.clear()
     self.assertEqual(len(p.comments), 0)
     self.assertEqual(p.phase_hint, None)
     self.assertFalse(hasattr(p, "test_1"))
     self.assertFalse(hasattr(p, "test_2"))
Example #2
    def _parse_record_hy(self, line):
        """
        Parses the 'hypocenter' record HY
        """
        date = line[2:10]
        time = line[11:20]
        # unused: location_quality = line[20]
        latitude = self._float(line[21:27])
        lat_type = line[27]
        longitude = self._float(line[29:36])
        lon_type = line[36]
        depth = self._float(line[38:43])
        # unused: depth_quality = line[43]
        standard_dev = self._float(line[44:48])
        station_number = self._int(line[48:51])
        # unused: version_flag = line[51]
        fe_region_number = line[52:55]
        fe_region_name = self._decode_fe_region_number(fe_region_number)
        source_code = line[55:60].strip()

        event = Event()
        # FIXME: a smarter way to define evid?
        evid = date + time
        res_id = '/'.join((res_id_prefix, 'event', evid))
        event.resource_id = ResourceIdentifier(id=res_id)
        description = EventDescription(
            type='region name',
            text=fe_region_name)
        event.event_descriptions.append(description)
        description = EventDescription(
            type='Flinn-Engdahl region',
            text=fe_region_number)
        event.event_descriptions.append(description)
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin', evid))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo()
        if source_code:
            origin.creation_info.agency_id = source_code
        else:
            origin.creation_info.agency_id = 'USGS-NEIC'
        res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
        origin.earth_model_id = ResourceIdentifier(id=res_id)
        origin.time = UTCDateTime(date + time)
        origin.latitude = latitude * self._coordinate_sign(lat_type)
        origin.longitude = longitude * self._coordinate_sign(lon_type)
        origin.depth = depth * 1000
        origin.depth_type = 'from location'
        origin.quality = OriginQuality()
        origin.quality.associated_station_count = station_number
        origin.quality.standard_error = standard_dev
        # associated_phase_count can be incremented in records 'P ' and 'S '
        origin.quality.associated_phase_count = 0
        # depth_phase_count can be incremented in record 'S '
        origin.quality.depth_phase_count = 0
        origin.origin_type = 'hypocenter'
        origin.region = fe_region_name
        event.origins.append(origin)
        return event
Example #3
 def test_449(self):
     """
     Very basic test for #449
     """
     e = Event()
     e.comments.append("test")
     e.clear()
     self.assertTrue(e == Event())
Example #4
 def test_str_empty_origin(self):
     """
     Ensure an event with an empty origin returns a str without raising a
     TypeError (#2119).
     """
     event = Event(origins=[Origin()])
     out = event.short_str()
     self.assertIsInstance(out, str)
     self.assertEqual(out, 'None | None, None')
Example #5
File: core.py Project: Brtle/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [int(v) if v is not None else 0
                                 for v in comps[:-1]]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
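A minimal usage sketch for this reader, reached through ObsPy's plugin
mechanism; the file name is hypothetical and the column order (lon, lat,
year, month, day, mag, depth, hour, minute, second) is the standard ZMAP
layout assumed by _STD_ZMAP_COLUMNS:

from obspy import read_events

# one tab-separated ZMAP row (invented values)
row = "\t".join(["-149.073", "56.046", "2018", "1", "23",
                 "7.9", "25.0", "9", "31", "42.0"])
with open("one_event.zmap", "w") as f:  # hypothetical file name
    f.write(row + "\n")
cat = read_events("one_event.zmap", format="ZMAP")
print(cat[0].origins[0].time, cat[0].magnitudes[0].mag)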
Example #6
 def test_issue_2173(self):
     """
     Ensure events with empty origins are equal after round-trip to disk.
     See #2173.
     """
     # create event and save to disk
     origin = Origin(time=UTCDateTime('2016-01-01'))
     event1 = Event(origins=[origin])
     bio = io.BytesIO()
     event1.write(bio, 'quakeml')
     # read from disk
     event2 = read_events(bio)[0]
     # saved and loaded event should be equal
     self.assertEqual(event1, event2)
Example #7
def make_event(catalog_entry):
    """ Creates an ObsPy Event object from 
    a line of STP event output.
    """
    #print(catalog_entry)
    fields = catalog_entry.split()

    evid = fields[0]
    etype = fields[1]
    origin_time = UTCDateTime(
        datetime.strptime(fields[3], "%Y/%m/%d,%H:%M:%S.%f"))

    lat = float(fields[4])
    lon = float(fields[5])
    depth = float(fields[6])
    mag = float(fields[7])
    magtype = fields[8]

    res_id = ResourceIdentifier(id=evid)
    origin = Origin(latitude=lat, longitude=lon, depth=depth, time=origin_time)

    magnitude = Magnitude(mag=mag, magnitude_type=MAGTYPE_MAPPING[magtype])
    event = Event(resource_id=res_id,
                  event_type=ETYPE_MAPPING[etype],
                  origins=[origin],
                  magnitudes=[magnitude])
    return event
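A usage sketch under stated assumptions: the line below is an invented
STP-style record (make_event skips fields[2]), and it presumes that
ETYPE_MAPPING and MAGTYPE_MAPPING contain entries for 'eq' and 'l':

# hypothetical STP catalog line: evid, etype, (skipped), origin time,
# lat, lon, depth, mag, magtype
line = "10001234 eq local 2020/01/02,03:04:05.678 34.12 -117.57 7.8 3.2 l"
event = make_event(line)
print(event.resource_id, event.origins[0].time, event.magnitudes[0].mag)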
Example #8
 def test_write_empty(self):
     """
     Function to check that writing a blank event works as it should.
     """
     test_event = Event()
     with self.assertRaises(NordicParsingError):
         _write_nordic(test_event,
                       filename=None,
                       userid='TEST',
                       evtype='L',
                       outdir='.',
                       wavefiles='test')
     test_event.origins.append(Origin())
     with self.assertRaises(NordicParsingError):
         _write_nordic(test_event,
                       filename=None,
                       userid='TEST',
                       evtype='L',
                       outdir='.',
                       wavefiles='test')
     test_event.origins[0].time = UTCDateTime()
     with TemporaryWorkingDirectory():
         test_sfile = _write_nordic(test_event,
                                    filename=None,
                                    userid='TEST',
                                    evtype='L',
                                    outdir='.',
                                    wavefiles='test')
         self.assertTrue(os.path.isfile(test_sfile))
Example #9
def test_with_quakeml():
    np1 = NodalPlane(strike=259, dip=74, rake=10)
    np2 = NodalPlane(strike=166, dip=80, rake=164)
    nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    focal = FocalMechanism(nodal_planes=nodal_planes)
    event = Event(focal_mechanisms=[focal])
    catalog = Catalog(events=[event])
    event_text = '''<shakemap-data code_version="4.0" map_version="1">
<earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9"
time="2018-01-23T09:31:42Z"
depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/>
</shakemap-data>'''
    tempdir = tempfile.mkdtemp()
    try:
        xmlfile = os.path.join(tempdir, 'quakeml.xml')
        catalog.write(xmlfile, format="QUAKEML")
        eventfile = os.path.join(tempdir, 'event.xml')
        with open(eventfile, 'wt') as f:
            f.write(event_text)
        # let any exception propagate so the test fails with a traceback
        read_moment_quakeml(xmlfile)
        Origin.fromFile(eventfile, momentfile=xmlfile)
    finally:
        shutil.rmtree(tempdir)
Example #10
def plot_some_events():
    from obspy.core.event import Catalog, Event, Origin, Magnitude
    from obspy.core import UTCDateTime as UTC

    eqs = """2008-09-10T16:12:03    6.0    -20.40    -69.40     40
    2008-03-24T20:39:06    5.9    -20.10    -69.20     85
    2008-03-01T19:51:59    5.7    -20.10    -69.60     15
    2008-02-15T16:54:04    5.5    -23.00    -70.20     32
    2008-02-04T17:01:30    6.6    -20.20    -70.00     36
    2007-12-16T08:09:16    7.1    -22.80    -70.00     14
    2007-11-14T15:40:51    7.8    -22.34    -70.06     37"""  #GEOFON:-22.30    -69.80
    events = []
    for eq in eqs.split('\n'):
        time, mag, lat, lon, depth = eq.split()
        ev = Event(event_type='earthquake', creation_info='GEOFON',
                   origins=[Origin(time=UTC(time), latitude=float(lat),
                                   longitude=float(lon),
                                   depth=float(depth) * 1000)],  # km -> m
                   magnitudes=[Magnitude(mag=float(mag), magnitude_type='M')])
        events.append(ev)
    cat = Catalog(events[::-1])
    # print(cat)
    # cat.plot(projection='local')
    lons = [ev.origins[0].longitude for ev in cat]
    lats = [ev.origins[0].latitude for ev in cat]
    dates = [ev.origins[0].time for ev in cat]
    mags = [ev.magnitudes[0].mag for ev in cat]
Example #11
    def build(self, evid=None, orid=None, delete=False, phase_data=False, focal_data=False, mt=None):
        """
        Build up an Event object
    
        Inputs
        ------
        evid       : int of EVID
        orid       : int of ORID
        delete     : bool of whether to mark event deleted (False)
        phase_data : bool of whether to include phase arrivals for event (False)
        focal_data : bool of whether to look for focal mechanisms (False)
        mt         : file/contents of NSL moment tensor (Ichinose)

        Returns : obspy.core.event.Event
        
        """
        #--- Build an Event based on params --------------------------------------
        if evid is None and orid:
            try:
                evid = self._evid(orid)
            except Exception:
                pass
        # 1. Build a stub Event to send a delete
        if delete:
            self.event = Event(event_type="not existing")
            self.event.creation_info = CreationInfo(version=evid, creation_time=UTCDateTime())
            self.event.resource_id = self._rid(self.event)
        elif mt:
            # 2. Make a custom event (mt is a special-formatted text file)
            self.event = mt2event(mt, rid_factory=CustomRIDFunction(self.auth_id))
        # 3. Use EventBuilder to get Event from the db
        else:
            self._build(orid=orid, phases=phase_data, focals=focal_data, event_type="not reported")
        
        # if no EVID reported, try to get it from the db (version attribute)
        if not evid:
            evid = int(self.event.creation_info.version)
        
        # Add a nearest event string, try to set event type with custom etype additions
        prefor = self.event.preferred_origin()
        if prefor is not None:
            event_type = self.origin_event_type(prefor, emap=self.emap)
            if event_type is None:
                event_type = "earthquake"
            self.event.event_type = event_type
            ed = self.get_nearest_event_description(prefor.latitude, prefor.longitude)
            self.event.event_descriptions = [ed]

        # get rid of preferred if sending focalmech, so it doesn't clobber a 
        # better origin (This is a hack to deal with USGS splitting QuakeML 
        # into different products, In theory, one should be able to have a 
        # QuakeML file with everything, but alas)
        if focal_data:
            self.event.preferred_origin_id = None
            self.event.preferred_magnitude_id = None

        # Generate NSL namespace attributes
        extra_attributes = self.quakeml_anss_attrib(evid)
        self.event.extra = self.extra_anss(**extra_attributes)
Example #12
def _block2event(block, seed_map, id_default, ph2comp, eventid_map):
    """
    Read HypoDD event block
    """
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = lines[0].split()
    if eventid_map is not None and id_ in eventid_map:
        id_ = eventid_map[id_]
    time = UTCDateTime(int(yr),
                       int(mo),
                       int(dy),
                       int(hr),
                       int(mn),
                       float(sc),
                       strict=False)
    laterr = None if float(eh) == 0 else float(eh) / DEG2KM
    lonerr = (None if laterr is None or float(la) > 89 else laterr /
              cos(deg2rad(float(la))))
    ez = None if float(ez) == 0 else float(ez) * 1000
    rms = None if float(rms) == 0 else float(rms)
    picks = []
    arrivals = []
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id,
                    phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase,
                          pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    qu = OriginQuality(associated_phase_count=len(picks), standard_error=rms)
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    latitude_errors=laterr,
                    longitude_errors=lonerr,
                    depth_errors=ez,
                    time=time)
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg, resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
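To illustrate the block layout this parser expects, a sketch with an invented
event block (header: yr mo dy hr mn sc lat lon dep mag eh ez rms id, then one
line per pick); the seed_map, id_default and ph2comp values are assumptions:

block = """2020 01 02 03 04 05.67 45.123 7.456 9.8 2.3 0.5 0.4 0.12 1001
STA1 3.45 1.0 P
STA1 6.01 0.8 S"""
event = _block2event(block, seed_map={}, id_default="XX.{}..HH{}",
                     ph2comp={"P": "Z", "S": "N"}, eventid_map=None)
print(event.origins[0].time, len(event.picks))  # one origin, two picks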
Example #13
def _qc_event(
    event: Event,
    min_stations: Optional[int] = None,
    auto_picks: bool = True,
    auto_event: bool = True,
    event_type: Union[list, str] = None,
) -> tuple:
    """
    QC an individual event - removes picks in place.

    Returns
    -------
    tuple of (event: Event, keep: bool)
    """
    if event_type is not None and isinstance(event_type, str):
        event_type = [event_type]
    if event_type is not None and event.event_type not in event_type:
        return event, False
    elif not auto_event:
        if "manual" not in [ori.evaluation_mode for ori in event.origins]:
            return event, False
    if not auto_picks:
        pick_ids_to_remove = [
            p.resource_id for p in event.picks
            if p.evaluation_mode == "automatic"
        ]
        # remove arrivals and amplitudes and station_magnitudes
        event.picks = [
            p for p in event.picks if p.resource_id not in pick_ids_to_remove
        ]
        event = remove_unreferenced(event)[0]
    stations = {p.waveform_id.station_code for p in event.picks}
    if min_stations is not None and len(stations) < min_stations:
        return event, False
    return event, True
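A brief usage sketch, assuming an event built elsewhere: keep is False when
the event fails any criterion, and automatic picks are stripped in place when
auto_picks is False:

event, keep = _qc_event(event, min_stations=5, auto_picks=False,
                        auto_event=False, event_type="earthquake")
if not keep:
    print("event rejected by QC")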
Example #14
    def __init__(
        self,
        or_time: utct = utct("2020-3-10T12:00:00"),
        lat_src: float = 10.99032013,
        lon_src: float = 170,
        lat_rec: float = 4.502384,
        lon_rec: float = 135.623447,
        depth: float = 45.0,
        name: str = "Test_Event",
    ):
        """
    Create a seismic event
    :param rec_lat: latitude receiver
    :param rec_lon: longitude receiver
    """
        self.event = Event()
        self.event.latitude = lat_src
        self.event.longitude = lon_src
        self.event.depth = depth
        self.event.name = name

        self.lat_rec = lat_rec
        self.lon_rec = lon_rec

        epi, az, baz = EventObj.Get_location(
            self.event.latitude, self.event.longitude, self.lat_rec, self.lon_rec
        )

        self.event.distance = epi
        self.event.az = az
        self.event.baz = baz
        self.event.origin_time = or_time
Example #15
    def from_event(cls, event: Event):
        try:
            origin = event.preferred_origin() or event.origins[-1]
        except IndexError:
            raise NotImplementedError("Event needs an origin")
        polarities = []

        for pick in event.picks:
            if pick.polarity and pick.phase_hint.startswith("P"):
                # Get the arrival
                pick_seed_id = pick.waveform_id.get_seed_string()
                print(f"Found polarity of {pick.polarity} for {pick_seed_id}")
                for arr in origin.arrivals:
                    arr_pick = arr.pick_id.get_referred_object()
                    if arr_pick and arr_pick.waveform_id.get_seed_string(
                    ) == pick_seed_id:
                        if arr.phase == "P":
                            if arr.takeoff_angle < 0:
                                toa = abs(arr.takeoff_angle)
                                az = arr.azimuth % 360
                            else:
                                toa = arr.takeoff_angle
                                az = (arr.azimuth + 180) % 360
                            polarity = Polarity(az,
                                                toa,
                                                pick.polarity,
                                                station=pick_seed_id)
                            polarities.append(polarity)
                            break
                else:
                    print(
                        f"No arrival found for polarity pick on {pick_seed_id}"
                    )
        return cls(polarities=polarities)
Example #16
 def setUpClass(cls) -> None:
     cls.event = Event(origins=[Origin(time=UTCDateTime(2001, 3, 26))],
                       picks=[
                           Pick(time=UTCDateTime(2001, 3, 26, 1, 1, 1)),
                           Pick(time=UTCDateTime(2001, 3, 26, 1, 1, 5))
                       ])
     cls.event.preferred_origin_id = cls.event.origins[0].resource_id
Example #17
def read_cat_ref(cat_file):
    """
    Parses a given refrence catalogue (in ascii format,see the header for details)
    output is Obspy catalogue object
    """
    cat_ref = np.loadtxt(cat_file, delimiter=',', skiprows=1)
    cat = Catalog()
    for i, e in enumerate(cat_ref):
        event = Event(resource_id='smi:local/=' + str(i), creation_info='HG')
        origin = Origin()
        origin.time = UTCDateTime(int(e[2]), int(e[3]), int(e[4]),
                                  int(e[7]), int(e[8]), e[9])
        origin.longitude = e[0]
        origin.latitude = e[1]
        origin.depth = e[6] * 1000.  # in meters
        event.origins.append(origin)
        if not np.isnan(e[10]):
            mag = Magnitude(creation_info='HER')
            mag.mag = e[10]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        if not np.isnan(e[11]):
            mag = Magnitude(creation_info='MAR')
            mag.mag = e[11]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        if not np.isnan(e[12]):
            mag = Magnitude(creation_info='SIP')
            mag.mag = e[12]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        cat.append(event)
    return cat
Example #18
 def create_catalog(time):
     """
     Create a Catalog with an event whose creation info holds a UTCDateTime.
     """
     creation_info = CreationInfo(creation_time=obspy.UTCDateTime(time))
     event = Event(creation_info=creation_info)
     return Catalog(events=[event])
Example #19
def read_header_line(string_line):

    new_event = Event()
    line = string_line

    param_event = line.split()[1:]

    ### check if line has the required number of arguments

    if len(param_event) != 14:
        return new_event

    ### Get parameters

    year, month, day = [int(x) for x in param_event[0:3]]
    hour, minu = [int(x) for x in param_event[3:5]]
    sec = float(param_event[5])
    if sec >= 60:
        sec = 59.999
    lat, lon, z = [float(x) for x in param_event[6:9]]
    mag = float(param_event[9])
    errh, errz, rms = [float(x) for x in param_event[10:13]]

    _time = UTCDateTime(year, month, day, hour, minu, sec)
    _origin_quality = OriginQuality(standard_error=rms)

    # TODO: change what follows to handle origins with no error estimates

    origin = Origin(time=_time,
                    longitude=lon,
                    latitude=lat,
                    depth=z,
                    longitude_errors=QuantityError(uncertainty=errh),
                    latitude_errors=QuantityError(uncertainty=errh),
                    depth_errors=QuantityError(uncertainty=errz),
                    quality=_origin_quality)

    magnitude = Magnitude(mag=mag, origin_id=origin.resource_id)

    ### Return

    new_event.origins.append(origin)
    new_event.magnitudes.append(magnitude)
    new_event.preferred_origin_id = origin.resource_id
    new_event.preferred_magnitude_id = magnitude.resource_id

    return new_event
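A usage sketch with an invented header line; the first token is the record
label that split()[1:] discards, followed by the 14 values the parser expects:

line = "1 2020 1 2 3 4 5.6 45.1 7.4 9.8 2.3 0.5 0.4 0.12 0"
event = read_header_line(line)
print(event.preferred_origin().time, event.magnitudes[0].mag)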
Example #20
def attach_all_resource_ids(event: Event):
    """recurse all objects in a events and set referred objects"""
    rid_to_object = {}
    # first pass, bind all resource ids to parent
    for rid, parent, attr in yield_obj_parent_attr(event, ResourceIdentifier):
        if attr == "resource_id":
            # if the object has already been set and is not unique, raise
            rid.set_referred_object(parent)
            if rid.id in rid_to_object:
                assert rid.get_referred_object() is rid_to_object[rid.id]
            # else set referred object
            rid_to_object[rid.id] = parent
    # second pass, bind all other resource ids to correct resource ids
    for rid, parent, attr in yield_obj_parent_attr(event, ResourceIdentifier):
        if attr != "resource_id" and rid.id in rid_to_object:
            rid.set_referred_object(rid_to_object[rid.id])
    event.scope_resource_ids()
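A hedged usage sketch: after a deep copy, ResourceIdentifier objects may
still resolve to the original objects, so re-binding keeps lookups such as
preferred_origin() internal to the copy (yield_obj_parent_attr is assumed to
come from obsplus):

import copy

event_copy = copy.deepcopy(event)  # 'event' is some existing Event
attach_all_resource_ids(event_copy)
# assuming the event defines a preferred origin, the id now resolves
# to the origin object inside the copy
assert event_copy.preferred_origin() in event_copy.origins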
Example #21
def find_event(cat, egf_time):
    event = Event()
    for candidate in cat:
        # keep the (last) event whose most recent origin is within 1 s
        if abs(candidate.origins[-1].time - egf_time) < 1:
            event = candidate
    return event
Example #22
 def test_separated_dist_longlong(self):
     peaks = np.array([100, 65, 20, 120, 300])
     index = np.array([2000, 5000, 10, 70, 500])
     index = index * int(1e10)
     trig_int = 100 * int(1e10)
     hypocentral_separation = 10.0
     catalog = Catalog([
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=80.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
     ])
     peaks_out = decluster_distance_time(
         peaks, index, trig_int, catalog, hypocentral_separation,
         threshold=0)
     assert len(peaks) == len(peaks_out)
Example #23
def event_magnitude(event: Event) -> Optional[float]:
    """ Get a magnitude for the event. """
    try:
        magnitude = event.preferred_magnitude() or event.magnitudes[0]
        magnitude = magnitude.mag
    except IndexError:
        magnitude = None
    return magnitude
Example #24
def inter_event_distance(event1: Event, event2: Event) -> float:
    """
    Calculate the distance (in degrees) between two events

    Returns
    -------
        distance in degrees between the events
    """
    try:
        origin_1 = event1.preferred_origin() or event1.origins[0]
        origin_2 = event2.preferred_origin() or event2.origins[0]
    except IndexError:
        return 180.
    return locations2degrees(lat1=origin_1.latitude,
                             long1=origin_1.longitude,
                             lat2=origin_2.latitude,
                             long2=origin_2.longitude)
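A quick sketch: two single-origin events a quarter of the globe apart on the
equator come out 90 degrees apart:

ev1 = Event(origins=[Origin(latitude=0.0, longitude=0.0)])
ev2 = Event(origins=[Origin(latitude=0.0, longitude=90.0)])
print(inter_event_distance(ev1, ev2))  # 90.0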
Example #25
 def test_preferred_tags(self):
     """
     Testing preferred magnitude, origin and focal mechanism tags
     """
     # testing empty event
     ev = Event()
     self.assertEqual(ev.preferred_origin(), None)
     self.assertEqual(ev.preferred_magnitude(), None)
     self.assertEqual(ev.preferred_focal_mechanism(), None)
     # testing existing event
     filename = os.path.join(self.path, 'preferred.xml')
     catalog = read_events(filename)
     self.assertEqual(len(catalog), 1)
     ev_str = "Event:\t2012-12-12T05:46:24.120000Z | +38.297, +142.373 " + \
              "| 2.0 MW"
     self.assertIn(ev_str, str(catalog.events[0]))
     # testing ids
     ev = catalog.events[0]
     self.assertEqual('smi:orig2', ev.preferred_origin_id)
     self.assertEqual('smi:mag2', ev.preferred_magnitude_id)
     self.assertEqual('smi:fm2', ev.preferred_focal_mechanism_id)
     # testing objects
     self.assertEqual(ev.preferred_origin(), ev.origins[1])
     self.assertEqual(ev.preferred_magnitude(), ev.magnitudes[1])
     self.assertEqual(
         ev.preferred_focal_mechanism(), ev.focal_mechanisms[1])
Example #26
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split("\n"):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split("\t", 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get("lon"))
         origin.latitude = self._str2num(values.get("lat"))
         depth = self._str2num(values.get("depth"))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get("z_err"))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get("h_err"))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = "horizontal uncertainty"
             origin.origin_uncertainty = ou
         year = self._str2num(values.get("year"))
         if year is not None:
             t_fields = ["year", "month", "day", "hour", "minute", "second"]
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 utc_args = [int(v) for v in comps if v is not None]
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get("mag"))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get("m_err"))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         catalog.append(event)
     return catalog
Example #28
 def test_clustered_dist_time(self):
     peaks = np.array([100, 65, 20, 120, 300])
     index = np.array([2000, 5000, 10, 70, 500])
     trig_int = 100
     hypocentral_separation = 10.0
     catalog = Catalog([
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
         Event(origins=[Origin(latitude=0.0, longitude=90.0, depth=1000.)]),
     ])
     peaks_out = decluster_distance_time(
         peaks, index, trig_int, catalog, hypocentral_separation,
         threshold=0)
     assert len(peaks) > len(peaks_out)
     assert peaks_out == [(300.0, 500), (120.0, 70), (100.0, 2000),
                          (65.0, 5000)]
Example #29
File: core.py Project: znamy/obspy
 def _read_event_header(self):
     line = self._get_next_line()
     event_id = self._construct_id(['event', line[6:14].strip()])
     region = line[15:80].strip()
     event = Event(resource_id=event_id,
                   event_descriptions=[
                       EventDescription(text=region, type='region name')
                   ])
     self.cat.append(event)
Example #30
 def setUpClass(cls) -> None:
     cls.event = Event(
         origins=[Origin(time=UTCDateTime(2019, 1, 1), latitude=-45.,
                         longitude=90.0, depth=10000.)],
         magnitudes=[Magnitude(mag=7.4)])
     cls.event.preferred_origin_id = cls.event.origins[0].resource_id
Example #31
 def catalog(self, bingham_dataset, new_time):
     """
     Assemble a events to test yield_event_waveforms with an event
     that was not initiated from the start.
     """
     # get first event, create new origin to slightly add some time.
     ori = Origin(time=new_time, latitude=47.1, longitude=-100.22)
     event = Event(origins=[ori])
     return obspy.Catalog(events=[event])
Example #32
def read_nlloc_sum(file_in):
    """
    Function made to read a nlloc hypocenter-file and store it into a simple LOTOS_class Catalog
    The ID is read from the event.comments part
    """
    from obspy.io.nlloc.core import read_nlloc_hyp
    from lotos.LOTOS_class import Catalog, Event, Phase
    from general import util as gutil


    Ray = Catalog()
    cat = read_nlloc_hyp(file_in)

    stations_dic = Ray.stations_realname

    for event in cat:

        id_event = event.comments[0].text
        origin = event.preferred_origin()
        OT = origin.time

        #### Initialize Event

        Event_p = Event()

        Event_p.x = origin.longitude
        Event_p.y = origin.latitude
        Event_p.z = origin.depth / 1000
        Event_p.id = id_event
        Event_p.ot = OT
        Event_p.num_phase = origin.quality.used_phase_count
        Picks_p = event.picks

        for arrival in origin.arrivals:
            Phase_p = Phase()

            if arrival.phase in ['P', 'Pn']:
                Phase_p.type = 1
            else:
                Phase_p.type = 2

            Pick_p = gutil.getPickForArrival(Picks_p, arrival)
            Phase_p.station = stations_dic[Pick_p.waveform_id.station_code]
            Phase_p.t_obs = Pick_p.time - OT
            Phase_p.t_tho = Phase_p.t_obs - arrival.time_residual

            Event_p.phases.append(Phase_p)

        Ray.events.append(Event_p)

    return Ray
Example #33
 def test_append(self):
     """
     Tests the append method of the Catalog object.
     """
     # 1 - create catalog and add a few events
     catalog = Catalog()
     event1 = Event()
     event2 = Event()
     self.assertEqual(len(catalog), 0)
     catalog.append(event1)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(catalog.events, [event1])
     catalog.append(event2)
     self.assertEqual(len(catalog), 2)
     self.assertEqual(catalog.events, [event1, event2])
     # 2 - appending objects other than Event should fail
     self.assertRaises(TypeError, catalog.append, str)
     self.assertRaises(TypeError, catalog.append, Catalog)
     self.assertRaises(TypeError, catalog.append, [event1])
Example #34
 def test_eq(self):
     """
     Testing the __eq__ method of the Event object.
     """
     # events are equal if they have the same public_id
     # Catch warnings about different objects sharing the same
     # resource id so they do not clutter the test output.
     with warnings.catch_warnings() as _:  # NOQA
         warnings.simplefilter("ignore")
         ev1 = Event(resource_id='id1')
         ev2 = Event(resource_id='id1')
         ev3 = Event(resource_id='id2')
     self.assertEqual(ev1, ev2)
     self.assertEqual(ev2, ev1)
     self.assertFalse(ev1 == ev3)
     self.assertFalse(ev3 == ev1)
     # comparing with other objects fails
     self.assertFalse(ev1 == 1)
     self.assertFalse(ev2 == "id1")
Example #35
    def test_seishub(self):
        """Test the seishub method, use obspy default seishub client."""
        import sys
        if sys.version_info.major == 2:
            from future.backports.urllib.request import URLError
        else:
            from urllib.request import URLError
        t = UTCDateTime(2009, 9, 3)
        test_cat = Catalog()
        test_cat.append(Event())
        test_cat[0].origins.append(Origin())
        test_cat[0].origins[0].time = t
        test_cat[0].origins[0].latitude = 45
        test_cat[0].origins[0].longitude = 45
        test_cat[0].origins[0].depth = 5000
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHZ',
                                              network_code='BW'),
                 phase_hint='PG',
                 time=t + 2000))
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHN',
                                              network_code='BW'),
                 phase_hint='SG',
                 time=t + 2005))
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHE',
                                              network_code='BW'),
                 phase_hint='SG',
                 time=t + 2005.5))

        test_url = "http://teide.geophysik.uni-muenchen.de:8080"

        if sys.version_info.major == 3:
            try:
                template = template_gen(method="from_seishub",
                                        catalog=test_cat,
                                        url=test_url,
                                        lowcut=1.0,
                                        highcut=5.0,
                                        samp_rate=20,
                                        filt_order=4,
                                        length=3,
                                        prepick=0.5,
                                        swin='all',
                                        process_len=300)
            except URLError:
                pass
        else:
            pass
        if 'template' in locals():
            self.assertEqual(len(template), 3)
Example #36
    def _parse_event(self, first_line):
        """
        Parse an event.

        :type first_line: str
        :param first_line: First line of an event block, which contains
            the event id.
        :rtype: :class:`~obspy.core.event.event.Event`
        :return: The parsed event or None.
        """
        event_id = first_line[5:].strip()
        # Skip event without id
        if not event_id:
            self._warn('Missing event id')
            return None

        event = Event()

        origin, origin_res_id = self._parse_origin(event)
        # Skip event without origin
        if not origin:
            return None

        line = self._skip_empty_lines()

        self._parse_region_name(line, event)
        self._parse_arrivals(event, origin, origin_res_id)

        # Origin ResourceIdentifier should be set at the end, when
        # Arrivals are already set.
        origin.resource_id = origin_res_id
        event.origins.append(origin)

        event.preferred_origin_id = origin.resource_id.id

        # Must be done after the origin parsing
        event.creation_info = self._get_creation_info()

        public_id = "event/%s" % event_id
        event.resource_id = self._get_res_id(public_id)

        return event
Example #37
 def __init__(self, parent=None, evtdata=None):
     self._parent = parent
     if self.getParent():
         self.comp = parent.getComponent()
     else:
         self.comp = 'Z'
         self.wfdata = Stream()
     self._new = False
     if isinstance(evtdata, (ObsPyEvent, Event)):
         pass
     elif isinstance(evtdata, dict):
         evt = readPILOTEvent(**evtdata)
         evtdata = evt
     elif isinstance(evtdata, str):
         try:
             cat = read_events(evtdata)
             if len(cat) != 1:
                 raise ValueError('ambiguous event information for file: '
                                  '{file}'.format(file=evtdata))
             evtdata = cat[0]
         except TypeError as e:
             if 'Unknown format for file' in str(e):
                 if 'PHASES' in evtdata:
                     picks = picksdict_from_pilot(evtdata)
                     evtdata = ObsPyEvent()
                     evtdata.picks = picks_from_picksdict(picks)
                 elif 'LOC' in evtdata:
                     raise NotImplementedError('PILOT location information '
                                               'read support not yet '
                                               'implemented.')
                 else:
                     raise e
             else:
                 raise e
     else:  # create an empty Event object
         self.setNew()
         evtdata = ObsPyEvent()
         evtdata.picks = []
     self.evtdata = evtdata
     self.wforiginal = None
     self.cuttimes = None
     self.dirty = False
Example #39
def event_time(event: Event) -> UTCDateTime:
    """ Get a time for an event. """
    try:
        timed_obj = event.preferred_origin() or event.origins[0]
    except IndexError:
        try:
            timed_obj = sorted(event.picks, key=lambda p: p.time)[0]
        except IndexError:
            print("Neither origin nor pick found")
            return UTCDateTime(0)
    return timed_obj.time
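Because the helper always returns a UTCDateTime, it can serve directly as a
sort key; a one-line sketch assuming an existing catalog:

catalog.events.sort(key=event_time)  # chronological, with picks as fallback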
Example #40
def _get_plot_starttime(event: Event, st: Stream) -> UTCDateTime:
    """Get starttime of a plot given an event and a stream."""
    try:
        attribute_with_time = event.preferred_origin() or event.origins[0]
    except (AttributeError, IndexError):
        try:
            attribute_with_time = AttribDict(
                {"time": min([p.time for p in event.picks]) - 5})
        except ValueError:
            attribute_with_time = AttribDict(
                {"time": min([tr.stats.starttime for tr in st])})
    return attribute_with_time.time
Example #41
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
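The dict keys the function reads are visible above; a minimal sketch with
invented values (the output path is hypothetical, and UTCDateTime is assumed
to be imported from obspy):

event = {
    "identifier": "evt_0001", "time": UTCDateTime("2020-01-02T03:04:05"),
    "longitude": 7.4, "latitude": 45.1, "depth_in_km": 9.8, "Mw": 5.5,
    "Mrr": 1e17, "Mtt": -5e16, "Mpp": -5e16,
    "Mrt": 0.0, "Mrp": 0.0, "Mtp": 0.0,
}
event_to_quakeml(event, "inversion_event.xml")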
Example #42
 def test_clear_method_resets_objects(self):
     """
     Tests that the clear() method properly resets all objects. Test for
     #449.
     """
     # Test with basic event object.
     e = Event(force_resource_id=False)
     e.comments.append(Comment(text="test"))
     e.event_type = "explosion"
     self.assertEqual(len(e.comments), 1)
     self.assertEqual(e.event_type, "explosion")
     e.clear()
     self.assertEqual(e, Event(force_resource_id=False))
     self.assertEqual(len(e.comments), 0)
     self.assertEqual(e.event_type, None)
     # Test with pick object. Does not really fit in the event test case but
     # it tests the same thing...
     p = Pick()
     p.comments.append(Comment(text="test"))
     p.phase_hint = "p"
     self.assertEqual(len(p.comments), 1)
     self.assertEqual(p.phase_hint, "p")
     # Add some more random attributes. These should disappear upon
     # cleaning.
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter("always")
         p.test_1 = "a"
         p.test_2 = "b"
         # two warnings should have been issued by setting non-default keys
         self.assertEqual(len(w), 2)
     self.assertEqual(p.test_1, "a")
     self.assertEqual(p.test_2, "b")
     p.clear()
     self.assertEqual(len(p.comments), 0)
     self.assertEqual(p.phase_hint, None)
     self.assertFalse(hasattr(p, "test_1"))
     self.assertFalse(hasattr(p, "test_2"))
Example #43
    def build(self, evid=None, orid=None, delete=False, phase_data=False, focal_data=False):
        """
        Build up an Event object
    
        Inputs
        ------
        evid       : int of EVID
        orid       : int of ORID
        delete     : bool of whether to mark event deleted (False)
        phase_data : bool of whether to include phase arrivals for event (False)
        focal_data : bool of whether to look for focal mechanisms (False)

        """
        #--- Build an Event based on params --------------------------------------
        if evid is None and orid:
            try:
                evid = self._evid(orid)
            except Exception:
                pass
        # 1. Build a stub Event to send a delete
        if delete:
            self.event = Event(event_type="not existing")
            self.event.creation_info = CreationInfo(version=evid, creation_time=UTCDateTime())
            self.event.resource_id = self._rid(self.event)
        else:
            self._build(orid=orid, phases=phase_data, focals=focal_data, event_type="not reported")
            # if no EVID reported, try to get it from the db (version attribute)
            if not evid:
                evid = int(self.event.creation_info.version)
        # Add a nearest event string, try to set event type with custom etype additions
        prefor = self.event.preferred_origin()
        if prefor is not None:
            self.event.event_type = self.origin_event_type(prefor, emap=self.emap)
            ed = self.get_nearest_event_description(prefor.latitude, prefor.longitude)
            self.event.event_descriptions = [ed]
        # Generate NSL namespace attributes
        extra_attributes = self.quakeml_anss_attrib(evid)
        self.event.extra = self.extra_anss(**extra_attributes)
Example #44
def brightness(stations, nodes, lags, stream, threshold, thresh_type,
               template_length, template_saveloc, coherence_thresh,
               coherence_stations=['all'], coherence_clip=False,
               gap=2.0, clip_level=100, instance=0, pre_pick=0.2,
               plotsave=True, cores=1):
    r"""Function to calculate the brightness function in terms of energy for \
    a day of data over the entire network for a given grid of nodes.

    Note data in stream must be all of the same length and have the same
    sampling rates.

    :type stations: list
    :param stations: List of station names in the form where stations[i] \
        refers to nodes[i][:] and lags[i][:]
    :type nodes: list, tuple
    :param nodes: List of node points where nodes[i] referes to stations[i] \
        and nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is \
        longitude in degrees, nodes[:][:][2] is depth in km.
    :type lags: :class: 'numpy.array'
    :param lags: Array of arrays where lags[i][:] refers to stations[i]. \
        lags[i][j] should be the delay to the nodes[i][j] for stations[i] in \
        seconds.
    :type stream: :class: `obspy.Stream`
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold: Threshold value for detection of template within the \
        brightness function
    :type thresh_type: str
    :param thresh_type: Either MAD or abs where MAD is the Median Absolute \
        Deviation and abs is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple of floats
    :param coherence_thresh: Threshold for removing incoherent peaks in the \
            network response, those below this will not be used as templates. \
            Must be in the form of (a,b) where the coherence is given by: \
            a-kchan/b where kchan is the number of channels used to compute \
            the coherence
    :type coherence_stations: list
    :param coherence_stations: List of stations to use in the coherence \
            thresholding - defaults to 'all' which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip: Start and end in seconds of data to window around, \
            defaults to False, which uses all the data given.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotsave: bool
    :param plotsave: Save or show plots; if False, will try to show the plots \
            on screen - as this is designed for bulk use this is set to \
            True to save any plots rather than show them if you create \
            them - changes the backend of matplotlib, so if it is set to \
            False you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type clip_level: float
    :param clip_level: Multiplier applied to the mean deviation of the energy \
                    as an upper limit, used to remove spikes (earthquakes, \
                    lightning, electrical spikes) from the energy stack.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections

    :return: list of templates as :class: `obspy.Stream` objects
    """
    from eqcorrscan.core.template_gen import _template_gen
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    # from joblib import Parallel, delayed
    from multiprocessing import Pool, cpu_count
    from copy import deepcopy
    from obspy import read as obsread
    from obspy.core.event import Catalog, Event, Pick, WaveformStreamID, Origin
    from obspy.core.event import EventDescription, CreationInfo, Comment
    import obspy
    import matplotlib.pyplot as plt
    from eqcorrscan.utils import plotting
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high-gain;
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise OverflowError('Too many streams, either re-code and cope with '
                            'either more memory usage, or less precision, or '
                            'reduce data volume')
    detections = []
    detect_lags = []
    parallel = True
    plotvar = True
    mem_issue = False
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    if not parallel:
        for i in range(0, len(nodes)):
            print(i)
            if not mem_issue:
                j, a = _node_loop(stations, lags[:, i], stream, plot=True)
                if 'energy' not in locals():
                    energy = a
                else:
                    energy = np.concatenate((energy, a), axis=0)
                print('energy: ' + str(np.shape(energy)))
            else:
                j, filename = _node_loop(stations, lags[:, i], stream, i,
                                         mem_issue)
        energy = np.array(energy)
        print(np.shape(energy))
    else:
        # Parallel run
        num_cores = cores
        if num_cores > len(nodes):
            num_cores = len(nodes)
        if num_cores > cpu_count():
            num_cores = cpu_count()
        pool = Pool(processes=num_cores)
        results = [pool.apply_async(_node_loop, args=(stations, lags[:, i],
                                                      stream, i, clip_level,
                                                      mem_issue, instance))
                   for i in range(len(nodes))]
        pool.close()
        if not mem_issue:
            print('Computing the cumulative network response from memory')
            energy = [p.get() for p in results]
            pool.join()
            energy.sort(key=lambda tup: tup[0])
            energy = [node[1] for node in energy]
            energy = np.concatenate(energy, axis=0)
            print(energy.shape)
        else:
            pool.join()
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indeces = np.argmax(energy, axis=0)  # Indeces of maximum energy
        print(indeces.shape)
        cum_net_resp = np.array([np.nan] * len(indeces))
        cum_net_resp[0] = energy[indeces[0]][0]
        peak_nodes = [nodes[indeces[0]]]
        for i in range(1, len(indeces)):
            cum_net_resp[i] = energy[indeces[i]][i]
            peak_nodes.append(nodes[indeces[i]])
        del energy, indeces
    else:
        print('Reading the temp files and computing network response')
        node_splits = int(len(nodes) // num_cores)
        indeces = [range(node_splits)]
        for i in range(1, num_cores - 1):
            indeces.append(range(node_splits * i, node_splits * (i + 1)))
        indeces.append(range(node_splits * (i + 1), len(nodes)))
        pool = Pool(processes=num_cores)
        results = [pool.apply_async(_cum_net_resp, args=(indeces[i], instance))
                   for i in range(num_cores)]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indeces = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indeces = np.argmax(cum_net_resp, axis=0)
        print(indeces.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array([cum_net_resp[indeces[i]][i]
                                 for i in range(len(indeces))])
        peak_nodes = [nodes[node_indeces[indeces[i]][i]]
                      for i in range(len(indeces))]
        del indeces, node_indeces
    if plotvar:
        cum_net_trace = deepcopy(stream[0])
        cum_net_trace.data = cum_net_resp
        cum_net_trace.stats.station = 'NR'
        cum_net_trace.stats.channel = ''
        cum_net_trace.stats.network = 'Z'
        cum_net_trace.stats.location = ''
        cum_net_trace.stats.starttime = stream[0].stats.starttime
        cum_net_trace = obspy.Stream(cum_net_trace)
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
        # np.save('cum_net_resp.npy',cum_net_resp)
        #     cum_net_trace.plot(size=(800,600), equal_scale=False,\
        #                        outfile='NR_timeseries.eps')

    # Find detection within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections into templates')
        # Generate a catalog of detections
        detections_cat = Catalog()
        for j, detection in enumerate(detections):
            print('Converting detection ' + str(j) + ' of ' +
                  str(len(detections)))
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            print(node)
            # Look up node in nodes and find the associated lags
            index = nodes.index(node)
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(station_code=tr.stats.
                                                        station,
                                                        channel_code=tr.stats.
                                                        channel,
                                                        network_code='NA')
                        event.picks.append(Pick(waveform_id=_waveform_id,
                                                time=tr.stats.starttime +
                                                detect_lag +
                                                detection.detect_time +
                                                pre_pick,
                                                onset='emergent',
                                                evaluation_mode='automatic'))
            print('Generating template for detection: ' + str(j))
            template = (_template_gen(event.picks, copy_of_stream,
                        template_length, 'all'))
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherent = True
            else:
                print('Template was incoherent, coherence level: ' +
                      str(temp_coher))
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                print('No template for you')
    if plotvar:
        all_detections = [(cum_net_trace[-1].stats.starttime +
                           detection.detect_time).datetime
                          for detection in detections]
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10), save=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
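
The int16 conversion above guards against integer overflow by rescaling any
trace whose amplitude exceeds the int16 range before casting. A minimal
sketch of that step on a plain numpy array (standalone, not part of the
function above):

import numpy as np

def to_int16(data):
    # Rescale into the int16 range only when needed, then cast.
    # Mirrors the clipping guard in the loop above.
    peak = np.max(np.abs(data))
    if peak > 32767:
        data = 32767 * (data / peak)
    return data.astype(np.int16)

# A high-gain trace that would otherwise wrap around when cast directly.
print(to_int16(np.array([1.5e5, -3.0e4, 2.0e2])))
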
def readSeishubEventFile(filename):
    """
    Reads a Seishub event file and returns a ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.readEvents` function, call this instead.

    :type filename: str
    :param filename: Seishub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: A ObsPy Catalog object.

    .. rubric:: Example
    """
    global CURRENT_TYPE

    base_name = os.path.basename(filename)

    if base_name.lower().startswith("baynet"):
        CURRENT_TYPE = "baynet"
    elif base_name.lower().startswith("earthworm"):
        CURRENT_TYPE = "earthworm"
    elif base_name.lower().startswith("gof"):
        CURRENT_TYPE = "seiscomp3"
    elif base_name.lower().startswith("obspyck") or base_name == "5622":
        CURRENT_TYPE = "obspyck"
    elif base_name.lower().startswith("toni"):
        CURRENT_TYPE = "toni"
    else:
        msg = "Unable to determine event file type from filename: %s" % \
            base_name
        raise ValueError(msg)

    # Just init the parser, the SeisHub event file format has no namespaces.
    parser = XMLParser(filename)
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text

    # A Seishub event just specifies a single event so Catalog information is
    # not really given.
    catalog = Catalog()
    catalog.resource_id = "/".join([RESOURCE_ROOT, "catalog", public_id])

    # Read the event_type tag.
    account = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser, str)
    public = parser.xpath2obj('event_type/public', parser, str)
    public = {"True": True, "False": False}.get(public, None)
    if account is not None and account.lower() != "sysop":
        public = False
    # The author will be stored in the CreationInfo object. This will be the
    # creation info of the event as well as on all picks.
    author = user
    if CURRENT_TYPE in ["seiscomp3", "earthworm"]:
        author = CURRENT_TYPE
    creation_info = {"author": author,
        "agency_id": "Erdbebendienst Bayern",
        "agency_uri": "%s/agency" % RESOURCE_ROOT,
        "creation_time": NOW}

    # Create the event object.
    event = Event(resource_id="/".join([RESOURCE_ROOT, "event", public_id]),
        creation_info=creation_info)
    # If account is None or 'sysop' and public is true, write 'public' in the
    # comment, 'private' otherwise.
    event.extra = AttribDict()
    event.extra.public = {'value': public, 'namespace': NAMESPACE}
    event.extra.evaluationMode = {'value': global_evaluation_mode, 'namespace': NAMESPACE}

    event_type = parser.xpath2obj('type', parser, str)
    if event_type is not None:
        if event_type == "induced earthquake":
            event_type = "induced or triggered event"
        if event_type != "null":
            event.event_type = event_type

    # Parse the origins.
    origins = parser.xpath("origin")
    if len(origins) > 1:
        msg = "Only files with a single origin are currently supported"
        raise Exception(msg)
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el, origin)
        if magnitude.mag is None:
            continue
        event.magnitudes.append(magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic, manual)
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        if pick is None:
            continue
        event.picks.append(pick)
        # The arrival object gets the following things from the Seishub.pick
        # objects
        # arrival.time_weight = pick.phase_weight
        # arrival.time_residual = pick.phase_res
        # arrival.azimuth = pick.azimuth
        # arrival.take_off_angle = pick.incident
        # arrival.distance = hyp_dist
        arrival = __toArrival(parser, pick_el, global_evaluation_mode, pick)
        if event.origins:
            event.origins[0].arrivals.append(arrival)

    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)

    # Parse the amplitudes
    # we don't reference their id in the corresponding station magnitude,
    # because we use one amplitude measurement for each component
    for el in parser.xpath("stationMagnitude/amplitude"):
        event.amplitudes.append(__toAmplitude(parser, el))

    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id

    for _i, stat_mag in enumerate(event.station_magnitudes):
        contrib = StationMagnitudeContribution()
        weight = None
        # The order of station magnitude objects is the same as in the xml
        # file.
        weight = parser.xpath2obj("weight",
            parser.xpath("stationMagnitude")[_i], float)
        if weight is not None:
            contrib.weight = weight
        contrib.station_magnitude_id = stat_mag.resource_id
        event.magnitudes[0].station_magnitude_contributions.append(contrib)

    for foc_mec_el in parser.xpath("focalMechanism"):
        foc_mec = __toFocalMechanism(parser, foc_mec_el)
        if foc_mec is not None:
            event.focal_mechanisms.append(foc_mec)

    # Set the origin id for the focal mechanisms. There is only one origin per
    # SeisHub event file.
    for focmec in event.focal_mechanisms:
        focmec.triggering_origin_id = event.origins[0].resource_id

    # Add the event to the catalog
    catalog.append(event)

    return catalog
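
The reader above stashes non-QuakeML fields on the event through obspy's
namespaced 'extra' mechanism, which survives a QuakeML round trip. A minimal
sketch (the namespace URL here is a placeholder, not the one this reader
uses):

from obspy.core.util import AttribDict
from obspy.core.event import Event

NAMESPACE = "http://example.org/xmlns/0.1"  # placeholder namespace

event = Event()
event.extra = AttribDict()
# Each entry carries a value plus the namespace it is written under.
event.extra.public = {'value': True, 'namespace': NAMESPACE}
event.extra.evaluationMode = {'value': 'manual', 'namespace': NAMESPACE}
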
Example #46
0
def _read_single_hypocenter(lines, coordinate_converter, original_picks):
    """
    Given a list of lines (starting with a 'NLLOC' line and ending with a
    'END_NLLOC' line), parse them into an Event.
    """
    try:
        # some paranoid checks..
        assert lines[0].startswith("NLLOC ")
        assert lines[-1].startswith("END_NLLOC")
        for line in lines[1:-1]:
            assert not line.startswith("NLLOC ")
            assert not line.startswith("END_NLLOC")
    except Exception:
        msg = ("This should not have happened, please report this as a bug at "
               "https://github.com/obspy/obspy/issues.")
        raise Exception(msg)

    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines[:-1]])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    # new NLLoc > 6.0 seems to add prefix 'run:' before date
    if date.startswith('run:'):
        date = date[4:]
    signature = signature.strip()
    creation_time = UTCDateTime.strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    if coordinate_converter:
        # maximum likelihood origin location in km info line
        line = lines["HYPOCENTER"]
        x, y, z = coordinate_converter(*map(float, line.split()[1:7:2]))
    else:
        # maximum likelihood origin location lon lat info line
        line = lines["GEOGRAPHIC"]
        y, x, z = map(float, line.split()[8:13:2])

    # maximum likelihood origin time info line
    line = lines["GEOGRAPHIC"]

    year, mon, day, hour, min = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, mon, day, hour, min, seconds, strict=False)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    if "COMMENT" in lines:
        comment = lines["COMMENT"].strip()
        comment = comment.strip('\'"')
        comment = comment.strip()

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    o = Origin()
    event.origins = [o]
    event.preferred_origin_id = o.resource_id
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string, force_resource_id=False))
    event.comments.append(Comment(text=comment, force_resource_id=False))

    # SIGNATURE field's first item is LOCSIG, which is supposed to be
    # 'Identification of an individual, institution or other entity'
    # according to
    # http://alomax.free.fr/nlloc/soft6.00/control.html#_NLLoc_locsig_
    # so use it as author in creation info
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version,
                                       author=signature)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version,
                                   author=signature)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        # network codes are not used by NonLinLoc, so they can not be known
        # when reading the .hyp file.. to conform with QuakeML standard set an
        # empty network code
        wid = WaveformStreamID(network_code="", station_code=station)
        # have to split this into ints for overflow to work correctly
        date, hourmin, sec = map(str, line[6:9])
        ymd = [int(date[:4]), int(date[4:6]), int(date[6:8])]
        hm = [int(hourmin[:2]), int(hourmin[2:4])]
        t = UTCDateTime(*(ymd + hm), strict=False) + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    event.scope_resource_ids()

    return event
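
The covariance handling above turns the diagonal terms of the NonLinLoc
covariance matrix (in km**2) into 1-sigma uncertainties in degrees, warning
instead of failing when numerical noise makes a diagonal term negative. A
condensed sketch of that conversion:

import warnings
from math import sqrt

from obspy.geodetics import kilometer2degrees

def covariance_to_degrees(cov_km2, label):
    # 1D marginal error in degrees, or None for a negative covariance
    # (a known NonLinLoc precision artefact, see the comments above).
    if cov_km2 < 0:
        warnings.warn("Negative %s covariance, not setting error" % label)
        return None
    return kilometer2degrees(sqrt(cov_km2))

print(covariance_to_degrees(4.0, "XX"))   # 2 km -> ~0.018 degrees
print(covariance_to_degrees(-0.1, "YY"))  # warns and returns None
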
Example #47
0
File: core.py Project: Qigaoo/obspy
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1: next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
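
The grouping loop above uses the zip_longest(*[iterator] * 5) idiom: passing
the same iterator five times makes each yielded tuple consume five
consecutive lines, with None padding flagging a trailing partial record. A
standalone sketch:

import itertools

lines = iter(["l1", "l2", "l3", "l4", "l5", "l6", "l7"])
for group in itertools.zip_longest(*[lines] * 5):
    if None in group:
        print("skipping partial record:", group)
        continue
    print("full record:", group)
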
Example #48
0
File: core.py Project: mbyt/obspy
def _readheader(f):
    """
    Internal header reader.
    :type f: file
    :param f: File open in read-mode.

    :returns: :class:`~obspy.core.event.event.Event`
    """
    f.seek(0)
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = _get_headline(f=f)
    if not topline:
        raise NordicParsingError('No header found, or incorrect '
                                 'formatting: corrupt s-file')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        raise NordicParsingError("Couldn't read a date from sfile")
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    try:
        new_event.origins[0].latitude = float(topline[23:30])
        new_event.origins[0].longitude = float(topline[31:38])
        new_event.origins[0].depth = float(topline[39:43]) * 1000
    except ValueError:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = None
        new_event.origins[0].longitude = None
        new_event.origins[0].depth = None
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    if _float_conv(topline[51:55]) is not None:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.
    for index in [59, 67, 75]:
        if not topline[index].isspace():
            new_event.magnitudes.append(Magnitude())
            new_event.magnitudes[-1].mag = _float_conv(
                topline[index - 3:index])
            new_event.magnitudes[-1].magnitude_type = \
                _nortoevmag(topline[index])
            new_event.magnitudes[-1].creation_info = \
                CreationInfo(agency_id=topline[index + 1:index + 4].strip())
            new_event.magnitudes[-1].origin_id = new_event.origins[0].\
                resource_id
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = new_event.origins[0].resource_id
    try:
        # Select moment first, then local, then body-wave, surface-wave
        # and coda magnitudes
        mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                      'MS', 'Ms', 'MC', 'Mc']
        _magnitudes = [(m.magnitude_type, m.resource_id)
                       for m in new_event.magnitudes]
        preferred_magnitude = sorted(_magnitudes,
                                     key=lambda x: mag_filter.index(x[0]))[0]
        new_event.preferred_magnitude_id = preferred_magnitude[1]
    except (ValueError, IndexError):
        # If there is a magnitude not specified in filter
        try:
            new_event.preferred_magnitude_id = new_event.magnitudes[0].\
                resource_id
        except IndexError:
            pass
    return new_event
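
The preferred-magnitude logic above ranks magnitude types by their position
in mag_filter, so a moment magnitude beats a local one, and so on down the
list. The same sort-by-priority pattern with plain tuples:

mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'MC', 'Mc']
magnitudes = [('ML', 'id/local'), ('Mw', 'id/moment')]
try:
    preferred = sorted(magnitudes,
                       key=lambda m: mag_filter.index(m[0]))[0]
except ValueError:
    # A type missing from mag_filter raises ValueError inside index();
    # fall back to the first magnitude, as the reader above does.
    preferred = magnitudes[0]
print(preferred)  # ('Mw', 'id/moment') -- moment beats local
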
Example #49
0
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """

    a = line.split()
    try:
        ot = UTCDateTime.strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime.strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name,
                                         'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name,
                                         'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name,
                                        'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name,
                                        'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name,
                                           "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down (Jost and \
    Herrmann 1989")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name,
                                      'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
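
Assembling the focal mechanism above follows the usual obspy pattern: two
NodalPlane objects wrapped in a NodalPlanes container. A minimal sketch with
illustrative strike/dip/rake values:

from obspy.core.event import FocalMechanism, NodalPlane, NodalPlanes

# Hypothetical double-couple solution; the angles are placeholders.
plane_1 = NodalPlane(strike=120, dip=45, rake=90)
plane_2 = NodalPlane(strike=300, dip=45, rake=90)
foc_mec = FocalMechanism(
    nodal_planes=NodalPlanes(nodal_plane_1=plane_1, nodal_plane_2=plane_2))
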
Example #50
0
def __read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: Open file or open file like object.
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    hypocenter_catalog = line[:4].strip().decode()

    origin_time = line[4:].strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line.split()[7:]
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        # Depth is in meters.
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary"
    )

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_bw"),
        mag=body_wave_mag, magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_sw"),
        mag=surface_wave_mag, magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    values = ["time_shift", "half_duration", "latitude", "longitude",
              "depth", "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    cmt_values = {_i: float(buf.readline().strip().split()[-1])
                  for _i in values}

    # Moment magnitude calculation in dyne * cm.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"] ** 2 +
        cmt_values["m_tt"] ** 2 +
        cmt_values["m_pp"] ** 2 +
        2.0 * cmt_values["m_rt"] ** 2 +
        2.0 * cmt_values["m_rp"] ** 2 +
        2.0 * cmt_values["m_tp"] ** 2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could rarely be different than the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id
    )

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id
    )

    tensor = Tensor(
        m_rr=cmt_values["m_rr"],
        m_pp=cmt_values["m_pp"],
        m_tt=cmt_values["m_tt"],
        m_rt=cmt_values["m_rt"],
        m_rp=cmt_values["m_rp"],
        m_tp=cmt_values["m_tp"]
    )

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"]
    )

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf
    )

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(EventDescription(text=event_name,
                                                  type="earthquake name"))
    ev.comments.append(Comment(
        text="Hypocenter catalog: %s" % hypocenter_catalog,
        force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    return ev
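
The scalar moment above is the Frobenius-norm definition,
M0 = 1/sqrt(2) * sqrt(sum of squared tensor components), taken over the six
independent components (off-diagonals counted twice), with
Mw = 2/3 * (log10(M0) - 16.1) for dyne * cm. The arithmetic in isolation:

import math

# Illustrative tensor components in dyne * cm.
m = {"m_rr": 1.0e26, "m_tt": -0.5e26, "m_pp": -0.5e26,
     "m_rt": 0.2e26, "m_rp": 0.1e26, "m_tp": 0.05e26}

m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
    m["m_rr"] ** 2 + m["m_tt"] ** 2 + m["m_pp"] ** 2 +
    2.0 * m["m_rt"] ** 2 + 2.0 * m["m_rp"] ** 2 + 2.0 * m["m_tp"] ** 2)
m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)  # -16.1 only valid for dyne * cm
print(m_0, round(m_w, 2))
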
Example #51
0
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
                                            Mtr ** 2 + Mpr ** 2 + Mtp ** 2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes()

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
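
A hypothetical call to the converter above; the filenames are placeholders
and the rotation parameters simply echo the defaults:

par2quakeml('event.par', 'event.xml',
            rotation_axis=[0.0, 1.0, 0.0], rotation_angle=-57.5,
            origin_time='2000-01-01 00:00:00.0', event_type='other event')
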
class DBToQuakemlConverter(AntelopeToEventConverter):
    """
    Antelope -> Event converter with customizations for writing QuakeML files
    
    Methods
    -------
    build(self, evid=None, orid=None, delete=False, phase_data=False, focal_data=False):
        Build up an Event using various parameters
    quakeml_str(): Return QuakeML string of the current Event object
    quakeml_anss_attrib(self, evid=None): Construct dict of ANSS attributes
    quakeml_filename(self, product): Try to construct a meaningful XML filename

    """
    rid_factory = rid_function

    def quakeml_anss_attrib(self, evid=None):
        """
        Returns stuff necessary for quakeml files
        
        These things are specific to a datacenter, in an effort to generalize
        the actual writer function as much as possible.
        
        Input
        -----
        evid   : int of some event identifier to name the file 
        agency : str of name or code of agency creating file (netcode)
        
        Returns : dict of the 4 ANSS 'catalog' attributes with meaningful values.
        """
        agency_code = self.agency.lower()
        if evid:
            anss_id = '{0:08d}'.format(evid)
        else:
            anss_id = '00000000'
        return {'datasource': agency_code,
                'dataid': agency_code + anss_id,
                'eventsource': agency_code,
                'eventid': anss_id}

    def quakeml_filename(self, product):
        return self.event.extra['dataid']['value'] + '_' + product + '.xml'

    def extra_anss(self, **kwargs):
        """
        Create a dictionary of ANSS vars for use by the event class 'extra' attribute
        
        Inputs
        ------
        kwargs SHOULD be one of ('datasource','dataid','eventsource','eventid')
        
        Returns : dict of obspy 'extra' format

        """
        # in new "extra" patch, use both for now
        # NOTE: Obspy 0.9.3+ should support this natively, NO PATCH!!
        # - '_namespace' renamed to 'namespace' 
        # - '_type' renamed to 'type'
        extra_attrib = {} 
        ns_anss = 'http://anss.org/xmlns/catalog/0.1'
        self.nsmap.update({'catalog': ns_anss})
        for a in kwargs:
            extra_attrib[a] = {'value': kwargs[a],
                               'namespace': ns_anss,
                               'type': 'attribute'}
        return extra_attrib

    def build(self, evid=None, orid=None, delete=False, phase_data=False, focal_data=False):
        """
        Build up an Event object
    
        Inputs
        ------
        evid       : int of EVID
        orid       : int of ORID
        delete     : bool of whether to mark event deleted (False)
        phase_data : bool of whether to include phase arrivals for event (False)
        focal_data : bool of whether to look for focal mechanisms (False)

        """
        #--- Build an Event based on params --------------------------------------
        if evid is None and orid:
            try:
                evid = self._evid(orid)
            except Exception:
                pass
        # 1. Build a stub Event to send a delete
        if delete:
            self.event = Event(event_type="not existing")
            self.event.creation_info = CreationInfo(version=evid, creation_time=UTCDateTime())
            self.event.resource_id = self._rid(self.event)
        else:
            self._build(orid=orid, phases=phase_data, focals=focal_data, event_type="not reported")
            # if no EVID reported, try to get it from the db (version attribute)
            if not evid:
                evid = int(self.event.creation_info.version)
        # Add a nearest event string, try to set event type with custom etype additions
        prefor = self.event.preferred_origin()
        if prefor is not None:
            self.event.event_type = self.origin_event_type(prefor, emap=self.emap)
            ed = self.get_nearest_event_description(prefor.latitude, prefor.longitude)
            self.event.event_descriptions = [ed]
        # Generate NSL namespace attributes
        extra_attributes = self.quakeml_anss_attrib(evid)
        self.event.extra = self.extra_anss(**extra_attributes)

    @classmethod
    def _qmls(cls, c):
        """
        Writes Catalog object to QuakeML string

        Inputs
        ------
        c : obspy.core.event.Catalog

        Returns : str of QuakeML file contents

        """
        return Pickler(nsmap=cls.nsmap).dumps(c)

    def quakeml_str(self):
        """
        Return QuakeML string of current Event object

        :returns: str of QuakeML file contents

        """
        return self._qmls(self.catalog)
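
For reference, quakeml_anss_attrib derives all four ANSS catalog attributes
from the agency code and a zero-padded event id; the same computation
stripped of the class plumbing:

def anss_attrib(agency, evid=None):
    # Mirror of quakeml_anss_attrib above, minus self.
    agency_code = agency.lower()
    anss_id = '{0:08d}'.format(evid) if evid else '00000000'
    return {'datasource': agency_code,
            'dataid': agency_code + anss_id,
            'eventsource': agency_code,
            'eventid': anss_id}

print(anss_attrib('US', 1234))
# {'datasource': 'us', 'dataid': 'us00001234',
#  'eventsource': 'us', 'eventid': '00001234'}
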
Example #53
0
File: core.py Project: bmorg/obspy
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates
        (longitude, latitude, depth in kilometers).
        If left `None` NonLinLoc (x, y, z) output is left unchanged (e.g. if
        it is in geographical coordinates already like for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z and return a
        tuple of three values (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        `pick_id` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    # map remaining lines to a dict, keyed by their first token
    # (e.g. "SIGNATURE", "HYPOCENTER", "GEOGRAPHIC", ...)
    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_XX = float(line.split()[7])
    covariance_YY = float(line.split()[13])
    covariance_ZZ = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # go to location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # go to origin uncertainty info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_XX))
    except ValueError:
        if covariance_XX < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_YY))
    except ValueError:
        if covariance_YY < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_ZZ) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
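
A minimal usage sketch for the reader above. The UTM projection, zone and
file name below are illustrative assumptions, not part of the original
example; adapt them to the actual NonLinLoc model setup.

from pyproj import Proj

# import path depends on the ObsPy version (obspy.nlloc in older releases)
from obspy.nlloc.core import read_nlloc_hyp

_proj = Proj(proj="utm", zone=33, ellps="WGS84")

def utm33_to_wgs84(x, y, z):
    # NonLinLoc grid coordinates are in kilometers, pyproj expects meters
    lon, lat = _proj(x * 1000.0, y * 1000.0, inverse=True)
    return lon, lat, z  # depth stays in kilometers

cat = read_nlloc_hyp("example.loc.hyp", coordinate_converter=utm33_to_wgs84)
print(cat)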
Example #54
def readheader(sfile):
    """
    Read header information from a seisan nordic format S-file.
    Returns an obspy.core.event.Event: note this changed for version \
    0.1.0 from the inbuilt class types.

    :type sfile: str
    :param sfile: Path to the s-file

    :returns: :class:`obspy.core.event.Event`

    >>> event = readheader('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                    '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    """
    import warnings
    from obspy.core.event import Event, Origin, Magnitude, Comment
    from obspy.core.event import EventDescription, CreationInfo
    f = open(sfile, 'r')
    # Base populate to allow for empty parts of file
    new_event = Event()
    topline = f.readline()
    if not len(topline.rstrip()) == 80:
        raise IOError('s-file has a corrupt header, not 80 char long')
    f.seek(0)
    for line in f:
        if line[79] in [' ', '1']:
            topline = line
            break
        if line[79] == '7':
            raise IOError('No header found, corrupt s-file?')
    try:
        sfile_seconds = int(topline[16:18])
        if sfile_seconds == 60:
            sfile_seconds = 0
            add_seconds = 60
        else:
            add_seconds = 0
        new_event.origins.append(Origin())
        new_event.origins[0].time = UTCDateTime(int(topline[1:5]),
                                                int(topline[6:8]),
                                                int(topline[8:10]),
                                                int(topline[11:13]),
                                                int(topline[13:15]),
                                                sfile_seconds,
                                                int(topline[19:20]) *
                                                100000)\
            + add_seconds
    except Exception:
        warnings.warn("Couldn't read a date from sfile: " + sfile)
        new_event.origins.append(Origin(time=UTCDateTime(0)))
    # new_event.loc_mod_ind=topline[20]
    new_event.event_descriptions.append(EventDescription())
    new_event.event_descriptions[0].text = topline[21:23]
    # new_event.ev_id=topline[22]
    if not _float_conv(topline[23:30]) == 999:
        new_event.origins[0].latitude = _float_conv(topline[23:30])
        new_event.origins[0].longitude = _float_conv(topline[31:38])
        new_event.origins[0].depth = _float_conv(topline[39:43]) * 1000
    else:
        # The origin 'requires' a lat & long
        new_event.origins[0].latitude = float('NaN')
        new_event.origins[0].longitude = float('NaN')
        new_event.origins[0].depth = float('NaN')
    # new_event.depth_ind = topline[44]
    # new_event.loc_ind = topline[45]
    new_event.creation_info = CreationInfo(agency_id=topline[45:48].strip())
    ksta = Comment(text='Number of stations=' + topline[49:51].strip())
    new_event.origins[0].comments.append(ksta)
    # new_event.origins[0].nsta??? = _int_conv(topline[49:51])
    if not _float_conv(topline[51:55]) == 999:
        new_event.origins[0].time_errors['Time_Residual_RMS'] = \
            _float_conv(topline[51:55])
    # Read in magnitudes if they are there.
    if len(topline[59].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[0].mag = _float_conv(topline[56:59])
        new_event.magnitudes[0].magnitude_type = topline[59]
        new_event.magnitudes[0].creation_info = \
            CreationInfo(agency_id=topline[60:63].strip())
        new_event.magnitudes[0].origin_id = new_event.origins[0].resource_id
    if len(topline[67].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[1].mag = _float_conv(topline[64:67])
        new_event.magnitudes[1].magnitude_type = topline[67]
        new_event.magnitudes[1].creation_info = \
            CreationInfo(agency_id=topline[68:71].strip())
        new_event.magnitudes[1].origin_id = new_event.origins[0].resource_id
    if len(topline[75].strip()) > 0:
        new_event.magnitudes.append(Magnitude())
        new_event.magnitudes[2].mag = _float_conv(topline[72:75])
        new_event.magnitudes[2].magnitude_type = topline[75]
        new_event.magnitudes[2].creation_info = \
            CreationInfo(agency_id=topline[76:79].strip())
        new_event.magnitudes[2].origin_id = new_event.origins[0].resource_id
    f.close()
    # convert the nordic notation of magnitude to more general notation
    for _magnitude in new_event.magnitudes:
        _magnitude.magnitude_type = _nortoevmag(_magnitude.magnitude_type)
    # Set the useful things like preferred magnitude and preferred origin
    new_event.preferred_origin_id = str(new_event.origins[0].resource_id)
    if len(new_event.magnitudes) > 1:
        try:
            # Select moment first, then local, then the rest in this order
            mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb',
                          'MS', 'Ms', 'Mc', 'MC']
            _magnitudes = [(m.magnitude_type, m.resource_id)
                           for m in new_event.magnitudes]
            preferred_magnitude = sorted(_magnitudes,
                                         key=lambda x: mag_filter.index(x[0]))
            new_event.preferred_magnitude_id = str(preferred_magnitude[0][1])
        except ValueError:
            # If there is a magnitude not specified in filter
            new_event.preferred_magnitude_id =\
                str(new_event.magnitudes[0].resource_id)
    elif len(new_event.magnitudes) == 1:
        new_event.preferred_magnitude_id =\
            str(new_event.magnitudes[0].resource_id)
    return new_event
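
The preferred-magnitude choice above reduces to sorting by position in a
priority list; a self-contained sketch of just that logic:

mag_filter = ['MW', 'Mw', 'ML', 'Ml', 'MB', 'Mb', 'MS', 'Ms', 'Mc', 'MC']
magnitudes = [('ML', 'smi:local/mag/1'), ('MW', 'smi:local/mag/2')]
# the lowest index in mag_filter wins, i.e. moment magnitude is preferred
preferred = sorted(magnitudes, key=lambda x: mag_filter.index(x[0]))[0]
print(preferred)  # -> ('MW', 'smi:local/mag/2')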
Example #55
def request_gcmt(starttime, endtime, minmagnitude=None, mindepth=None, maxdepth=None, minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None):
	"""
	Query the globalcmt.org CMT catalog search form via mechanize and
	return the result as an obspy Catalog. Preliminary implementation,
	written against the current layout of the globalcmt.org site.
	"""
	import re

	from mechanize import Browser

	# Split leading letters from trailing digits (used to pull the year
	# out of the catalog tag)
	r = re.compile("([a-zA-Z]+)([0-9]+)")


	br = Browser()
	br.open('http://www.globalcmt.org/CMTsearch.html')
	#Site has just one form
	br.select_form(nr=0)

	br.form['yr']    = str(starttime.year)
	br.form['mo']    = str(starttime.month)
	br.form['day']   = str(starttime.day)
	br.form['oyr']   = str(endtime.year)
	br.form['omo']   = str(endtime.month)
	br.form['oday']  = str(endtime.day)
	br.form['list']  = ['4']
	br.form['itype'] = ['ymd']
	br.form['otype'] = ['ymd']

	if minmagnitude: br.form['lmw']   = str(minmagnitude)
	if minlatitude : br.form['llat']  = str(minlatitude)
	if maxlatitude : br.form['ulat']  = str(maxlatitude)
	if minlongitude: br.form['llon']  = str(minlongitude)
	if maxlongitude: br.form['ulon']  = str(maxlongitude)
	if mindepth    : br.form['lhd']   = str(mindepth)
	if maxdepth    : br.form['uhd']   = str(maxdepth)

	print("Submitting parameters to globalcmt.")
	req = br.submit()
	print("Retrieving data, creating catalog.")

	data = []
	for line in req:
		data.append(line) 

	data_chunked = _chunking_list(keyword='\n', list=data)
	origins = []
	magnitudes = []
	tensor = []

	for line in data_chunked:
		for element in line:
			if 'event name' in element:
				org       = line[1].split()
				year      = int(r.match(org[0]).groups()[1])
				mon       = int(org[1])
				day       = int(org[2])
				hour      = int(org[3])
				minute    = int(org[4])
				sec_temp  = int(org[5].split('.')[0])
				msec_temp = int(org[5].split('.')[1])

				origins_temp = UTCDateTime(year, mon, day, hour, minute, sec_temp, msec_temp)
				#adding time shift located in line[3]
				origin       = origins_temp + float(line[3].split()[2])
				magnitude    = float(line[1].split()[10])
				latitude     = float(line[5].split()[1])
				longitude    = float(line[6].split()[1])
				depth        = 1000. * float(line[7].split()[1])
				m_rr         = float(line[8].split()[1])
				m_tt         = float(line[9].split()[1])
				m_pp         = float(line[10].split()[1])
				m_rt         = float(line[11].split()[1])
				m_rp         = float(line[12].split()[1])
				m_tp         = float(line[13].split()[1])

				magnitudes.append( ("Mw", magnitude) )
				origins.append( (latitude, longitude, depth, origin) )
				tensor.append( (m_rr, m_tt, m_pp, m_rt, m_rp, m_tp) )

	cat = Catalog()

	for mag, org, ten in zip(magnitudes, origins, tensor):
		# Create magnitude object.
		magnitude = Magnitude()


		magnitude.magnitude_type = mag[0]
		magnitude.mag = mag[1]
		# Write origin object.
		origin = Origin()
		origin.latitude = org[0]
		origin.longitude = org[1]
		origin.depth = org[2]
		origin.time = org[3]
		# Create event object and append to catalog object.
		event = Event()
		event.magnitudes.append(magnitude)
		event.origins.append(origin)

		event.MomentTensor = MomentTensor()
		event.MomentTensor.m_rr = ten[0]
		event.MomentTensor.m_tt = ten[1]
		event.MomentTensor.m_pp = ten[2]
		event.MomentTensor.m_rt = ten[3]
		event.MomentTensor.m_rp = ten[4]
		event.MomentTensor.m_tp = ten[5]

		cat.append(event)

	return cat
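
A hypothetical call to the function above; the date range and magnitude
threshold are examples only:

from obspy import UTCDateTime

cat = request_gcmt(starttime=UTCDateTime(2010, 1, 1),
                   endtime=UTCDateTime(2010, 2, 1),
                   minmagnitude=7.0)
print(len(cat), "events retrieved from globalcmt.org")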
Example #56
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(Origin(
        time=UTCDateTime("2012-03-26") + 1.2, latitude=45.0, longitude=25.0,
        depth=15000))
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.magnitudes.append(Magnitude(
        mag=0.1, magnitude_type='ML', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(Magnitude(
        mag=0.5, magnitude_type='Mc', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[0].resource_id))
    test_event.magnitudes.append(Magnitude(
        mag=1.3, magnitude_type='Ms', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[0].resource_id))

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude - 0
    test_event.picks = [
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic")]
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes = [
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'),
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3),
        Amplitude(generic_amplitude=5.0, period=0.6,
                  pick_id=test_event.picks[2].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  category='point', type='AML')]
    test_event.origins[0].arrivals = [
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id),
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25),
        Arrival(time_weight=2, phase=test_event.picks[4].phase_hint,
                pick_id=test_event.picks[4].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25)]
    # Add in error info (line E)
    test_event.origins[0].quality = OriginQuality(
        standard_error=0.01, azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth; QuakeML has
    # semi-major and semi-minor
    test_event.origins[0].origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    test_event.origins[0].time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F). Note: the program used has
    # to be checked to determine which fields are filled.
    test_event.focal_mechanisms.append(FocalMechanism(
        nodal_planes=NodalPlanes(nodal_plane_1=NodalPlane(
            strike=180, dip=20, rake=30, strike_errors=QuantityError(10),
            dip_errors=QuantityError(10), rake_errors=QuantityError(20))),
        method_id=ResourceIdentifier("smi:nc.anss.org/focalMechanism/FPFIT"),
        creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
        station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred origin.
    # Moment tensor includes another origin
    test_event.origins.append(Origin(
        time=UTCDateTime("2012-03-26") + 1.2, latitude=45.1, longitude=25.2,
        depth=14500))
    test_event.magnitudes.append(Magnitude(
        mag=0.1, magnitude_type='MW', creation_info=CreationInfo('TES'),
        origin_id=test_event.origins[-1].resource_id))
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(FocalMechanism(
        moment_tensor=MomentTensor(
            derived_origin_id=test_event.origins[-1].resource_id,
            moment_magnitude_id=test_event.magnitudes[-1].resource_id,
            scalar_moment=100, tensor=Tensor(
                m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20, m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
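
A quick round-trip sketch for the fixture above, assuming a recent ObsPy
that provides read_events (the output file name is illustrative):

from obspy import read_events
from obspy.core.event import Catalog

cat = Catalog(events=[full_test_event()])
cat.write("full_test_event.xml", format="QUAKEML")
assert len(read_events("full_test_event.xml")) == 1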
Example #57
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1.2
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15000
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].quality = OriginQuality(standard_error=0.01)
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    return test_event

    def test_nortoevmag(self):
        self.assertEqual(_nortoevmag('b'), 'mB')
        # raises "UserWarning: bob is not convertible"
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always', UserWarning)
            self.assertEqual(_nortoevmag('bob'), '')
        self.assertEqual(len(w), 1)
        self.assertEqual('bob is not convertible', str(w[0].message))

    def test_evmagtonor(self):
        self.assertEqual(_evmagtonor('mB'), 'B')
        self.assertEqual(_evmagtonor('M'), 'W')
        # raises "UserWarning: bob is not convertible"
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always', UserWarning)
            self.assertEqual(_evmagtonor('bob'), '')
        self.assertEqual(len(w), 1)
        self.assertEqual('bob is not convertible', str(w[0].message))
Example #58
def full_test_event():
    """
    Function to generate a basic, full test event
    """
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1.2
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15000
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id

    # Define the test pick
    _waveform_id_1 = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                      network_code='NZ')
    _waveform_id_2 = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                                      network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, phase_hint='IAML',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_1, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.72,
             evaluation_mode="manual"))
    # Unassociated pick
    test_event.picks.append(
        Pick(waveform_id=_waveform_id_2, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=UTCDateTime("2012-03-26") + 1.62,
             evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id, type='END',
                  category='duration', unit='s', magnitude_hint='Mc',
                  snr=2.3))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[2].phase_hint,
                pick_id=test_event.picks[2].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[3].phase_hint,
                pick_id=test_event.picks[3].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    return test_event
Example #59
    def build(self):
        """
        Build an obspy moment tensor focal mech event

        This makes the tensor output into an Event containing:
        1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
        2) a Magnitude of the Mw from the Tensor

        Which is what we want for outputting QuakeML using
        the (slightly modified) obspy code.

        Input
        -----
        self.parser => parser built from an open file OR str from filehandle.read()

        Output
        ------
        event => instance of Event() class as described above
        """
        p = self.parser
        event         = Event(event_type='earthquake')
        origin        = Origin()
        focal_mech    = FocalMechanism()
        nodal_planes  = NodalPlanes()
        moment_tensor = MomentTensor()
        principal_ax  = PrincipalAxes()
        magnitude     = Magnitude()
        data_used     = DataUsed()
        creation_info = CreationInfo(agency_id='NN')
        ev_mode = 'automatic'
        ev_stat = 'preliminary'
        evid = None
        orid = None
        # Parse the entire file line by line.
        for n,l in enumerate(p.line):
            if 'REVIEWED BY NSL STAFF' in l:
                ev_mode = 'manual'
                ev_stat = 'reviewed'
            if 'Event ID' in l:
                evid = p._id(n)
            if 'Origin ID' in l:
                orid = p._id(n)
            if 'Ichinose' in l:
                moment_tensor.category = 'regional'
            if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
                ev = p._event_info(n)
            if 'Depth' in l:
                derived_depth = p._depth(n)
            if 'Mw' in l:
                magnitude.mag = p._mw(n) 
                magnitude.magnitude_type = 'Mw'
            if 'Mo' in l and 'dyne' in l:
                moment_tensor.scalar_moment = p._mo(n)
            if 'Percent Double Couple' in l:
                moment_tensor.double_couple = p._percent(n)
            if 'Percent CLVD' in l:
                moment_tensor.clvd = p._percent(n)
            if 'Epsilon' in l:
                moment_tensor.variance = p._epsilon(n)
            if 'Percent Variance Reduction' in l:
                moment_tensor.variance_reduction = p._percent(n)
            if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
                np = p._double_couple(n)
                nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
                nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
                nodal_planes.preferred_plane = 1
            if 'Spherical Coordinates' in l:
                mt = p._mt_sphere(n)
                moment_tensor.tensor = Tensor(
                    m_rr = mt['Mrr'],
                    m_tt = mt['Mtt'],
                    m_pp = mt['Mff'],
                    m_rt = mt['Mrt'],
                    m_rp = mt['Mrf'],
                    m_tp = mt['Mtf'],
                    )
            if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
                ax = p._vectors(n)
                principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
                principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
                principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
            if 'Number of Stations' in l:
                data_used.station_count = p._number_of_stations(n)
            if 'Maximum' in l and 'Gap' in l:
                focal_mech.azimuthal_gap = p._gap(n)
            if re.match(r'^Date', l):
                creation_info.creation_time = p._creation_time(n)
        # Creation Time
        creation_info.version = orid
        # Fill in magnitude values
        magnitude.evaluation_mode = ev_mode
        magnitude.evaluation_status = ev_stat
        magnitude.creation_info = creation_info.copy()
        magnitude.resource_id = self._rid(magnitude)
        # Stub origin
        origin.time = ev.get('time')
        origin.latitude = ev.get('lat')
        origin.longitude = ev.get('lon')
        origin.depth = derived_depth * 1000.
        origin.depth_type = "from moment tensor inversion"
        origin.creation_info = creation_info.copy()
        # Unique from true origin ID
        _oid = self._rid(origin)
        origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
        del _oid
        # Make an id for the MT that references this origin
        ogid = str(origin.resource_id)
        doid = ResourceIdentifier(ogid, referred_object=origin)
        # Make an id for the moment tensor mag which references this mag
        mrid = str(magnitude.resource_id)
        mmid = ResourceIdentifier(mrid, referred_object=magnitude)
        # MT todo: could check/use URL for RID if parsing the php file
        moment_tensor.evaluation_mode = ev_mode
        moment_tensor.evaluation_status = ev_stat
        moment_tensor.data_used = data_used
        moment_tensor.moment_magnitude_id = mmid
        moment_tensor.derived_origin_id = doid
        moment_tensor.creation_info = creation_info.copy()
        moment_tensor.resource_id = self._rid(moment_tensor)
        # Fill in focal_mech values
        focal_mech.nodal_planes  = nodal_planes
        focal_mech.moment_tensor = moment_tensor
        focal_mech.principal_axes = principal_ax
        focal_mech.creation_info = creation_info.copy()
        focal_mech.resource_id = self._rid(focal_mech)
        # add mech and new magnitude to event
        event.focal_mechanisms = [focal_mech]
        event.magnitudes = [magnitude]
        event.origins = [origin]
        event.creation_info = creation_info.copy()
        # If an MT was done, that's the preferred mag/mech
        event.preferred_magnitude_id = str(magnitude.resource_id)
        event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
        if evid:
            event.creation_info.version = evid
        event.resource_id = self._rid(event)
        self.event = event
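
The moment_magnitude_id / derived_origin_id wiring above relies on ObsPy's
ResourceIdentifier lookup: an identifier created with the same id string
resolves back to the registered object. A minimal sketch (recent ObsPy
spells the method get_referred_object; older code uses getReferredObject):

from obspy.core.event import Origin, ResourceIdentifier

org = Origin(latitude=45.0, longitude=25.0)
rid = ResourceIdentifier(str(org.resource_id), referred_object=org)
assert rid.get_referred_object() is org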
Example #60
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool, whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    The Event is new if no event was input; otherwise the FocalMechanism is
    added to the existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:hash/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i], station_code=hp.sname[_i], channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:hash/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(), author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier('smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s], resource_id=ResourceIdentifier(str(focal_mech.resource_id) + '/comment/quality'))
            )
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
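
Minimal usage sketch; it assumes a configured hashpy.HashPype instance on
which the HASH computation has already been run (that setup is outside the
scope of the example above):

from hashpy import HashPype

hp = HashPype()
# ... load inputs and run the HASH driver here (omitted) ...
event = outputOBSPY(hp, only_fm_picks=True)
print(event.preferred_focal_mechanism_id)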