Example #1
 def _get_creation_info(self):
     creation_info = CreationInfo(creation_time=UTCDateTime())
     if self.agency:
         creation_info.agency_id = self.agency
     if self.author:
         creation_info.author = self.author
     return creation_info
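
For context, a self-contained sketch of the same pattern outside a class; the agency/author values here are placeholders, not taken from any project above:

from obspy import UTCDateTime
from obspy.core.event import CreationInfo

def get_creation_info(agency=None, author=None):
    # stamp "now" and attach agency/author only when they are set
    creation_info = CreationInfo(creation_time=UTCDateTime())
    if agency:
        creation_info.agency_id = agency
    if author:
        creation_info.author = author
    return creation_info

print(get_creation_info(agency='XX', author='analyst'))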
Example #3
    def _parseRecordAE(self, line, event):
        """
        Parses the 'additional hypocenter error and magnitude record' AE
        """
        orig_time_stderr = self._floatUnused(line[2:7])
        latitude_stderr = self._floatUnused(line[8:14])
        longitude_stderr = self._floatUnused(line[15:21])
        depth_stderr = self._floatUnused(line[22:27])
        gap = self._floatUnused(line[28:33])
        mag1 = self._float(line[33:36])
        mag1_type = line[36:38]
        mag2 = self._float(line[43:46])
        mag2_type = line[46:48]

        evid = event.resource_id.id.split('/')[-1]
        # this record is to be associated with the latest origin
        origin = event.origins[-1]
        self._storeUncertainty(origin.time_errors, orig_time_stderr)
        self._storeUncertainty(origin.latitude_errors,
                               self._latErrToDeg(latitude_stderr))
        self._storeUncertainty(
            origin.longitude_errors,
            self._lonErrToDeg(longitude_stderr, origin.latitude))
        self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
        origin.quality.azimuthal_gap = gap
        if mag1 > 0:
            mag = Magnitude()
            mag1_id = mag1_type.lower()
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(
                agency_id=origin.creation_info.agency_id)
            mag.mag = mag1
            mag.magnitude_type = mag1_type
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
        if mag2 > 0:
            mag = Magnitude()
            mag2_id = mag2_type.lower()
            # mag1_id is only defined when a first magnitude was read above
            if mag1 > 0 and mag2_id == mag1_id:
                mag2_id += '2'
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(
                agency_id=origin.creation_info.agency_id)
            mag.mag = mag2
            mag.magnitude_type = mag2_type
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
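
_storeUncertainty is internal to this mchedr reader and not shown here; a minimal sketch of what such a helper plausibly does (an assumption, not the actual implementation) is:

from obspy.core.event import QuantityError

def _store_uncertainty(error, value, scale=1):
    # hedged sketch: write a scaled value into a QuantityError,
    # skipping fields that were blank in the fixed-width record
    if value is not None:
        error['uncertainty'] = value * scale

err = QuantityError()
_store_uncertainty(err, 1.3, scale=1000)
print(err.uncertainty)  # 1300.0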
Example #4
 def create_catalog(time):
     """
     Create a events object with a UTCTimeObject as event creation info.
     """
     creation_info = CreationInfo(creation_time=obspy.UTCDateTime(time))
     event = Event(creation_info=creation_info)
     return Catalog(events=[event])
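
A quick usage check for the helper above (assuming obspy is installed and create_catalog plus its imports are in scope):

import obspy

cat = create_catalog("2017-09-18T00:00:00")
assert cat[0].creation_info.creation_time == obspy.UTCDateTime("2017-09-18")
print(cat)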
Example #5
    def __init__(self, event_xml, **kwargs):
        self.event_xml = event_xml
        self.cat = read_events(event_xml)
        self.orig_events = self.cat.events
        self.events = None

        new_comments = kwargs.get("comments", [])
        self.comments = new_comments + self.cat.comments
        self._set_resource_id(kwargs.get("resource_id", None))
        new_description = kwargs.get("description", "PST ILocCatalog Modified")
        self.description = (('orig_description: '
                             + self.cat.description + '; ')
                            if self.cat.description is not None else '') + \
                           ('description: ' + new_description)
        old_ci = self.cat.creation_info

        self.creation_info = CreationInfo(
            agency_id=old_ci.agency_id,
            agency_uri=old_ci.agency_uri,
            author=(('orig_author: ' + old_ci.author + '; ')
                    if old_ci.author is not None else '') + 'PST ILocCatalog',
            creation_time=UTCDateTime()
        )

        super(ILocCatalog, self).__init__(
            events=self.events,
            comments=self.comments,
            creation_info=self.creation_info,
            description=self.description,
            resource_id=self.resource_id
        )
Example #6
def _read_common_header(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    Parses the first few common header lines and sets creation time and some
    other basic info.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC out
        file.
    """
    event = Event()
    # parse time.. too much bother to mess around with switching locales, so do
    # it manually.. example:
    # "  Fri Sep  8 14:54:58 2017 for program Focmec"
    month, day, time_of_day, year = lines[0].split()[1:5]
    year = int(year)
    day = int(day)
    month = int(MONTHS[month.lower()])
    hour, minute, second = [int(x) for x in time_of_day.split(':')]
    event.creation_info = CreationInfo()
    event.creation_info.creation_time = UTCDateTime(year, month, day, hour,
                                                    minute, second)
    # get rid of those common lines already parsed
    lines = lines[4:]
    return event, lines
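
MONTHS is referenced but not defined in this excerpt; a plausible mapping, mirroring the C-locale month abbreviations in the sample header line, would be:

MONTHS = {'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6,
          'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12}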
Example #7
    def _parseRecordAH(self, line, event):
        """
        Parses the 'additional hypocenter' record AH
        """
        date = line[2:10]
        time = line[11:20]
        # unused: hypocenter_quality = line[20]
        latitude = self._float(line[21:27])
        lat_type = line[27]
        longitude = self._float(line[29:36])
        lon_type = line[36]
        # unused: preliminary_flag = line[37]
        depth = self._float(line[38:43])
        # unused: depth_quality = line[43]
        standard_dev = self._floatUnused(line[44:48])
        station_number = self._intUnused(line[48:51])
        phase_number = self._intUnused(line[51:55])
        source_code = line[56:60].strip()

        evid = event.resource_id.id.split('/')[-1]
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin', evid, source_code.lower()))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo(agency_id=source_code)
        origin.time = UTCDateTime(date + time)
        origin.latitude = latitude * self._coordinateSign(lat_type)
        origin.longitude = longitude * self._coordinateSign(lon_type)
        origin.depth = depth * 1000
        origin.depth_type = 'from location'
        origin.quality = OriginQuality()
        origin.quality.standard_error = standard_dev
        origin.quality.used_station_count = station_number
        origin.quality.used_phase_count = phase_number
        origin.type = 'hypocenter'
        event.origins.append(origin)
Example #8
    def _parse_record_hy(self, line):
        """
        Parses the 'hypocenter' record HY
        """
        date = line[2:10]
        time = line[11:20]
        # unused: location_quality = line[20]
        latitude = self._float(line[21:27])
        lat_type = line[27]
        longitude = self._float(line[29:36])
        lon_type = line[36]
        depth = self._float(line[38:43])
        # unused: depth_quality = line[43]
        standard_dev = self._float(line[44:48])
        station_number = self._int(line[48:51])
        # unused: version_flag = line[51]
        fe_region_number = line[52:55]
        fe_region_name = self._decode_fe_region_number(fe_region_number)
        source_code = line[55:60].strip()

        event = Event()
        # FIXME: a smarter way to define evid?
        evid = date + time
        res_id = '/'.join((res_id_prefix, 'event', evid))
        event.resource_id = ResourceIdentifier(id=res_id)
        description = EventDescription(
            type='region name',
            text=fe_region_name)
        event.event_descriptions.append(description)
        description = EventDescription(
            type='Flinn-Engdahl region',
            text=fe_region_number)
        event.event_descriptions.append(description)
        origin = Origin()
        res_id = '/'.join((res_id_prefix, 'origin', evid))
        origin.resource_id = ResourceIdentifier(id=res_id)
        origin.creation_info = CreationInfo()
        if source_code:
            origin.creation_info.agency_id = source_code
        else:
            origin.creation_info.agency_id = 'USGS-NEIC'
        res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
        origin.earth_model_id = ResourceIdentifier(id=res_id)
        origin.time = UTCDateTime(date + time)
        origin.latitude = latitude * self._coordinate_sign(lat_type)
        origin.longitude = longitude * self._coordinate_sign(lon_type)
        origin.depth = depth * 1000
        origin.depth_type = 'from location'
        origin.quality = OriginQuality()
        origin.quality.associated_station_count = station_number
        origin.quality.standard_error = standard_dev
        # associated_phase_count can be incremented in records 'P ' and 'S '
        origin.quality.associated_phase_count = 0
        # depth_phase_count can be incremented in record 'S '
        origin.quality.depth_phase_count = 0
        origin.origin_type = 'hypocenter'
        origin.region = fe_region_name
        event.origins.append(origin)
        return event
Example #9
 def _create_info(self,
                  author: Optional[str] = None,
                  agency_id: Optional[str] = None):
     """Make creation info for """
     out = CreationInfo(creation_time=UTCDateTime().now(),
                        author=author,
                        agency_id=agency_id)
     return out
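
Note that passing None for unset fields is fine; CreationInfo simply leaves those attributes unset:

from obspy.core.event import CreationInfo

info = CreationInfo(author=None, agency_id='XX')
print(info.author, info.agency_id)  # None XX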
Example #10
 def test_update_after(self, catalog):
     """test that ids can be used to filter"""
     eve = catalog[0]
     time = obspy.UTCDateTime("2017-05-04")
     eve.creation_info = CreationInfo(creation_time=time)
     out = catalog.get_events(updatedafter=time - 2)
     assert len(out) == 1
     assert out[0] == catalog[0]
Example #11
 def set_creation_info(self,
                       username,
                       agency_id=AGENCY_ID,
                       agency_uri=AGENCY_URI):
     self.creation_info = CreationInfo()
     self.creation_info.author = username
     self.creation_info.agency_id = agency_id
     self.creation_info.agency_uri = agency_uri
     self.creation_info.creation_time = UTCDateTime()
Example #12
def append_cmt_to_catalog(event_origin,
                          cmt_to_add,
                          tag="new_cmt",
                          author="Princeton GATG",
                          change_preferred_id=True):
    """
    Add cmt to event. The cmt.resource_id will be appened tag to avoid
    tag duplication problem in event.
    :param event: the event that you want to add cmt in.
    :type event: str, obspy.core.event.Event or obspy.core.event.Catalog
    :param cmt: the cmt that you want to add to event.
    :type event: str, obspy.core.event.Event or obspy.core.event.Catalog
    :param change_preferred_id: change all preferred_id to the new added cmt
    :type change_preferred_id: bool
    :return: obspy.Catalog
    """
    event = _parse_event(event_origin)
    cmt_event = _parse_event(cmt_to_add)

    if not isinstance(tag, str):
        raise TypeError("tag(%s) should be type of str" % type(tag))

    if not isinstance(author, str):
        raise TypeError("author(%s) should be type of str" % type(author))

    # User defined creation information
    creation_info = CreationInfo(author=author, version=tag)

    # add cmt origin
    cmt_origin = prepare_cmt_origin(cmt_event, tag, creation_info)
    event.origins.append(cmt_origin)

    # add cmt magnitude
    cmt_mag = prepare_cmt_mag(cmt_event, tag, cmt_origin.resource_id,
                              creation_info)
    event.magnitudes.append(cmt_mag)

    # add cmt focal mechanism
    cmt_focal = prepare_cmt_focal(cmt_event, tag, cmt_origin.resource_id,
                                  cmt_mag.resource_id, creation_info)
    event.focal_mechanisms.append(cmt_focal)

    # change preferred id if needed
    if change_preferred_id:
        event.preferred_origin_id = str(cmt_origin.resource_id)
        event.preferred_magnitude_id = str(cmt_mag.resource_id)
        event.preferred_focal_mechanism_id = str(cmt_focal.resource_id)
        _validator(event, cmt_origin, cmt_mag, cmt_focal)

    new_cat = Catalog()
    new_cat.append(event)

    return new_cat
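
_parse_event is project-internal and not shown; a hypothetical equivalent that accepts a path, an Event, or a single-event Catalog might look like this (names and behaviour assumed):

from obspy import read_events
from obspy.core.event import Catalog, Event

def _parse_event(event):
    # hypothetical sketch, not the project's actual helper
    if isinstance(event, Event):
        return event
    if isinstance(event, Catalog):
        if len(event) != 1:
            raise ValueError("Catalog must contain exactly one event")
        return event[0]
    # otherwise treat it as a path readable by obspy
    return read_events(event)[0]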
Example #13
 def _deserialize(self):
     catalog = Catalog()
     res_id = '/'.join((res_id_prefix,
                        self.filename.replace(':', '/')))\
         .replace('\\', '/').replace('//', '/')
     catalog.resource_id = ResourceIdentifier(id=res_id)
     catalog.description = 'Created from NEIC PDE mchedr format'
     catalog.comments = []
     catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
     for line in self.fh.readlines():
         # XXX: ugly, probably we should do everything in byte strings
         # here? Is the pde / mchedr format unicode aware?
         line = line.decode()
         record_id = line[0:2]
         if record_id == 'HY':
             event = self._parse_record_hy(line)
             catalog.append(event)
         elif record_id == 'P ':
             pick, arrival = self._parse_record_p(line, event)
         elif record_id == 'E ':
             self._parse_record_e(line, event)
         elif record_id == 'L ':
             self._parse_record_l(line, event)
         elif record_id == 'A ':
             self._parse_record_a(line, event)
         elif record_id == 'C ':
             self._parse_record_c(line, event)
         elif record_id == 'AH':
             self._parse_record_ah(line, event)
         elif record_id == 'AE':
             self._parse_record_ae(line, event)
         elif record_id == 'Dp':
             focal_mechanism = self._parse_record_dp(line, event)
         elif record_id == 'Dt':
             self._parse_record_dt(line, focal_mechanism)
         elif record_id == 'Da':
             self._parse_record_da(line, focal_mechanism)
         elif record_id == 'Dc':
             self._parse_record_dc(line, focal_mechanism)
         elif record_id == 'M ':
             self._parse_record_m(line, event, pick)
         elif record_id == 'S ':
             self._parse_record_s(line, event, pick, arrival)
     self.fh.close()
     # strip extra whitespaces from event comments
     for event in catalog:
         for comment in event.comments:
             comment.text = comment.text.strip()
         event.scope_resource_ids()
     return catalog
Example #14
    def _parse_magnitude(self, line):
        #    1-5  a5   magnitude type (mb, Ms, ML, mbmle, msmle)
        magnitude_type = line[0:5].strip()
        #      6  a1   min max indicator (<, >, or blank)
        # TODO figure out the meaning of this min max indicator
        min_max_indicator = line[5:6].strip()
        #   7-10  f4.1 magnitude value
        mag = float_or_none(line[6:10])
        #  12-14  f3.1 standard magnitude error
        mag_errors = float_or_none(line[11:14])
        #  16-19  i4   number of stations used to calculate magnitude
        station_count = int_or_none(line[15:19])
        #  21-29  a9   author of the origin
        author = line[20:29].strip()
        #  31-38  a8   origin identification
        origin_id = line[30:38].strip()

        # process items
        if author:
            creation_info = CreationInfo(author=author)
        else:
            creation_info = None
        mag_errors = mag_errors and QuantityError(uncertainty=mag_errors)
        if origin_id:
            origin_id = self._construct_id(['origin', origin_id])
        else:
            origin_id = None
        if not magnitude_type:
            magnitude_type = None
        # magnitudes have no id field, so construct a unique one at least
        resource_id = self._construct_id(['magnitude'], add_hash=True)

        if min_max_indicator:
            msg = 'Magnitude min/max indicator field not yet implemented'
            warnings.warn(msg)

        # combine and return
        mag = Magnitude(magnitude_type=magnitude_type,
                        mag=mag,
                        station_count=station_count,
                        creation_info=creation_info,
                        mag_errors=mag_errors,
                        origin_id=origin_id,
                        resource_id=resource_id)
        # event init always sets an empty QuantityError, even when specifying
        # None, which is strange
        for key in ['mag_errors']:
            setattr(mag, key, None)
        return mag
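
float_or_none and int_or_none are small parsing helpers; minimal equivalents, assuming they just map blank fixed-width fields to None, would be:

def float_or_none(string):
    # blank fixed-width field -> None, otherwise parse as float
    string = string.strip()
    return float(string) if string else None

def int_or_none(string):
    string = string.strip()
    return int(string) if string else None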
Example #15
 def _deserialize(self):
     catalog = Catalog()
     res_id = '/'.join((res_id_prefix, self.filename))
     catalog.resource_id = ResourceIdentifier(id=res_id)
     catalog.description = 'Created from NEIC PDE mchedr format'
     catalog.comments = []
     catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
     for line in self.fh.readlines():
         record_id = line[0:2]
         if record_id == 'HY':
             event = self._parseRecordHY(line)
             catalog.append(event)
         elif record_id == 'P ':
             pick, arrival = self._parseRecordP(line, event)
         elif record_id == 'E ':
             self._parseRecordE(line, event)
         elif record_id == 'L ':
             self._parseRecordL(line, event)
         elif record_id == 'A ':
             self._parseRecordA(line, event)
         elif record_id == 'C ':
             self._parseRecordC(line, event)
         elif record_id == 'AH':
             self._parseRecordAH(line, event)
         elif record_id == 'AE':
             self._parseRecordAE(line, event)
         elif record_id == 'Dp':
             focal_mechanism = self._parseRecordDp(line, event)
         elif record_id == 'Dt':
             self._parseRecordDt(line, focal_mechanism)
         elif record_id == 'Da':
             self._parseRecordDa(line, focal_mechanism)
         elif record_id == 'Dc':
             self._parseRecordDc(line, focal_mechanism)
         elif record_id == 'M ':
             self._parseRecordM(line, event, pick)
         elif record_id == 'S ':
             self._parseRecordS(line, event, pick, arrival)
     self.fh.close()
     # strip extra whitespaces from event comments
     for event in catalog:
         for comment in event.comments:
             comment.text = comment.text.strip()
     return catalog
Example #16
 def test_creation_info(self):
     # 1 - empty Origin class will set creation_info to None
     orig = Origin()
     assert orig.creation_info is None
     # 2 - preset via dict or existing CreationInfo object
     orig = Origin(creation_info={})
     assert isinstance(orig.creation_info, CreationInfo)
     orig = Origin(creation_info=CreationInfo(author='test2'))
     assert isinstance(orig.creation_info, CreationInfo)
     assert orig.creation_info.author == 'test2'
     # 3 - check set values
     orig = Origin(creation_info={'author': 'test'})
     assert orig.creation_info == orig['creation_info']
     assert orig.creation_info.author == 'test'
     assert orig['creation_info']['author'] == 'test'
     orig.creation_info.agency_id = "muh"
     assert orig.creation_info == orig['creation_info']
     assert orig.creation_info.agency_id == 'muh'
     assert orig['creation_info']['agency_id'] == 'muh'
Example #17
def _read_focmec_out(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC out
        file.
    """
    event, _ = _read_common_header(lines)
    # now move to first line with a focal mechanism
    for i, line in enumerate(lines):
        if line.split()[:3] == ['Dip', 'Strike', 'Rake']:
            break
    else:
        return event
    header = lines[:i]
    polarity_count, weighted = _get_polarity_count(header)
    focmec_list_header = lines[i]
    event.comments.append(Comment(text='\n'.join(header)))
    try:
        lines = lines[i + 1:]
    except IndexError:
        return event
    for line in lines:
        # allow for empty lines (maybe they can happen at the end sometimes..)
        if not line.strip():
            continue
        comment = Comment(text='\n'.join((focmec_list_header, line)))
        items = line.split()
        dip, strike, rake = [float(x) for x in items[:3]]
        plane = NodalPlane(strike=strike, dip=dip, rake=rake)
        planes = NodalPlanes(nodal_plane_1=plane, preferred_plane=1)
        # XXX ideally should compute the auxiliary plane..
        focmec = FocalMechanism(nodal_planes=planes)
        focmec.station_polarity_count = polarity_count
        focmec.creation_info = CreationInfo(
            version='FOCMEC', creation_time=event.creation_info.creation_time)
        if not weighted:
            errors = sum([int(x) for x in items[3:6]])
            focmec.misfit = float(errors) / polarity_count
        focmec.comments.append(comment)
        event.focal_mechanisms.append(focmec)
    return event
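
Regarding the XXX note above: obspy ships a helper that can derive the auxiliary plane, so the gap could plausibly be filled like this (a sketch with made-up example angles):

from obspy.core.event import NodalPlane, NodalPlanes
from obspy.imaging.beachball import aux_plane

strike, dip, rake = 120.0, 45.0, -90.0  # example values only
strike2, dip2, rake2 = aux_plane(strike, dip, rake)
planes = NodalPlanes(
    nodal_plane_1=NodalPlane(strike=strike, dip=dip, rake=rake),
    nodal_plane_2=NodalPlane(strike=strike2, dip=dip2, rake=rake2),
    preferred_plane=1)
print(planes)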
Example #18
 def test_creationInfo(self):
     # 1 - empty Origin class will set creation_info to None
     orig = Origin()
     self.assertEqual(orig.creation_info, None)
     # 2 - preset via dict or existing CreationInfo object
     orig = Origin(creation_info={})
     self.assertTrue(isinstance(orig.creation_info, CreationInfo))
     orig = Origin(creation_info=CreationInfo(author='test2'))
     self.assertTrue(isinstance(orig.creation_info, CreationInfo))
     self.assertEqual(orig.creation_info.author, 'test2')
     # 3 - check set values
     orig = Origin(creation_info={'author': 'test'})
     self.assertEqual(orig.creation_info, orig['creation_info'])
     self.assertEqual(orig.creation_info.author, 'test')
     self.assertEqual(orig['creation_info']['author'], 'test')
     orig.creation_info.agency_id = "muh"
     self.assertEqual(orig.creation_info, orig['creation_info'])
     self.assertEqual(orig.creation_info.agency_id, 'muh')
     self.assertEqual(orig['creation_info']['agency_id'], 'muh')
Example #19
 def test_more_than_three_mags(self):
     cat = Catalog()
     cat += full_test_event()
     cat[0].magnitudes.append(
         Magnitude(mag=0.9,
                   magnitude_type='MS',
                   creation_info=CreationInfo('TES'),
                   origin_id=cat[0].origins[0].resource_id))
     with NamedTemporaryFile(suffix='.out') as tf:
         # raises UserWarning: mb is not convertible
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             cat.write(tf.name, format='nordic')
         # raises "UserWarning: AIN in header, currently unsupported"
         with warnings.catch_warnings():
             warnings.simplefilter('ignore', UserWarning)
             cat_back = read_events(tf.name)
         for event_1, event_2 in zip(cat, cat_back):
             self.assertTrue(
                 len(event_1.magnitudes) == len(event_2.magnitudes))
             _assert_similarity(event_1, event_2)
Example #20
 def __init__(self,
              name=None,
              st=None,
              lowcut=None,
              highcut=None,
              samp_rate=None,
              filt_order=None,
              process_length=None,
              prepick=None,
              event=None):
     name_regex = re.compile(r"^[a-z_0-9]+$")
     if name is not None and not re.match(name_regex, name):
         raise ValueError("Invalid name: '%s' - Must satisfy the regex "
                          "'%s'." % (name, name_regex.pattern))
     if name is None:
         temp_name = "unnamed"
     else:
         temp_name = name
     self.name = name
     self.st = st
     self.lowcut = lowcut
     self.highcut = highcut
     self.samp_rate = samp_rate
     if st and samp_rate is not None:
         for tr in st:
             if not tr.stats.sampling_rate == self.samp_rate:
                 raise MatchFilterError(
                     'Sampling rates do not match in data.')
     self.filt_order = filt_order
     self.process_length = process_length
     self.prepick = prepick
     if event is not None:
         if "eqcorrscan_template_" + temp_name not in \
                 [c.text for c in event.comments]:
             event.comments.append(
                 Comment(
                     text="eqcorrscan_template_" + temp_name,
                     creation_info=CreationInfo(agency_id='eqcorrscan',
                                                author=getpass.getuser())))
     self.event = event
Example #21
 def __init__(self,
              trace,
              time,
              name='',
              comments='',
              method=method_other,
              phase_hint=None,
              polarity='undecidable',
              aic=None,
              n0_aic=None,
              *args, **kwargs):
     self.trace = trace
     if time < 0 or time >= len(self.trace.signal):
         raise ValueError("Event position must be a value between 0 and %d"
                          % len(self.trace.signal))
     self.stime = time
     self.name = name
     self.method = method
     self.aic = aic
     self.n0_aic = n0_aic
     phase_hint = phase_hint if phase_hint in PHASE_VALUES else PHASE_VALUES[0]
     super(ApasvoEvent, self).__init__(time=self.time,
                                       method_id=ResourceIdentifier(method),
                                       phase_hint=phase_hint,
                                       polarity=polarity,
                                       creation_info=CreationInfo(
                                           author=kwargs.get('author', ''),
                                           agency_id=kwargs.get('agency', ''),
                                           creation_time=UTCDateTime.now(),
                                       ),
                                       waveform_id=WaveformStreamID(
                                           network_code=self.trace.stats.get('network', ''),
                                           station_code=self.trace.stats.get('station', ''),
                                           location_code=self.trace.stats.get('location', ''),
                                           channel_code=self.trace.stats.get('channel', ''),
                                       ),
                                       *args,
                                       **kwargs)
     self.comments = comments
Example #22
def brightness(stations,
               nodes,
               lags,
               stream,
               threshold,
               thresh_type,
               template_length,
               template_saveloc,
               coherence_thresh,
               coherence_stations=['all'],
               coherence_clip=False,
               gap=2.0,
               clip_level=100,
               instance=0,
               pre_pick=0.2,
               plotsave=True,
               cores=1):
    r"""Function to calculate the brightness function in terms of energy for \
    a day of data over the entire network for a given grid of nodes.

    Note data in stream must be all of the same length and have the same
    sampling rates.

    :type stations: list
    :param stations: List of station names in the form where stations[i] \
        refers to nodes[i][:] and lags[i][:]
    :type nodes: list, tuple
    :param nodes: List of node points where nodes[i] refers to stations[i] \
        and nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is \
        longitude in degrees, nodes[:][:][2] is depth in km.
    :type lags: :class: 'numpy.array'
    :param lags: Array of arrays where lags[i][:] refers to stations[i]. \
        lags[i][j] should be the delay to the nodes[i][j] for stations[i] in \
        seconds.
    :type stream: :class: `obspy.Stream`
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold: Threshold value for detection of template within the \
        brightness function
    :type thresh_type: str
    :param thresh_type: Either MAD or abs where MAD is the Median Absolute \
        Deviation and abs is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple of floats
    :param coherence_thresh: Threshold for removing incoherent peaks in the \
            network response, those below this will not be used as templates. \
            Must be in the form of (a,b) where the coherence is given by: \
            a-kchan/b where kchan is the number of channels used to compute \
            the coherence
    :type coherence_stations: list
    :param coherence_stations: List of stations to use in the coherence \
            thresholding - defaults to 'all' which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip: Start and end in seconds of data to window around, \
            defaults to False, which uses all the data given.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotsave: bool
    :param plotsave: Save or show plots, if False will try and show the plots \
            on screen - as this is designed for bulk use this is set to \
            True to save any plots rather than show them if you create \
            them - changes the backend of matplotlib, so if is set to \
            False you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type clip_level: float
    :param clip_level: Multiplier applied to the mean deviation of the energy \
                    as an upper limit, used to remove spikes (earthquakes, \
                    lightning, electrical spikes) from the energy stack.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections

    :return: list of templates as :class: `obspy.Stream` objects
    """
    from eqcorrscan.core.template_gen import _template_gen
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    # from joblib import Parallel, delayed
    from multiprocessing import Pool, cpu_count
    from copy import deepcopy
    from obspy import read as obsread
    from obspy.core.event import Catalog, Event, Pick, WaveformStreamID, Origin
    from obspy.core.event import EventDescription, CreationInfo, Comment
    import obspy
    import matplotlib.pyplot as plt
    from eqcorrscan.utils import EQcorrscan_plotting as plotting
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high gain
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise OverflowError('Too many streams, either re-code and cope with ' +
                            'either more memory usage, or less precision, or ' +
                            'reduce data volume')
    detections = []
    detect_lags = []
    parallel = True
    plotvar = True
    mem_issue = False
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    if not parallel:
        for i in range(0, len(nodes)):
            print(i)
            if not mem_issue:
                j, a = _node_loop(stations, lags[:, i], stream, plot=True)
                if 'energy' not in locals():
                    energy = a
                else:
                    energy = np.concatenate((energy, a), axis=0)
                print('energy: ' + str(np.shape(energy)))
            else:
                j, filename = _node_loop(stations, lags[:, i], stream, i,
                                         mem_issue)
        energy = np.array(energy)
        print(np.shape(energy))
    else:
        # Parallel run
        num_cores = cores
        if num_cores > len(nodes):
            num_cores = len(nodes)
        if num_cores > cpu_count():
            num_cores = cpu_count()
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_node_loop,
                             args=(stations, lags[:, i], stream, i, clip_level,
                                   mem_issue, instance))
            for i in range(len(nodes))
        ]
        pool.close()
        if not mem_issue:
            print('Computing the cumulative network response from memory')
            energy = [p.get() for p in results]
            pool.join()
            energy.sort(key=lambda tup: tup[0])
            energy = [node[1] for node in energy]
            energy = np.concatenate(energy, axis=0)
            print(energy.shape)
        else:
            pool.join()
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indeces = np.argmax(energy, axis=0)  # Indices of maximum energy
        print(indeces.shape)
        cum_net_resp = np.array([np.nan] * len(indeces))
        cum_net_resp[0] = energy[indeces[0]][0]
        peak_nodes = [nodes[indeces[0]]]
        for i in range(1, len(indeces)):
            cum_net_resp[i] = energy[indeces[i]][i]
            peak_nodes.append(nodes[indeces[i]])
        del energy, indeces
    else:
        print('Reading the temp files and computing network response')
        node_splits = len(nodes) // num_cores
        indeces = [range(node_splits)]
        for i in range(1, num_cores - 1):
            indeces.append(range(node_splits * i, node_splits * (i + 1)))
        indeces.append(range(node_splits * (i + 1), len(nodes)))
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_cum_net_resp, args=(indeces[i], instance))
            for i in range(num_cores)
        ]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indeces = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indeces = np.argmax(cum_net_resp, axis=0)
        print(indeces.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indeces[i]][i] for i in range(len(indeces))])
        peak_nodes = [
            nodes[node_indeces[indeces[i]][i]] for i in range(len(indeces))
        ]
        del indeces, node_indeces
    if plotvar:
        cum_net_trace = deepcopy(stream[0])
        cum_net_trace.data = cum_net_resp
        cum_net_trace.stats.station = 'NR'
        cum_net_trace.stats.channel = ''
        cum_net_trace.stats.network = 'Z'
        cum_net_trace.stats.location = ''
        cum_net_trace.stats.starttime = stream[0].stats.starttime
        cum_net_trace = obspy.Stream(cum_net_trace)
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
        # np.save('cum_net_resp.npy',cum_net_resp)
        #     cum_net_trace.plot(size=(800,600), equal_scale=False,\
        #                        outfile='NR_timeseries.eps')

    # Find detection within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections in to templates')
        # Generate a catalog of detections
        detections_cat = Catalog()
        for j, detection in enumerate(detections):
            print('Converting for detection ' + str(j) + ' of ' +
                  str(len(detections)))
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            print(node)
            # Look up node in nodes and find the associated lags
            index = nodes.index(node)
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code='NA')
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            print('Generating template for detection: ' + str(j))
            template = (_template_gen(event.picks, copy_of_stream,
                                      template_length, 'all'))
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherent = True
            else:
                print('Template was incoherent, coherence level: ' +
                      str(temp_coher))
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                print('No template for you')
    if plotvar:
        all_detections = [(cum_net_trace[-1].stats.starttime +
                           detection.detect_time).datetime
                          for detection in detections]
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             save=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
Example #23
 def process(self, event):
     print("Finished picking.")
     for trace_id, picker in self.p_picks.items():
         if picker.time is not None:
             if picker.polarity == "up":
                 polarity = "positive"
             elif picker.polarity == "down":
                 polarity = "negative"
             else:
                 polarity = "undecidable"
             self.event_out.picks.append(Pick(
                 phase_hint="P", time=picker.time,
                 waveform_id=WaveformStreamID(seed_string=trace_id),
                 evaluation_mode="manual", polarity=polarity,
                 creation_info=CreationInfo(author=getpass.getuser())))
     for trace_id, picker in self.s_picks.items():
         if picker.time is not None:
             self.event_out.picks.append(Pick(
                 phase_hint="S", time=picker.time,
                 waveform_id=WaveformStreamID(seed_string=trace_id),
                 evaluation_mode="manual",
                 creation_info=CreationInfo(author=getpass.getuser())))
     for trace_id, picker in self.amplitude_picks.items():
         if picker.time is not None:
             amp_pick = Pick(
                 time=picker.time, phase_hint="IAML",
                 waveform_id=WaveformStreamID(seed_string=trace_id),
                 evaluation_mode="manual",
                 creation_info=CreationInfo(author=getpass.getuser()))
             self.event_out.picks.append(amp_pick)
             self.event_out.amplitudes.append(Amplitude(
                 generic_amplitude=picker.amplitude, type="A",
                 pick_id=amp_pick.resource_id,
                 waveform_id=WaveformStreamID(seed_string=trace_id),
                 evaluation_mode="manual",
                 creation_info=CreationInfo(author=getpass.getuser())))
     for trace_id, picker in self.duration_picks.items():
         if picker.time is None:
             continue
         # Get linked P pick
         duration_start = [
             p for p in self.event_out.picks 
             if p.phase_hint == "P" and
             p.waveform_id.get_seed_string() == trace_id]
         if len(duration_start) == 0:
             print("No matching P for duration on {0}".format(trace_id))
             continue
         duration_start = sorted([p.time for p in duration_start])[0]
         print("Duration: {0:.4f}s".format(picker.time - duration_start))
         duration_pick = Pick(
             time=picker.time, phase_hint="END",
             waveform_id=WaveformStreamID(seed_string=trace_id),
             evaluation_mode="manual",
             creation_info=CreationInfo(author=getpass.getuser()))
         self.event_out.picks.append(duration_pick)
         self.event_out.amplitudes.append(Amplitude(
             generic_amplitude=picker.time - duration_start, type="END",
             pick_id=duration_pick.resource_id,
             waveform_id=WaveformStreamID(seed_string=trace_id),
             evaluation_mode="manual",
             creation_info=CreationInfo(author=getpass.getuser())))
     print("Finished processing event. Returning")
     return
Example #24
    def construct(self, method, lowcut, highcut, samp_rate, filt_order,
                  length, prepick, swin="all", process_len=86400,
                  all_horiz=False, delayed=True, plot=False, plotdir=None,
                  min_snr=None, parallel=False, num_cores=False,
                  skip_short_chans=False, save_progress=False, **kwargs):
        """
        Generate a Tribe of Templates.

        :type method: str
        :param method:
            Method of Tribe generation. Possible options are: `from_client`,
            `from_seishub`, `from_meta_file`.  See below on the additional
            required arguments for each method.
        :type lowcut: float
        :param lowcut:
            Low cut (Hz), if set to None will not apply a lowcut
        :type highcut: float
        :param highcut:
            High cut (Hz), if set to None will not apply a highcut.
        :type samp_rate: float
        :param samp_rate:
            New sampling rate in Hz.
        :type filt_order: int
        :param filt_order:
            Filter level (number of corners).
        :type length: float
        :param length: Length of template waveform in seconds.
        :type prepick: float
        :param prepick: Pre-pick time in seconds
        :type swin: str
        :param swin:
            P, S, P_all, S_all or all, defaults to all: see note in
            :func:`eqcorrscan.core.template_gen.template_gen`
        :type process_len: int
        :param process_len: Length of data in seconds to download and process.
        :type all_horiz: bool
        :param all_horiz:
            To use both horizontal channels even if there is only a pick on
            one of them.  Defaults to False.
        :type delayed: bool
        :param delayed: If True, each channel will begin relative to its own
            pick-time, if set to False, each channel will begin at the same
            time.
        :type plot: bool
        :param plot: Plot templates or not.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type min_snr: float
        :param min_snr:
            Minimum signal-to-noise ratio for a channel to be included in the
            template, where signal-to-noise ratio is calculated as the ratio
            of the maximum amplitude in the template window to the rms
            amplitude in the whole window given.
        :type parallel: bool
        :param parallel: Whether to process data in parallel or not.
        :type num_cores: int
        :param num_cores:
            Number of cores to try and use, if False and parallel=True,
            will use either all your cores, or as many traces as in the data
            (whichever is smaller).
        :type save_progress: bool
        :param save_progress:
            Whether to save the resulting template set at every data step or
            not. Useful for long-running processes.
        :type skip_short_chans: bool
        :param skip_short_chans:
            Whether to ignore channels that have insufficient length data or
            not. Useful when the quality of data is not known, e.g. when
            downloading old, possibly triggered data from a datacentre

        .. note::
            *Method specific arguments:*

            - `from_client` requires:
                :param str client_id:
                    string passable by obspy to generate Client, or any object
                    with a `get_waveforms` method, including a Client instance.
                :param `obspy.core.event.Catalog` catalog:
                    Catalog of events to generate template for
                :param float data_pad: Pad length for data-downloads in seconds
            - `from_seishub` requires:
                :param str url: url to seishub database
                :param `obspy.core.event.Catalog` catalog:
                    Catalog of events to generate template for
                :param float data_pad: Pad length for data-downloads in seconds
            - `from_meta_file` requires:
                :param str meta_file:
                    Path to obspy-readable event file, or an obspy Catalog
                :param `obspy.core.stream.Stream` st:
                    Stream containing waveform data for template. Note that
                    this should be the same length of stream as you will use
                    for the continuous detection, e.g. if you detect in
                    day-long files, give this a day-long file!
                :param bool process:
                    Whether to process the data or not, defaults to True.

        .. Note::
            Method: `from_sac` is not supported by Tribe.construct and must
            use Template.construct.

        .. Note:: Templates will be named according to their start-time.
        """
        templates, catalog, process_lengths = template_gen.template_gen(
            method=method, lowcut=lowcut, highcut=highcut, length=length,
            filt_order=filt_order, samp_rate=samp_rate, prepick=prepick,
            return_event=True, save_progress=save_progress, swin=swin,
            process_len=process_len, all_horiz=all_horiz, plotdir=plotdir,
            delayed=delayed, plot=plot, min_snr=min_snr, parallel=parallel,
            num_cores=num_cores, skip_short_chans=skip_short_chans,
            **kwargs)
        for template, event, process_len in zip(templates, catalog,
                                                process_lengths):
            t = Template()
            for tr in template:
                if not np.any(tr.data.astype(np.float16)):
                    Logger.warning('Data are zero in float16, missing data,'
                                   ' will not use: {0}'.format(tr.id))
                    template.remove(tr)
            if len(template) == 0:
                Logger.error('Empty Template')
                continue
            t.st = template
            t.name = template.sort(['starttime'])[0]. \
                stats.starttime.strftime('%Y_%m_%dt%H_%M_%S')
            t.lowcut = lowcut
            t.highcut = highcut
            t.filt_order = filt_order
            t.samp_rate = samp_rate
            t.process_length = process_len
            t.prepick = prepick
            event.comments.append(Comment(
                text="eqcorrscan_template_" + t.name,
                creation_info=CreationInfo(agency_id='eqcorrscan',
                                           author=getpass.getuser())))
            t.event = event
            self.templates.append(t)
        return self
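
The start-time naming convention used for t.name above can be previewed in isolation:

from obspy import UTCDateTime

# templates are named from the earliest trace start-time
print(UTCDateTime(2017, 9, 8, 14, 54, 58).strftime('%Y_%m_%dt%H_%M_%S'))
# -> 2017_09_08t14_54_58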
Example #25
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
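A minimal usage sketch: since this reader ships as ObsPy's NDK plugin (obspy.io.ndk), the usual entry point is read_events rather than calling the function directly. The file name below is hypothetical.

from obspy import read_events

# Any GCMT bulletin in NDK format should work here.
cat = read_events("gcmt_bulletin.ndk", format="NDK")
print(cat)
print(cat[0].preferred_origin().origin_type)  # 'centroid'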
Example #26
0
def basic_test_event():
    """
    Function to generate a basic, full test event
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    from obspy import UTCDateTime

    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15000
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id

    # Define the test pick
    _waveform_id = WaveformStreamID(station_code='FOZ',
                                    channel_code='SHZ',
                                    network_code='NZ')
    test_event.picks.append(
        Pick(waveform_id=_waveform_id,
             onset='impulsive',
             phase_hint='PN',
             polarity='positive',
             time=UTCDateTime("2012-03-26") + 1.68,
             horizontal_slowness=12,
             backazimuth=20))
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0,
                  period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id,
                  unit='m'))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2,
                phase=test_event.picks[0].phase_hint,
                pick_id=test_event.picks[0].resource_id,
                backazimuth_residual=5,
                time_residual=0.2,
                distance=15,
                azimuth=25))
    return test_event
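A short round-trip sketch for the fixture above: write it to QuakeML and read it back (the output path is hypothetical).

from obspy import read_events
from obspy.core.event import Catalog

cat = Catalog(events=[basic_test_event()])
cat.write("test_event.xml", format="QUAKEML")  # hypothetical path
assert read_events("test_event.xml")[0].magnitudes[0].mag == 0.1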
Example #27
0
File: core.py Project: Qigaoo/obspy
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more
    # efficient, the largest NDK file out in the wild is 13.7 MB, so it
    # does not matter much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1: next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
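The zip_longest(*[lines_iter()] * 5) call above is the standard "grouper" idiom: five references to a single iterator are zipped together, so each iteration consumes five consecutive lines and the final chunk is padded with None (which the `None in lines` check then catches). A standalone sketch:

import itertools

lines = iter(["a", "b", "c", "d", "e", "f", "g"])
for chunk in itertools.zip_longest(*[lines] * 5):
    print(chunk)
# ('a', 'b', 'c', 'd', 'e')
# ('f', 'g', None, None, None)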
Example #28
0
def makeCatalog(StazList, mt, scale, args):

    epi = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt = len(StazList)
    NrCo = NrSt * 3
    (Fmin, Fmax) = getFreq(args)
    Tmin = ('%.0f' % (1 / Fmax))
    Tmax = ('%.0f' % (1 / Fmin))
    mo = ('%.3e' % (mt[0]))
    mw = ('%.2f' % (mt[1]))
    Pdc = ('%.2f' % (float(mt[2]) / 100))
    Pclvd = ('%.2f' % (float(mt[3]) / 100))

    Tval = ('%10.3e' % (mt[22]))
    Tplg = ('%4.1f' % (mt[23]))
    Tazi = ('%5.1f' % (mt[24]))
    Nval = ('%10.3e' % (mt[25]))
    Nplg = ('%4.1f' % (mt[26]))
    Nazi = ('%5.1f' % (mt[27]))
    Pval = ('%10.3e' % (mt[28]))
    Pplg = ('%4.1f' % (mt[29]))
    Pazi = ('%5.1f' % (mt[30]))

    STp1 = ('%5.1f' % (mt[31]))
    DPp1 = ('%4.1f' % (mt[32]))
    RAp1 = ('%6.1f' % (mt[33]))
    STp2 = ('%5.1f' % (mt[34]))
    DPp2 = ('%4.1f' % (mt[35]))
    RAp2 = ('%6.1f' % (mt[36]))
    var = ('%.2f' % (mt[37]))
    qua = ('%d' % (mt[38]))
    mij = [mt[4], mt[5], mt[6], mt[7], mt[8], mt[9]]

    mm0 = str('%10.3e' % (mij[0]))
    mm1 = str('%10.3e' % (mij[1]))
    mm2 = str('%10.3e' % (mij[2]))
    mm3 = str('%10.3e' % (mij[3]))
    mm4 = str('%10.3e' % (mij[4]))
    mm5 = str('%10.3e' % (mij[5]))
    # Aki convention
    Mrr = mm5
    Mtt = mm0
    Mff = mm1
    Mrt = mm3
    Mrf = mm4
    Mtf = mm2

    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])

    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))

    #### Build classes #################################
    #
    # The resource id is defined as the event origin time.

    res_id = ResourceIdentifier(args.ori)
    nowUTC = datetime.datetime.utcnow()
    info = CreationInfo(author="pytdmt", version="2.4", creation_time=nowUTC)
    evOrigin = Origin(resource_id=res_id,
                      time=args.ori,
                      latitude=epi[0],
                      longitude=epi[1],
                      depth=epi[2],
                      earth_model_id=model[-1],
                      creation_info=info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type="Mw")
    # Nodal Planes
    np1 = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2 = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, m_rt=Mrt, m_rp=Mrf, m_tp=Mtf)
    # Stress regime
    regStr = 'Stress regime: ' + regime + ' -  SH = ' + sh
    strDes = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type="combined",
                        station_count=NrSt,
                        component_count=NrCo,
                        shortest_period=Tmin,
                        longest_period=Tmax)
    source = MomentTensor(data_used=dataInfo,
                          scalar_moment=mo,
                          tensor=MT,
                          variance_reduction=var,
                          double_couple=Pdc,
                          clvd=Pclvd,
                          iso=0)
    focMec = FocalMechanism(moment_tensor=source,
                            nodal_planes=planes,
                            principal_axes=axes,
                            azimuthal_gap=-1)

    #Initialize Event Catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)

    cat = Catalog()
    cat.append(mtSolution)

    return cat
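A minimal sketch of how such a catalog is typically serialized; the event values and output path here are made-up stand-ins for what makeCatalog builds.

from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Magnitude, Origin

ev = Event(origins=[Origin(time=UTCDateTime(0), latitude=45.0,
                           longitude=25.0, depth=15000.0)],
           magnitudes=[Magnitude(mag=4.5, magnitude_type="Mw")])
cat = Catalog(events=[ev])
cat.write("tdmt_solution.xml", format="QUAKEML")  # hypothetical path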
Example #29
0
def cross_net(stream, env=False, master=False):
    """
    Generate picks using a simple envelope cross-correlation.

    Picks are made for each channel based on the optimal moveout defined by
    maximum cross-correlation with the master trace.  The master trace will
    be the first trace in the stream if not set.  Requires good
    inter-station coherence.

    :type stream: obspy.core.stream.Stream
    :param stream: Stream to pick
    :type env: bool
    :param env: To compute cross-correlations on the envelope or not.
    :type master: obspy.core.trace.Trace
    :param master:
        Trace to use as master, if False, will use the first trace in stream.

    :returns: :class:`obspy.core.event.event.Event`

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import cross_net
    >>> st = read()
    >>> event = cross_net(st, env=True)
    >>> print(event.creation_info.author)
    EQcorrscan

    .. warning::
        This routine is not designed for accurate picking, rather it can be
        used for a first-pass at picks to obtain simple locations. Based on
        the waveform-envelope cross-correlation method.
    """
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='cross_net'))
    samp_rate = stream[0].stats.sampling_rate
    if not env:
        Logger.info('Using the raw data')
        st = stream.copy()
        st.resample(samp_rate)
    else:
        st = stream.copy()
        Logger.info('Computing envelope')
        for tr in st:
            tr.resample(samp_rate)
            tr.data = envelope(tr.data)
    if not master:
        master = st[0]
    master.data = np.nan_to_num(master.data)
    for i, tr in enumerate(st):
        tr.data = np.nan_to_num(tr.data)
        Logger.debug('Comparing {0} with the master'.format(tr.id))
        shift_len = int(0.3 * len(tr))
        Logger.debug('Shift length is set to ' + str(shift_len) + ' samples')
        index, cc = xcorr(master, tr, shift_len)
        wav_id = WaveformStreamID(station_code=tr.stats.station,
                                  channel_code=tr.stats.channel,
                                  network_code=tr.stats.network)
        event.picks.append(
            Pick(time=tr.stats.starttime + (index / tr.stats.sampling_rate),
                 waveform_id=wav_id,
                 phase_hint='S',
                 onset='emergent'))
        Logger.debug(event.picks[i])
    event.origins[0].time = min([pick.time for pick in event.picks]) - 1
    # event.origins[0].latitude = float('nan')
    # event.origins[0].longitude = float('nan')
    # Set arbitrary origin time
    del st
    return event
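The env=True branch relies on obspy.signal.filter.envelope, which computes the Hilbert-transform envelope of a trace. A small sketch on ObsPy's bundled demo data:

import numpy as np
from obspy import read
from obspy.signal.filter import envelope

tr = read()[0]           # bundled example trace
env = envelope(tr.data)  # sqrt(data**2 + hilbert(data)**2)
assert len(env) == len(tr.data)
assert np.all(env >= 0)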
Example #30
0
def stalta_pick(stream,
                stalen,
                ltalen,
                trig_on,
                trig_off,
                freqmin=False,
                freqmax=False,
                show=False):
    """
    Basic sta/lta picker; consider using the alternatives in obspy.

    Simple sta/lta (short-term average/long-term average) picker, using
    obspy's :func:`obspy.signal.trigger.classic_sta_lta` routine to generate
    the characteristic function.

    Currently very basic quick wrapper, there are many other (better) options
    in obspy in the :mod:`obspy.signal.trigger` module.

    :type stream: obspy.core.stream.Stream
    :param stream: The stream to pick on, can be any number of channels.
    :type stalen: float
    :param stalen: Length of the short-term average window in seconds.
    :type ltalen: float
    :param ltalen: Length of the long-term average window in seconds.
    :type trig_on: float
    :param trig_on: sta/lta ratio to trigger a detection/pick
    :type trig_off: float
    :param trig_off: sta/lta ratio to turn the trigger off - no further picks\
        will be made after trig_on is exceeded until trig_off is reached.
    :type freqmin: float
    :param freqmin: Low-cut frequency in Hz for bandpass filter
    :type freqmax: float
    :param freqmax: High-cut frequency in Hz for bandpass filter
    :type show: bool
    :param show: Show picks on waveform.

    :returns: :class:`obspy.core.event.event.Event`

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import stalta_pick
    >>> st = read()
    >>> event = stalta_pick(st, stalen=0.2, ltalen=4, trig_on=10,
    ...             trig_off=1, freqmin=3.0, freqmax=20.0)
    >>> print(event.creation_info.author)
    EQcorrscan

    .. warning::
        This function is not designed for accurate picking, rather it can give
        a first idea of whether picks may be possible.  Proceed with caution.
    """
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='stalta'))
    picks = []
    for tr in stream:
        # We are going to assume, for now, that if the pick is made on the
        # horizontal channel then it is an S, otherwise we will assume it is
        # a P-phase: obviously a bad assumption...
        if tr.stats.channel[-1] == 'Z':
            phase = 'P'
        else:
            phase = 'S'
        if freqmin and freqmax:
            tr.detrend('simple')
            tr.filter('bandpass',
                      freqmin=freqmin,
                      freqmax=freqmax,
                      corners=3,
                      zerophase=True)
        df = tr.stats.sampling_rate
        cft = classic_sta_lta(tr.data, int(stalen * df), int(ltalen * df))
        triggers = trigger_onset(cft, trig_on, trig_off)
        for trigger in triggers:
            on = tr.stats.starttime + (trigger[0] / df)
            # off = tr.stats.starttime + (trigger[1] / df)
            wav_id = WaveformStreamID(station_code=tr.stats.station,
                                      channel_code=tr.stats.channel,
                                      network_code=tr.stats.network)
            p = Pick(waveform_id=wav_id, phase_hint=phase, time=on)
            Logger.info('Pick made: {0}'.format(p))
            picks.append(p)
    # QC picks
    pick_stations = list(set([pick.waveform_id.station_code
                              for pick in picks]))
    for pick_station in pick_stations:
        station_picks = [
            pick for pick in picks
            if pick.waveform_id.station_code == pick_station
        ]
        # If the P-pick is after the S-pick, remove it.
        p_time = [
            pick.time for pick in station_picks if pick.phase_hint == 'P'
        ]
        s_time = [
            pick.time for pick in station_picks if pick.phase_hint == 'S'
        ]
        # Compare the earliest pick of each phase; stations lacking either
        # phase are skipped rather than compared as raw lists.
        if p_time and s_time and min(p_time) > min(s_time):
            p_picks = [
                pick for pick in station_picks if pick.phase_hint == 'P'
            ]
            for pick in p_picks:
                Logger.info('P pick after S pick, removing P pick')
                picks.remove(pick)
    event.picks = picks
    if show:
        plotting.pretty_template_plot(stream,
                                      event=event,
                                      title='Autopicks',
                                      size=(8, 9))
    if len(event.picks) > 0:
        event.origins[0].time = min([pick.time for pick in event.picks]) - 1
        # event.origins[0].latitude = float('nan')
        # event.origins[0].longitude = float('nan')
    # Set arbitrary origin time
    return event
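The characteristic function and triggering used above can be reproduced standalone with ObsPy's trigger module; the window lengths and thresholds mirror the docstring example.

from obspy import read
from obspy.signal.trigger import classic_sta_lta, trigger_onset

tr = read()[0]
df = tr.stats.sampling_rate
cft = classic_sta_lta(tr.data, int(0.2 * df), int(4 * df))
for on, off in trigger_onset(cft, 10, 1):  # [[on_sample, off_sample], ...]
    print(tr.stats.starttime + on / df)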
Example #31
0
    def _parseRecordDp(self, line, event):
        """
        Parses the 'source parameter data - primary' record Dp
        """
        source_contributor = line[2:6].strip()
        computation_type = line[6]
        exponent = self._intZero(line[7])
        scale = math.pow(10, exponent)
        centroid_origin_time = line[8:14] + '.' + line[14]
        orig_time_stderr = line[15:17]
        if orig_time_stderr == 'FX':
            orig_time_stderr = 'Fixed'
        else:
            orig_time_stderr =\
                self._floatWithFormat(orig_time_stderr, '2.1', scale)
        centroid_latitude = self._floatWithFormat(line[17:21], '4.2')
        lat_type = line[21]
        if centroid_latitude is not None:
            centroid_latitude *= self._coordinateSign(lat_type)
        lat_stderr = line[22:25]
        if lat_stderr == 'FX':
            lat_stderr = 'Fixed'
        else:
            lat_stderr = self._floatWithFormat(lat_stderr, '3.2', scale)
        centroid_longitude = self._floatWithFormat(line[25:30], '5.2')
        lon_type = line[30]
        if centroid_longitude is not None:
            centroid_longitude *= self._coordinateSign(lon_type)
        lon_stderr = line[31:34]
        if lon_stderr == 'FX':
            lon_stderr = 'Fixed'
        else:
            lon_stderr = self._floatWithFormat(lon_stderr, '3.2', scale)
        centroid_depth = self._floatWithFormat(line[34:38], '4.1')
        depth_stderr = line[38:40]
        if depth_stderr == 'FX' or depth_stderr == 'BD':
            depth_stderr = 'Fixed'
        else:
            depth_stderr = self._floatWithFormat(depth_stderr, '2.1', scale)
        station_number = self._intZero(line[40:43])
        component_number = self._intZero(line[43:46])
        station_number2 = self._intZero(line[46:48])
        component_number2 = self._intZero(line[48:51])
        #unused: half_duration = self._floatWithFormat(line[51:54], '3.1')
        moment = self._floatWithFormat(line[54:56], '2.1')
        moment_stderr = self._floatWithFormat(line[56:58], '2.1')
        moment_exponent = self._int(line[58:60])
        if (moment is not None) and (moment_exponent is not None):
            moment *= math.pow(10, moment_exponent)
        if (moment_stderr is not None) and (moment_exponent is not None):
            moment_stderr *= math.pow(10, moment_exponent)

        evid = event.resource_id.id.split('/')[-1]
        #Create a new origin only if centroid time is defined:
        origin = None
        if centroid_origin_time.strip() != '.':
            origin = Origin()
            res_id = '/'.join(
                (res_id_prefix, 'origin', evid, source_contributor.lower(),
                 'mw' + computation_type.lower()))
            origin.resource_id = ResourceIdentifier(id=res_id)
            origin.creation_info =\
                CreationInfo(agency_id=source_contributor)
            date = event.origins[0].time.strftime('%Y%m%d')
            origin.time = UTCDateTime(date + centroid_origin_time)
            #Check if centroid time is on the next day:
            if origin.time < event.origins[0].time:
                origin.time += timedelta(days=1)
            self._storeUncertainty(origin.time_errors, orig_time_stderr)
            origin.latitude = centroid_latitude
            origin.longitude = centroid_longitude
            origin.depth = centroid_depth * 1000
            if lat_stderr == 'Fixed' and lon_stderr == 'Fixed':
                origin.epicenter_fixed = True
            else:
                self._storeUncertainty(origin.latitude_errors,
                                       self._latErrToDeg(lat_stderr))
                self._storeUncertainty(
                    origin.longitude_errors,
                    self._lonErrToDeg(lon_stderr, origin.latitude))
            if depth_stderr == 'Fixed':
                origin.depth_type = 'operator assigned'
            else:
                origin.depth_type = 'from location'
                self._storeUncertainty(origin.depth_errors,
                                       depth_stderr,
                                       scale=1000)
            quality = OriginQuality()
            quality.used_station_count =\
                station_number + station_number2
            quality.used_phase_count =\
                component_number + component_number2
            origin.quality = quality
            origin.type = 'centroid'
            event.origins.append(origin)
        focal_mechanism = FocalMechanism()
        res_id = '/'.join(
            (res_id_prefix, 'focalmechanism', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        focal_mechanism.resource_id = ResourceIdentifier(id=res_id)
        focal_mechanism.creation_info =\
            CreationInfo(agency_id=source_contributor)
        moment_tensor = MomentTensor()
        if origin is not None:
            moment_tensor.derived_origin_id = origin.resource_id
        else:
            #this is required for QuakeML validation:
            res_id = '/'.join((res_id_prefix, 'no-origin'))
            moment_tensor.derived_origin_id =\
                ResourceIdentifier(id=res_id)
        for mag in event.magnitudes:
            if mag.creation_info.agency_id == source_contributor:
                moment_tensor.moment_magnitude_id = mag.resource_id
        res_id = '/'.join(
            (res_id_prefix, 'momenttensor', evid, source_contributor.lower(),
             'mw' + computation_type.lower()))
        moment_tensor.resource_id = ResourceIdentifier(id=res_id)
        moment_tensor.scalar_moment = moment
        self._storeUncertainty(moment_tensor.scalar_moment_errors,
                               moment_stderr)
        data_used = DataUsed()
        data_used.station_count = station_number + station_number2
        data_used.component_count = component_number + component_number2
        if computation_type == 'C':
            res_id = '/'.join((res_id_prefix, 'methodID=CMT'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #CMT algorithm uses long-period body waves,
            #very-long-period surface waves and
            #intermediate period surface waves (since 2004
            #for shallow and intermediate-depth earthquakes
            # --Ekstrom et al., 2012)
            data_used.wave_type = 'combined'
        elif computation_type == 'M':
            res_id = '/'.join((res_id_prefix, 'methodID=moment_tensor'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: not sure which kind of data is used by
            #"moment tensor" algorithm.
            data_used.wave_type = 'unknown'
        elif computation_type == 'B':
            res_id = '/'.join((res_id_prefix, 'methodID=broadband_data'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: is 'combined' correct here?
            data_used.wave_type = 'combined'
        elif computation_type == 'F':
            res_id = '/'.join((res_id_prefix, 'methodID=P-wave_first_motion'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            data_used.wave_type = 'P waves'
        elif computation_type == 'S':
            res_id = '/'.join((res_id_prefix, 'methodID=scalar_moment'))
            focal_mechanism.method_id = ResourceIdentifier(id=res_id)
            #FIXME: not sure which kind of data is used
            #for scalar moment determination.
            data_used.wave_type = 'unknown'
        moment_tensor.data_used = data_used
        focal_mechanism.moment_tensor = moment_tensor
        event.focal_mechanisms.append(focal_mechanism)
        return focal_mechanism
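The centroid time in record Dp is only a time-of-day, so it is attached to the date of the first origin and rolled forward by one day when it ends up before the hypocentral time. A standalone sketch of that rollover, with made-up times:

from datetime import timedelta
from obspy import UTCDateTime

ref_time = UTCDateTime("2004-12-26T23:58:53")  # hypothetical hypocenter
centroid = UTCDateTime("2004-12-26T00:01:09")  # same date + parsed time
if centroid < ref_time:
    centroid += timedelta(days=1)  # centroid fell on the next day
print(centroid)  # -> 2004-12-27T00:01:09.000000Z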
Example #32
0
    def build(self):
        """
        Build an obspy moment tensor focal mech event

        This makes the tensor output into an Event containing:
        1) a FocalMechanism with a MomentTensor, NodalPlanes, and PrincipalAxes
        2) a Magnitude of the Mw from the Tensor

        Which is what we want for outputting QuakeML using
        the (slightly modified) obspy code.

        Input
        -----
        None -- reads from self.parser (the already-parsed file contents)

        Output
        ------
        None -- sets self.event to an instance of the Event() class as
        described above
        """
        p = self.parser
        event         = Event(event_type='earthquake')
        origin        = Origin()
        focal_mech    = FocalMechanism()
        nodal_planes  = NodalPlanes()
        moment_tensor = MomentTensor()
        principal_ax  = PrincipalAxes()
        magnitude     = Magnitude()
        data_used     = DataUsed()
        creation_info = CreationInfo(agency_id='NN')
        ev_mode = 'automatic'
        ev_stat = 'preliminary'
        evid = None
        orid = None
        # Parse the entire file line by line.
        for n,l in enumerate(p.line):
            if 'REVIEWED BY NSL STAFF' in l:
                ev_mode = 'manual'
                ev_stat = 'reviewed'
            if 'Event ID' in l:
                evid = p._id(n)
            if 'Origin ID' in l:
                orid = p._id(n)
            if 'Ichinose' in l:
                moment_tensor.category = 'regional'
            if re.match(r'^\d{4}\/\d{2}\/\d{2}', l):
                ev = p._event_info(n)
            if 'Depth' in l:
                derived_depth = p._depth(n)
            if 'Mw' in l:
                magnitude.mag = p._mw(n) 
                magnitude.magnitude_type = 'Mw'
            if 'Mo' in l and 'dyne' in l:
                moment_tensor.scalar_moment = p._mo(n)
            if 'Percent Double Couple' in l:
                moment_tensor.double_couple = p._percent(n)
            if 'Percent CLVD' in l:
                moment_tensor.clvd = p._percent(n)
            if 'Epsilon' in l:
                moment_tensor.variance = p._epsilon(n)
            if 'Percent Variance Reduction' in l:
                moment_tensor.variance_reduction = p._percent(n)
            if 'Major Double Couple' in l and 'strike' in p.line[n+1]:
                np = p._double_couple(n)
                nodal_planes.nodal_plane_1 = NodalPlane(*np[0])
                nodal_planes.nodal_plane_2 = NodalPlane(*np[1])
                nodal_planes.preferred_plane = 1
            if 'Spherical Coordinates' in l:
                mt = p._mt_sphere(n)
                moment_tensor.tensor = Tensor(
                    m_rr = mt['Mrr'],
                    m_tt = mt['Mtt'],
                    m_pp = mt['Mff'],
                    m_rt = mt['Mrt'],
                    m_rp = mt['Mrf'],
                    m_tp = mt['Mtf'],
                    )
            if 'Eigenvalues and eigenvectors of the Major Double Couple' in l:
                ax = p._vectors(n)
                principal_ax.t_axis = Axis(ax['T']['trend'], ax['T']['plunge'], ax['T']['ev'])
                principal_ax.p_axis = Axis(ax['P']['trend'], ax['P']['plunge'], ax['P']['ev'])
                principal_ax.n_axis = Axis(ax['N']['trend'], ax['N']['plunge'], ax['N']['ev'])
            if 'Number of Stations' in l:
                data_used.station_count = p._number_of_stations(n)
            if 'Maximum' in l and 'Gap' in l:
                focal_mech.azimuthal_gap = p._gap(n)
            if re.match(r'^Date', l):
                creation_info.creation_time = p._creation_time(n)
        # Creation Time
        creation_info.version = orid
        # Fill in magnitude values
        magnitude.evaluation_mode = ev_mode
        magnitude.evaluation_status = ev_stat
        magnitude.creation_info = creation_info.copy()
        magnitude.resource_id = self._rid(magnitude)
        # Stub origin
        origin.time = ev.get('time')
        origin.latitude = ev.get('lat')
        origin.longitude = ev.get('lon')
        origin.depth = derived_depth * 1000.
        origin.depth_type = "from moment tensor inversion"
        origin.creation_info = creation_info.copy()
        # Unique from true origin ID
        _oid = self._rid(origin)
        origin.resource_id = ResourceIdentifier(str(_oid) + '/mt')
        del _oid
        # Make an id for the MT that references this origin
        ogid = str(origin.resource_id)
        doid = ResourceIdentifier(ogid, referred_object=origin)
        # Make an id for the moment tensor mag which references this mag
        mrid = str(magnitude.resource_id)
        mmid = ResourceIdentifier(mrid, referred_object=magnitude)
        # MT todo: could check/use URL for RID if parsing the php file
        moment_tensor.evaluation_mode = ev_mode
        moment_tensor.evaluation_status = ev_stat
        moment_tensor.data_used = data_used
        moment_tensor.moment_magnitude_id = mmid
        moment_tensor.derived_origin_id = doid
        moment_tensor.creation_info = creation_info.copy()
        moment_tensor.resource_id = self._rid(moment_tensor)
        # Fill in focal_mech values
        focal_mech.nodal_planes  = nodal_planes
        focal_mech.moment_tensor = moment_tensor
        focal_mech.principal_axes = principal_ax
        focal_mech.creation_info = creation_info.copy()
        focal_mech.resource_id = self._rid(focal_mech)
        # add mech and new magnitude to event
        event.focal_mechanisms = [focal_mech]
        event.magnitudes = [magnitude]
        event.origins = [origin]
        event.creation_info = creation_info.copy()
        # If an MT was done, that's the preferred mag/mech
        event.preferred_magnitude_id = str(magnitude.resource_id)
        event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
        if evid:
            event.creation_info.version = evid
        event.resource_id = self._rid(event)
        self.event = event
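ResourceIdentifier's referred_object mechanism, used above to tie the moment tensor to its derived origin and magnitude, lets the object be recovered from the id alone later on. A minimal sketch with a made-up id string:

from obspy.core.event import Magnitude, ResourceIdentifier

mag = Magnitude(mag=4.2, magnitude_type="Mw")
rid = ResourceIdentifier(id="smi:local/example/magnitude",
                         referred_object=mag)
assert rid.get_referred_object() is mag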
Example #33
0
    def _parseRecordE(self, line, event):
        """
        Parses the 'error and magnitude' record E
        """
        orig_time_stderr = self._float(line[2:7])
        latitude_stderr = self._float(line[8:14])
        longitude_stderr = self._float(line[15:21])
        depth_stderr = self._float(line[22:27])
        mb_mag = self._float(line[28:31])
        mb_nsta = self._int(line[32:35])
        Ms_mag = self._float(line[36:39])
        Ms_nsta = self._int(line[39:42])
        mag1 = self._float(line[42:45])
        mag1_type = line[45:47]
        mag1_source_code = line[47:51].strip()
        mag2 = self._float(line[51:54])
        mag2_type = line[54:56]
        mag2_source_code = line[56:60].strip()

        evid = event.resource_id.id.split('/')[-1]
        origin = event.origins[0]
        self._storeUncertainty(origin.time_errors, orig_time_stderr)
        self._storeUncertainty(origin.latitude_errors,
                               self._latErrToDeg(latitude_stderr))
        self._storeUncertainty(
            origin.longitude_errors,
            self._lonErrToDeg(longitude_stderr, origin.latitude))
        self._storeUncertainty(origin.depth_errors, depth_stderr, scale=1000)
        if mb_mag is not None:
            mag = Magnitude()
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, 'mb'))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(agency_id='USGS-NEIC')
            mag.mag = mb_mag
            mag.magnitude_type = 'Mb'
            mag.station_count = mb_nsta
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
        if Ms_mag is not None:
            mag = Magnitude()
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, 'ms'))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(agency_id='USGS-NEIC')
            mag.mag = Ms_mag
            mag.magnitude_type = 'Ms'
            mag.station_count = Ms_nsta
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
        if mag1 is not None:
            mag = Magnitude()
            mag1_id = mag1_type.lower()
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag1_id))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(agency_id=mag1_source_code)
            mag.mag = mag1
            mag.magnitude_type = mag1_type
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)
        if mag2 is not None:
            mag = Magnitude()
            mag2_id = mag2_type.lower()
            # mag1_id is only bound when mag1 was present; guard against a
            # NameError when only mag2 is reported.
            if mag1 is not None and mag2_id == mag1_id:
                mag2_id += '2'
            res_id = '/'.join((res_id_prefix, 'magnitude', evid, mag2_id))
            mag.resource_id = ResourceIdentifier(id=res_id)
            mag.creation_info = CreationInfo(agency_id=mag2_source_code)
            mag.mag = mag2
            mag.magnitude_type = mag2_type
            mag.origin_id = origin.resource_id
            event.magnitudes.append(mag)