Example #1
 def test_clear_method_resets_objects(self):
     """
     Tests that the clear() method properly resets all objects. Test for
     #449.
     """
     # Test with basic event object.
     e = Event(force_resource_id=False)
     e.comments.append(Comment(text="test"))
     e.event_type = "explosion"
     self.assertEqual(len(e.comments), 1)
     self.assertEqual(e.event_type, "explosion")
     e.clear()
     self.assertEqual(e, Event(force_resource_id=False))
     self.assertEqual(len(e.comments), 0)
     self.assertEqual(e.event_type, None)
     # Test with pick object. Does not really fit in the event test case but
     # it tests the same thing...
     p = Pick()
     p.comments.append(Comment(text="test"))
     p.phase_hint = "p"
     self.assertEqual(len(p.comments), 1)
     self.assertEqual(p.phase_hint, "p")
     # Add some more random attributes. These should disappear upon
     # cleaning.
     p.test_1 = "a"
     p.test_2 = "b"
     self.assertEqual(p.test_1, "a")
     self.assertEqual(p.test_2, "b")
     p.clear()
     self.assertEqual(len(p.comments), 0)
     self.assertEqual(p.phase_hint, None)
     self.assertFalse(hasattr(p, "test_1"))
     self.assertFalse(hasattr(p, "test_2"))
Example #2
 def _parse_record_dc(self, line, focal_mechanism):
     """
     Parses the 'source parameter data - comment' record Dc
     """
     try:
         comment = focal_mechanism.comments[0]
         comment.text += line[2:60]
     except IndexError:
         comment = Comment()
         comment.resource_id = ResourceIdentifier(prefix=res_id_prefix)
         focal_mechanism.comments.append(comment)
         comment.text = line[2:60]
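
The pattern above (extend the first Comment if one exists, create it on
IndexError) recurs in several of the parsers below. A standalone sketch of the
same idea using only public obspy classes; append_comment_text is a name
invented here for illustration:

from obspy.core.event import Comment, FocalMechanism

def append_comment_text(obj, text):
    # Extend the first comment if present, otherwise create it.
    try:
        obj.comments[0].text += text
    except IndexError:
        obj.comments.append(Comment(text=text))

fm = FocalMechanism()
append_comment_text(fm, "first record chunk, ")
append_comment_text(fm, "second record chunk")
assert len(fm.comments) == 1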
Example #3
 def _parseRecordDc(self, line, focal_mechanism):
     """
     Parses the 'source parameter data - comment' record Dc
     """
     try:
         comment = focal_mechanism.comments[0]
         comment.text += line[2:60]
     except IndexError:
         comment = Comment()
         comment.resource_id = ResourceIdentifier(prefix=res_id_prefix)
         focal_mechanism.comments.append(comment)
         comment.text = line[2:60]
Example #4
 def _parseRecordC(self, line, event):
     """
     Parses the 'general comment' record C
     """
     try:
         comment = event.comments[0]
         comment.text += line[2:60]
     except IndexError:
         comment = Comment()
         comment.resource_id = ResourceIdentifier(prefix=res_id_prefix)
         event.comments.append(comment)
         comment.text = line[2:60]
     # strip non-printable characters
     comment.text = "".join(x for x in comment.text if x in s.printable)
Example #5
 def _parseRecordC(self, line, event):
     """
     Parses the 'general comment' record C
     """
     try:
         comment = event.comments[0]
         comment.text += line[2:60]
     except IndexError:
         comment = Comment()
         comment.resource_id = ResourceIdentifier(prefix=res_id_prefix)
         event.comments.append(comment)
         comment.text = line[2:60]
     # strip non-printable characters ("".join keeps the result a str under
     # Python 3, where a bare filter() would return an iterator)
     comment.text = "".join(
         filter(lambda x: x in s.printable, comment.text))
Example #6
def set_new_event_mag(event, station_mags, Mw, comment, make_preferred=False):
    count = len(station_mags)

    sta_mag_contributions = []

    for sta_mag in station_mags:
        sta_mag_contributions.append(
            StationMagnitudeContribution(
                station_magnitude_id=sta_mag.resource_id))

    origin_id = event.preferred_origin().resource_id

    event_mag = Magnitude(
        origin_id=origin_id,
        mag=Mw,
        magnitude_type='Mw',
        station_count=count,
        evaluation_mode='automatic',
        station_magnitude_contributions=sta_mag_contributions,
        comments=[Comment(text=comment)],
    )

    event.magnitudes.append(event_mag)
    event.station_magnitudes = station_mags

    if make_preferred:
        event.preferred_magnitude_id = ResourceIdentifier(
            id=event_mag.resource_id.id, referred_object=event_mag)

    return
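
A minimal call sketch for the function above, assuming an Event that already
has a preferred origin and a couple of StationMagnitude objects (all values
here are made up):

from obspy import UTCDateTime
from obspy.core.event import Event, Origin, StationMagnitude

ev = Event()
orig = Origin(time=UTCDateTime())
ev.origins.append(orig)
ev.preferred_origin_id = orig.resource_id
sta_mags = [StationMagnitude(mag=4.1, station_magnitude_type='Mw'),
            StationMagnitude(mag=3.9, station_magnitude_type='Mw')]
set_new_event_mag(ev, sta_mags, Mw=4.0, comment='network Mw estimate',
                  make_preferred=True)
print(ev.preferred_magnitude().mag)  # 4.0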
Example #7
def CUSP_to_SC3_rel_mags(det_cat, temp_cat, selfs):
    """
    Take a catalog with relative magnitudes calculated using the Ristau 2009
    CUSP equation and correct them using the updated SeisComP3 scale

    :param det_cat: Catalog of detections and templates with magnitudes
    :param temp_cat: Catalog of the template events with magnitudes
    :param selfs: List of strings for self detection ids
    :return:
    """
    # Make a dictionary of the CUSP-derived moment, SeisComP M0 for templates
    temp_mag_dict = {ev.resource_id.id.split('/')[-1]:
                        {'Old M0':
                              local_to_moment(ev.magnitudes[0].mag,
                                              m=0.88, c=0.73),
                         'New M0':
                              local_to_moment(ev.magnitudes[0].mag,
                                              m=0.97, c=0.14)}
                     for ev in temp_cat}
    # Now loop the catalog and redo the calculations
    for det in det_cat:
        # First determine the relative moment (I didn't save these anywhere...)
        eid = det.resource_id.id.split('/')[-1]
        if eid in selfs:
            print('Template event: Adding a Mw magnitude')
            det.magnitudes.append(
                Magnitude(mag=ML_to_Mw(det.magnitudes[0].mag, m=0.97, c=0.14),
                          magnitude_type='Mw',
                          comments=[Comment(text='Ristau et al., 2016 BSSA')]))
            continue
        tid = det.resource_id.id.split('/')[-1].split('_')[0]
        det_mo = Mw_to_M0([m.mag for m in det.magnitudes
                           if m.magnitude_type == 'Mw'][0])
        rel_mo = det_mo / temp_mag_dict[tid]['Old M0']
        new_det_mo = rel_mo * temp_mag_dict[tid]['New M0']
        new_det_Mw = (2. / 3. * np.log10(new_det_mo)) - 9.
        new_det_ML = (0.97 * new_det_Mw) + 0.14
        det.magnitudes.append(
            Magnitude(mag=new_det_Mw, magnitude_type='Mw',
                      comments=[Comment(text='rel_mo={}'.format(rel_mo))]))
        det.magnitudes.append(
            Magnitude(mag=new_det_ML, magnitude_type='ML',
                      comments=[Comment(text='rel_mo={}'.format(rel_mo))]))
        det.preferred_magnitude_id = det.magnitudes[-2].resource_id.id
    return
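
The helpers local_to_moment, ML_to_Mw and Mw_to_M0 are not part of this
snippet. Plausible reconstructions, consistent with the in-loop conversions
(ML = m * Mw + c and Mw = (2/3) * log10(M0) - 9), would be:

def ML_to_Mw(ML, m, c):
    # Invert the linear scale ML = m * Mw + c used in the loop above.
    return (ML - c) / m

def Mw_to_M0(Mw):
    # Inverse of Mw = (2. / 3.) * log10(M0) - 9. as used for new_det_Mw.
    return 10 ** (1.5 * (Mw + 9.0))

def local_to_moment(ML, m, c):
    # Chain the two: local magnitude -> Mw -> seismic moment.
    return Mw_to_M0(ML_to_Mw(ML, m, c))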
Example #8
def _read_focmec_out(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC out
        file.
    """
    event, _ = _read_common_header(lines)
    # now move to first line with a focal mechanism
    for i, line in enumerate(lines):
        if line.split()[:3] == ['Dip', 'Strike', 'Rake']:
            break
    else:
        return event
    header = lines[:i]
    polarity_count, weighted = _get_polarity_count(header)
    focmec_list_header = lines[i]
    event.comments.append(Comment(text='\n'.join(header)))
    try:
        lines = lines[i + 1:]
    except IndexError:
        return event
    for line in lines:
        # allow for empty lines (maybe they can happen at the end sometimes..)
        if not line.strip():
            continue
        comment = Comment(text='\n'.join((focmec_list_header, line)))
        items = line.split()
        dip, strike, rake = [float(x) for x in items[:3]]
        plane = NodalPlane(strike=strike, dip=dip, rake=rake)
        planes = NodalPlanes(nodal_plane_1=plane, preferred_plane=1)
        # XXX ideally should compute the auxiliary plane..
        focmec = FocalMechanism(nodal_planes=planes)
        focmec.station_polarity_count = polarity_count
        focmec.creation_info = CreationInfo(
            version='FOCMEC', creation_time=event.creation_info.creation_time)
        if not weighted:
            errors = sum([int(x) for x in items[3:6]])
            focmec.misfit = float(errors) / polarity_count
        focmec.comments.append(comment)
        event.focal_mechanisms.append(focmec)
    return event
Example #9
 def __setattr__(self, key, value):
     if key == 'stime':
         self.__dict__[key] = value
         self.__dict__['time'] = self._samples_to_seconds(value)
     elif key == 'time':
         self.__dict__[key] = value
         self.__dict__['stime'] = self._seconds_to_samples(value)
     elif key == 'comments':
         self.__dict__['comments'] = Comment(text=value)
     else:
         super(ApasvoEvent, self).__setattr__(key, value)
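
The two conversion helpers referenced above are not shown in the snippet.
Assuming the record's sampling rate is stored on the object (fs is a
hypothetical attribute name), they would look roughly like:

def _samples_to_seconds(self, value):
    # Hypothetical: convert a sample index to seconds via the sampling rate.
    return value / self.fs

def _seconds_to_samples(self, value):
    # Hypothetical inverse: seconds to the nearest sample index.
    return int(value * self.fs)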
Example #10
    def _on_file_save(self):
        """
        Creates a new obspy.core.event.Magnitude object and writes the moment
        magnitude to it.
        """
        # Get the save filename.
        filename = QtGui.QFileDialog.getSaveFileName(caption="Save as...")
        filename = os.path.abspath(str(filename))
        mag = Magnitude()
        mag.mag = self.final_result["moment_magnitude"]
        mag.magnitude_type = "Mw"
        mag.station_count = self.final_result["station_count"]
        mag.evaluation_mode = "manual"
        # Link to the used origin.
        mag.origin_id = self.current_state["event"].origins[0].resource_id
        mag.method_id = "Magnitude picker Krischer"
        # XXX: Potentially change once this program gets more stable.
        mag.evaluation_status = "preliminary"
        # Write the other results as Comments.
        mag.comments.append(Comment(
            "Seismic moment in Nm: %g" % self.final_result["seismic_moment"]))
        mag.comments.append(Comment(
            "Circular source radius in m: %.2f" %
            self.final_result["source_radius"]))
        mag.comments.append(Comment(
            "Stress drop in Pa: %.2f" % self.final_result["stress_drop"]))
        mag.comments.append(Comment(
            "Very rough Q estimation: %.1f" %
            self.final_result["quality_factor"]))

        event = copy.deepcopy(self.current_state["event"])
        event.magnitudes.append(mag)
        cat = Catalog()
        cat.events.append(event)
        cat.write(filename, format="quakeml")
Example #11
 def test_clear_method_resets_objects(self):
     """
     Tests that the clear() method properly resets all objects. Test for
     #449.
     """
     # Test with basic event object.
     e = Event(force_resource_id=False)
     e.comments.append(Comment(text="test"))
     e.event_type = "explosion"
     self.assertEqual(len(e.comments), 1)
     self.assertEqual(e.event_type, "explosion")
     e.clear()
     self.assertEqual(e, Event(force_resource_id=False))
     self.assertEqual(len(e.comments), 0)
     self.assertEqual(e.event_type, None)
     # Test with pick object. Does not really fit in the event test case but
     # it tests the same thing...
     p = Pick()
     p.comments.append(Comment(text="test"))
     p.phase_hint = "p"
     self.assertEqual(len(p.comments), 1)
     self.assertEqual(p.phase_hint, "p")
     # Add some more random attributes. These should disappear upon
     # cleaning.
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter("always")
         p.test_1 = "a"
         p.test_2 = "b"
         # two warnings should have been issued by setting non-default keys
         self.assertEqual(len(w), 2)
     self.assertEqual(p.test_1, "a")
     self.assertEqual(p.test_2, "b")
     p.clear()
     self.assertEqual(len(p.comments), 0)
     self.assertEqual(p.phase_hint, None)
     self.assertFalse(hasattr(p, "test_1"))
     self.assertFalse(hasattr(p, "test_2"))
Example #12
 def test_clear_method_resets_objects(self):
     """
     Tests that the clear() method properly resets all objects. Test for
     #449.
     """
     # Test with basic event object.
     e = Event(force_resource_id=False)
     e.comments.append(Comment(text="test"))
     e.event_type = "explosion"
     assert len(e.comments) == 1
     assert e.event_type == "explosion"
     e.clear()
     assert e == Event(force_resource_id=False)
     assert len(e.comments) == 0
     assert e.event_type is None
     # Test with pick object. Does not really fit in the event test case but
     # it tests the same thing...
     p = Pick()
     p.comments.append(Comment(text="test"))
     p.phase_hint = "p"
     assert len(p.comments) == 1
     assert p.phase_hint == "p"
     # Add some more random attributes. These should disappear upon
     # cleaning.
     with WarningsCapture() as w:
         p.test_1 = "a"
         p.test_2 = "b"
         # two warnings should have been issued by setting non-default keys
         assert len(w) == 2
     assert p.test_1 == "a"
     assert p.test_2 == "b"
     p.clear()
     assert len(p.comments) == 0
     assert p.phase_hint is None
     assert not hasattr(p, "test_1")
     assert not hasattr(p, "test_2")
Example #13
def __toFocalMechanism(parser, focmec_el):
    """
    """
    global CURRENT_TYPE
    focmec = FocalMechanism()
    focmec.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "focal_mechanism"]))
    if CURRENT_TYPE == "obspyck":
        focmec.method_id = "%s/focal_mechanism_method/focmec/1" % RESOURCE_ROOT
    else:
        focmec.method_id = "%s/focal_mechanism_method/%s/1" % (
            RESOURCE_ROOT, parser.xpath2obj('program', focmec_el))
    if str(focmec.method_id).lower().endswith("none"):
        focmec.method_id = None
    focmec.station_polarity_count = parser.xpath2obj("stationPolarityCount",
                                                     focmec_el, int)
    if focmec.station_polarity_count:
        focmec.misfit = parser.xpath2obj("stationPolarityErrorCount",
                                         focmec_el, int) / float(
                                             focmec.station_polarity_count)
    focmec.nodal_planes = NodalPlanes()
    focmec.nodal_planes.nodal_plane_1 = NodalPlane()
    nodal_plane = focmec_el.find("nodalPlanes")
    if nodal_plane is None or not len(nodal_plane):
        return None
    n_p = focmec.nodal_planes.nodal_plane_1
    # There is always only one nodal plane, called nodalPlane1
    n_p.strike, strike_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/strike")
    n_p.dip, dip_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/dip")
    n_p.rake, rake_uncertainty = __toFloatQuantity(
        parser, focmec_el, "nodalPlanes/nodalPlane1/rake")
    if hasattr(strike_uncertainty, "uncertainty"):
        n_p.strike_errors.uncertainty = strike_uncertainty["uncertainty"]
    if hasattr(dip_uncertainty, "uncertainty"):
        n_p.dip_errors.uncertainty = dip_uncertainty["uncertainty"]
    if hasattr(rake_uncertainty, "uncertainty"):
        n_p.rake_errors.uncertainty = rake_uncertainty["uncertainty"]
    solution_count = parser.xpath2obj("possibleSolutionCount", focmec_el, int)
    if solution_count:
        focmec.comments.append(
            Comment(force_resource_id=False,
                    resource_id=None,
                    text="Possible Solution Count: %i" % solution_count))
    return focmec
Example #14
 def __init__(self,
              name=None,
              st=None,
              lowcut=None,
              highcut=None,
              samp_rate=None,
              filt_order=None,
              process_length=None,
              prepick=None,
              event=None):
     name_regex = re.compile(r"^[a-z_0-9]+$")
     if name is not None and not re.match(name_regex, name):
         raise ValueError("Invalid name: '%s' - Must satisfy the regex "
                          "'%s'." % (name, name_regex.pattern))
     if name is None:
         temp_name = "unnamed"
     else:
         temp_name = name
     self.name = name
     self.st = st
     self.lowcut = lowcut
     self.highcut = highcut
     self.samp_rate = samp_rate
     if st and samp_rate is not None:
         for tr in st:
             if tr.stats.sampling_rate != self.samp_rate:
                 raise MatchFilterError(
                     'Sampling rates do not match in data.')
     self.filt_order = filt_order
     self.process_length = process_length
     self.prepick = prepick
     if event is not None:
         if "eqcorrscan_template_" + temp_name not in \
                 [c.text for c in event.comments]:
             event.comments.append(
                 Comment(
                     text="eqcorrscan_template_" + temp_name,
                     creation_info=CreationInfo(agency='eqcorrscan',
                                                author=getpass.getuser())))
     self.event = event
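
A usage sketch for this constructor; the default obspy example stream is
sampled at 100 Hz, so the sampling-rate check passes, and the name satisfies
the ^[a-z_0-9]+$ regex:

from obspy import read

tmpl = Template(name="example_template_1", st=read(), lowcut=2.0,
                highcut=9.0, samp_rate=100.0, filt_order=4,
                process_length=86400.0, prepick=0.05)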
Example #15
def _read_focmec_lst_one_block(lines, polarity_count=None):
    comment = Comment(text='\n'.join(lines))
    while lines and not lines[0].lstrip().startswith('Dip,Strike,Rake'):
        lines.pop(0)
    # the last block does not contain a focmec but only a short comment how
    # many solutions there were overall, so we hit a block that will not have
    # the above line and we exhaust the lines list
    if not lines:
        return None, []
    dip, strike, rake = [float(x) for x in lines[0].split()[1:4]]
    plane1 = NodalPlane(strike=strike, dip=dip, rake=rake)
    lines.pop(0)
    dip, strike, rake = [float(x) for x in lines[0].split()[1:4]]
    plane2 = NodalPlane(strike=strike, dip=dip, rake=rake)
    planes = NodalPlanes(nodal_plane_1=plane1, nodal_plane_2=plane2,
                         preferred_plane=1)
    focmec = FocalMechanism(nodal_planes=planes)
    focmec.comments.append(comment)
    if polarity_count is not None:
        polarity_errors = _get_polarity_error_count_lst_block(lines)
        focmec.station_polarity_count = polarity_count
        focmec.misfit = float(polarity_errors) / polarity_count
    return focmec, lines
Example #16
def stalta_pick(stream,
                stalen,
                ltalen,
                trig_on,
                trig_off,
                freqmin=False,
                freqmax=False,
                show=False):
    """
    Basic sta/lta picker, suggest using alternative in obspy.

    Simple sta/lta (short-term average/long-term average) picker, using
    obspy's :func:`obspy.signal.trigger.classic_sta_lta` routine to generate
    the characteristic function.

    Currently very basic quick wrapper, there are many other (better) options
    in obspy in the :mod:`obspy.signal.trigger` module.

    :type stream: obspy.core.stream.Stream
    :param stream: The stream to pick on, can be any number of channels.
    :type stalen: float
    :param stalen: Length of the short-term average window in seconds.
    :type ltalen: float
    :param ltalen: Length of the long-term average window in seconds.
    :type trig_on: float
    :param trig_on: sta/lta ratio to trigger a detection/pick
    :type trig_off: float
    :param trig_off: sta/lta ratio to turn the trigger off - no further picks\
        will be made after trig_on is exceeded until the ratio drops below
        trig_off.
    :type freqmin: float
    :param freqmin: Low-cut frequency in Hz for bandpass filter
    :type freqmax: float
    :param freqmax: High-cut frequency in Hz for bandpass filter
    :type show: bool
    :param show: Show picks on waveform.

    :returns: :class:`obspy.core.event.event.Event`

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import stalta_pick
    >>> st = read()
    >>> event = stalta_pick(st, stalen=0.2, ltalen=4, trig_on=10,
    ...             trig_off=1, freqmin=3.0, freqmax=20.0)
    >>> print(event.creation_info.author)
    EQcorrscan

    .. warning::
        This function is not designed for accurate picking, rather it can give
        a first idea of whether picks may be possible.  Proceed with caution.
    """
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='stalta'))
    picks = []
    for tr in stream:
        # We are going to assume, for now, that if the pick is made on the
        # horizontal channel then it is an S, otherwise we will assume it is
        # a P-phase: obviously a bad assumption...
        if tr.stats.channel[-1] == 'Z':
            phase = 'P'
        else:
            phase = 'S'
        if freqmin and freqmax:
            tr.detrend('simple')
            tr.filter('bandpass',
                      freqmin=freqmin,
                      freqmax=freqmax,
                      corners=3,
                      zerophase=True)
        df = tr.stats.sampling_rate
        cft = classic_sta_lta(tr.data, int(stalen * df), int(ltalen * df))
        triggers = trigger_onset(cft, trig_on, trig_off)
        for trigger in triggers:
            on = tr.stats.starttime + (trigger[0] / df)
            # off = tr.stats.starttime + (trigger[1] / df)
            wav_id = WaveformStreamID(station_code=tr.stats.station,
                                      channel_code=tr.stats.channel,
                                      network_code=tr.stats.network)
            p = Pick(waveform_id=wav_id, phase_hint=phase, time=on)
            Logger.info('Pick made: {0}'.format(p))
            picks.append(p)
    # QC picks
    pick_stations = list(set([pick.waveform_id.station_code
                              for pick in picks]))
    for pick_station in pick_stations:
        station_picks = [
            pick for pick in picks
            if pick.waveform_id.station_code == pick_station
        ]
        # If the earliest P pick is after the earliest S pick, remove the
        # P picks. Guard against empty lists: a bare list comparison would
        # wrongly discard P picks whenever no S pick exists.
        p_time = [
            pick.time for pick in station_picks if pick.phase_hint == 'P'
        ]
        s_time = [
            pick.time for pick in station_picks if pick.phase_hint == 'S'
        ]
        if p_time and s_time and min(p_time) > min(s_time):
            p_pick = [pick for pick in station_picks if pick.phase_hint == 'P']
            for pick in p_pick:
                Logger.info('P pick after S pick, removing P pick')
                picks.remove(pick)
    event.picks = picks
    if show:
        plotting.pretty_template_plot(stream,
                                      event=event,
                                      title='Autopicks',
                                      size=(8, 9))
    if len(event.picks) > 0:
        event.origins[0].time = min([pick.time for pick in event.picks]) - 1
        # event.origins[0].latitude = float('nan')
        # event.origins[0].longitude = float('nan')
    # Set arbitrary origin time
    return event
Example #17
 def _comment(self, text):
     comment = Comment()
     comment.text = text
     comment.resource_id = ResourceIdentifier(prefix=self.res_id_prefix)
     return comment
Example #18
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str
    """

    a = line.split()
    try:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime().strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot, latitude=lat, longitude=lon,
                   depth=depjma, depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name,
                                         'origin', 'JMA')
    m_jma = Magnitude(mag=magjma, magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name,
                                         'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot, latitude=lat, longitude=lon,
                  depth=depmt, region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name,
                                        'origin', 'MT')
    m_mt = Magnitude(mag=magmt, magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name,
                                        'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name,
                                           "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down "
                      "(Jost and Herrmann 1989)")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo, comments=[cm],
                      tensor=tensor, variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name,
                                      'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
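
The _get_resource_id helper used throughout this reader is not shown. A
plausible sketch, assuming smi-style QuakeML ids (the exact prefix used by the
real module may differ):

from obspy.core.event import ResourceIdentifier

def _get_resource_id(name, res_type, tag=None):
    # Hypothetical reconstruction: smi:local/fnetmt/<event>/<type>[/<tag>]
    res_id = "smi:local/fnetmt/%s/%s" % (name, res_type)
    if tag is not None:
        res_id = "%s/%s" % (res_id, tag)
    return ResourceIdentifier(id=res_id)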
Example #19
    def _calculate_event(self,
                         template=None,
                         template_st=None,
                         estimate_origin=True,
                         correct_prepick=True):
        """
        Calculate an event for this detection using a given template.

        :type template: Template
        :param template: The template that made this detection
        :type template_st: `obspy.core.stream.Stream`
        :param template_st:
            Template stream, used to calculate pick times, not needed if
            template is given.
        :type estimate_origin: bool
        :param estimate_origin:
            Whether to include an estimate of the origin based on the template
            origin.
        :type correct_prepick: bool
        :param correct_prepick:
            Whether to apply the prepick correction defined in the template.
            Only applicable if template is not None

        .. rubric:: Note
            Works in place on Detection - over-writes previous events.
            Corrects for prepick if template given.
        """
        if template is not None and template.name != self.template_name:
            Logger.info("Template names do not match: {0}: {1}".format(
                template.name, self.template_name))
            return
        # Detect time must be valid QuakeML uri within resource_id.
        # This will write a formatted string which is still
        # readable by UTCDateTime
        det_time = str(self.detect_time.strftime('%Y%m%dT%H%M%S.%f'))
        ev = Event(resource_id=ResourceIdentifier(
            id=self.template_name + '_' + det_time, prefix='smi:local'))
        ev.creation_info = CreationInfo(author='EQcorrscan',
                                        creation_time=UTCDateTime())
        ev.comments.append(
            Comment(text="Template: {0}".format(self.template_name)))
        ev.comments.append(
            Comment(text='threshold={0}'.format(self.threshold)))
        ev.comments.append(
            Comment(text='detect_val={0}'.format(self.detect_val)))
        if self.chans is not None:
            ev.comments.append(
                Comment(text='channels used: {0}'.format(' '.join(
                    [str(pair) for pair in self.chans]))))
        if template is not None:
            template_st = template.st
            if correct_prepick:
                template_prepick = template.prepick
            else:
                template_prepick = 0
            try:
                template_picks = template.event.picks
            except AttributeError:
                template_picks = []
        else:
            template_prepick = 0
            template_picks = []
        min_template_tm = min([tr.stats.starttime for tr in template_st])
        for tr in template_st:
            if (tr.stats.station, tr.stats.channel) \
                    not in self.chans:
                continue
            elif "not_in_original" in tr.stats:
                continue
            elif np.all(np.isnan(tr.data)):
                continue  # The channel contains no data and was not used.
            else:
                pick_time = self.detect_time + (tr.stats.starttime -
                                                min_template_tm)
                pick_time += template_prepick
                new_pick = Pick(time=pick_time,
                                waveform_id=WaveformStreamID(
                                    network_code=tr.stats.network,
                                    station_code=tr.stats.station,
                                    channel_code=tr.stats.channel,
                                    location_code=tr.stats.location))
                template_pick = [
                    p for p in template_picks
                    if p.waveform_id.get_seed_string() ==
                    new_pick.waveform_id.get_seed_string()
                ]
                if len(template_pick) == 0:
                    new_pick.phase_hint = None
                elif len(template_pick) == 1:
                    new_pick.phase_hint = template_pick[0].phase_hint
                else:
                    # Multiple picks for this trace in template
                    similar_traces = template_st.select(id=tr.id)
                    similar_traces.sort()
                    _index = similar_traces.traces.index(tr)
                    try:
                        new_pick.phase_hint = sorted(
                            template_pick,
                            key=lambda p: p.time)[_index].phase_hint
                    except IndexError:
                        Logger.error(f"No pick for trace: {tr.id}")
                ev.picks.append(new_pick)
        if estimate_origin and template is not None\
                and template.event is not None:
            try:
                template_origin = (template.event.preferred_origin()
                                   or template.event.origins[0])
            except IndexError:
                template_origin = None
            if template_origin:
                for pick in ev.picks:
                    comparison_pick = [
                        p for p in template.event.picks
                        if p.waveform_id.get_seed_string() ==
                        pick.waveform_id.get_seed_string()
                    ]
                    comparison_pick = [
                        p for p in comparison_pick
                        if p.phase_hint == pick.phase_hint
                    ]
                    if len(comparison_pick) > 0:
                        break
                else:
                    Logger.error("Could not compute relative origin: no picks")
                    self.event = ev
                    return
                origin_time = pick.time - (comparison_pick[0].time -
                                           template_origin.time)
                # Calculate based on difference between pick and origin?
                _origin = Origin(
                    ResourceIdentifier(id="EQcorrscan/{0}_{1}".format(
                        self.template_name, det_time),
                                       prefix="smi:local"),
                    time=origin_time,
                    evaluation_mode="automatic",
                    evaluation_status="preliminary",
                    creation_info=CreationInfo(author='EQcorrscan',
                                               creation_time=UTCDateTime()),
                    comments=[Comment(
                        text="Origin automatically assigned based on template"
                             " origin: use with caution.")],
                    latitude=template_origin.latitude,
                    longitude=template_origin.longitude,
                    depth=template_origin.depth,
                    time_errors=template_origin.time_errors,
                    latitude_errors=template_origin.latitude_errors,
                    longitude_errors=template_origin.longitude_errors,
                    depth_errors=template_origin.depth_errors,
                    depth_type=template_origin.depth_type,
                    time_fixed=False,
                    epicenter_fixed=template_origin.epicenter_fixed,
                    reference_system_id=template_origin.reference_system_id,
                    method_id=template_origin.method_id,
                    earth_model_id=template_origin.earth_model_id,
                    origin_type=template_origin.origin_type,
                    origin_uncertainty=template_origin.origin_uncertainty,
                    region=template_origin.region)
                ev.origins = [_origin]
        self.event = ev
        return self
Example #20
    def construct(self, method, lowcut, highcut, samp_rate, filt_order,
                  length, prepick, swin="all", process_len=86400,
                  all_horiz=False, delayed=True, plot=False, plotdir=None,
                  min_snr=None, parallel=False, num_cores=False,
                  skip_short_chans=False, save_progress=False, **kwargs):
        """
        Generate a Tribe of Templates.

        :type method: str
        :param method:
            Method of Tribe generation. Possible options are: `from_client`,
            `from_seishub`, `from_meta_file`.  See below on the additional
            required arguments for each method.
        :type lowcut: float
        :param lowcut:
            Low cut (Hz), if set to None will not apply a lowcut
        :type highcut: float
        :param highcut:
            High cut (Hz), if set to None will not apply a highcut.
        :type samp_rate: float
        :param samp_rate:
            New sampling rate in Hz.
        :type filt_order: int
        :param filt_order:
            Filter level (number of corners).
        :type length: float
        :param length: Length of template waveform in seconds.
        :type prepick: float
        :param prepick: Pre-pick time in seconds
        :type swin: str
        :param swin:
            P, S, P_all, S_all or all, defaults to all: see note in
            :func:`eqcorrscan.core.template_gen.template_gen`
        :type process_len: int
        :param process_len: Length of data in seconds to download and process.
        :type all_horiz: bool
        :param all_horiz:
            To use both horizontal channels even if there is only a pick on
            one of them.  Defaults to False.
        :type delayed: bool
        :param delayed: If True, each channel will begin relative to its own
            pick-time; if set to False, each channel will begin at the same
            time.
        :type plot: bool
        :param plot: Plot templates or not.
        :type plotdir: str
        :param plotdir:
            The path to save plots to. If `plotdir=None` (default) then the
            figure will be shown on screen.
        :type min_snr: float
        :param min_snr:
            Minimum signal-to-noise ratio for a channel to be included in the
            template, where signal-to-noise ratio is calculated as the ratio
            of the maximum amplitude in the template window to the rms
            amplitude in the whole window given.
        :type parallel: bool
        :param parallel: Whether to process data in parallel or not.
        :type num_cores: int
        :param num_cores:
            Number of cores to try and use, if False and parallel=True,
            will use either all your cores, or as many traces as in the data
            (whichever is smaller).
        :type save_progress: bool
        :param save_progress:
            Whether to save the resulting template set at every data step or
            not. Useful for long-running processes.
        :type skip_short_chans: bool
        :param skip_short_chans:
            Whether to ignore channels that have insufficient length data or
            not. Useful when the quality of data is not known, e.g. when
            downloading old, possibly triggered data from a datacentre

        .. note::
            *Method specific arguments:*

            - `from_client` requires:
                :param str client_id:
                    string passable by obspy to generate Client, or any object
                    with a `get_waveforms` method, including a Client instance.
                :param `obspy.core.event.Catalog` catalog:
                    Catalog of events to generate template for
                :param float data_pad: Pad length for data-downloads in seconds
            - `from_seishub` requires:
                :param str url: url to seishub database
                :param `obspy.core.event.Catalog` catalog:
                    Catalog of events to generate template for
                :param float data_pad: Pad length for data-downloads in seconds
            - `from_meta_file` requires:
                :param str meta_file:
                    Path to obspy-readable event file, or an obspy Catalog
                :param `obspy.core.stream.Stream` st:
                    Stream containing waveform data for template. Note that
                    this should be the same length of stream as you will use
                    for the continuous detection, e.g. if you detect in
                    day-long files, give this a day-long file!
                :param bool process:
                    Whether to process the data or not, defaults to True.

        .. Note::
            Method: `from_sac` is not supported by Tribe.construct and must
            use Template.construct.

        .. Note:: Templates will be named according to their start-time.
        """
        templates, catalog, process_lengths = template_gen.template_gen(
            method=method, lowcut=lowcut, highcut=highcut, length=length,
            filt_order=filt_order, samp_rate=samp_rate, prepick=prepick,
            return_event=True, save_progress=save_progress, swin=swin,
            process_len=process_len, all_horiz=all_horiz, plotdir=plotdir,
            delayed=delayed, plot=plot, min_snr=min_snr, parallel=parallel,
            num_cores=num_cores, skip_short_chans=skip_short_chans,
            **kwargs)
        for template, event, process_len in zip(templates, catalog,
                                                process_lengths):
            t = Template()
            for tr in template:
                if not np.any(tr.data.astype(np.float16)):
                    Logger.warning('Data are zero in float16, missing data,'
                                   ' will not use: {0}'.format(tr.id))
                    template.remove(tr)
            if len(template) == 0:
                Logger.error('Empty Template')
                continue
            t.st = template
            t.name = template.sort(['starttime'])[0].stats.starttime.strftime(
                '%Y_%m_%dt%H_%M_%S')
            t.lowcut = lowcut
            t.highcut = highcut
            t.filt_order = filt_order
            t.samp_rate = samp_rate
            t.process_length = process_len
            t.prepick = prepick
            event.comments.append(Comment(
                text="eqcorrscan_template_" + t.name,
                creation_info=CreationInfo(agency='eqcorrscan',
                                           author=getpass.getuser())))
            t.event = event
            self.templates.append(t)
        return self
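
A call sketch for the from_meta_file method; the file names are hypothetical,
and st should be the same length of stream as the continuous data you will
later detect in:

from obspy import read

tribe = Tribe().construct(
    method="from_meta_file", meta_file="catalog.xml", st=read("day.mseed"),
    lowcut=2.0, highcut=9.0, samp_rate=20.0, filt_order=4, length=6.0,
    prepick=0.05, swin="all", process_len=86400)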
Example #21
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and depth
        in meters (longitude, latitude, depth in kilometers).
        If left ``None``, NonLinLoc (x, y, z) output is left unchanged (e.g. if
        it is in geographical coordinates already like for NonLinLoc in
        global mode).
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        ``pick_id`` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    lines = data.splitlines()

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    # determine indices of block start/end of the NLLOC output file
    indices_hyp = [None, None]
    indices_phases = [None, None]
    for i, line in enumerate(lines):
        if line.startswith("NLLOC "):
            indices_hyp[0] = i
        elif line.startswith("END_NLLOC"):
            indices_hyp[1] = i
        elif line.startswith("PHASE "):
            indices_phases[0] = i
        elif line.startswith("END_PHASE"):
            indices_phases[1] = i
    if any([i is None for i in indices_hyp]):
        msg = ("NLLOC HYP file seems corrupt,"
               " could not detect 'NLLOC' and 'END_NLLOC' lines.")
        raise RuntimeError(msg)
    # strip any other lines around NLLOC block
    lines = lines[indices_hyp[0]:indices_hyp[1]]

    # extract PHASES lines (if any)
    if any(indices_phases):
        if not all(indices_phases):
            msg = ("NLLOC HYP file seems corrupt, 'PHASE' block is corrupt.")
            raise RuntimeError(msg)
        i1, i2 = indices_phases
        lines, phases_lines = lines[:i1] + lines[i2 + 1:], lines[i1 + 1:i2]
    else:
        phases_lines = []

    lines = dict([line.split(None, 1) for line in lines])
    line = lines["SIGNATURE"]

    line = line.rstrip().split('"')[1]
    signature, version, date, time = line.rsplit(" ", 3)
    creation_time = UTCDateTime().strptime(date + time, str("%d%b%Y%Hh%Mm%S"))

    # maximum likelihood origin location info line
    line = lines["HYPOCENTER"]

    x, y, z = map(float, line.split()[1:7:2])

    if coordinate_converter:
        x, y, z = coordinate_converter(x, y, z)

    # origin time info line
    line = lines["GEOGRAPHIC"]

    year, month, day, hour, minute = map(int, line.split()[1:6])
    seconds = float(line.split()[6])
    time = UTCDateTime(year, month, day, hour, minute, seconds)

    # distribution statistics line
    line = lines["STATISTICS"]
    covariance_xx = float(line.split()[7])
    covariance_yy = float(line.split()[13])
    covariance_zz = float(line.split()[17])
    stats_info_string = str(
        "Note: Depth/Latitude/Longitude errors are calculated from covariance "
        "matrix as 1D marginal (Lon/Lat errors as great circle degrees) "
        "while OriginUncertainty min/max horizontal errors are calculated "
        "from 2D error ellipsoid and are therefore seemingly higher compared "
        "to 1D errors. Error estimates can be reconstructed from the "
        "following original NonLinLoc error statistics line:\nSTATISTICS " +
        lines["STATISTICS"])

    # goto location quality info line
    line = lines["QML_OriginQuality"].split()

    (assoc_phase_count, used_phase_count, assoc_station_count,
     used_station_count, depth_phase_count) = map(int, line[1:11:2])
    stderr, az_gap, sec_az_gap = map(float, line[11:17:2])
    gt_level = line[17]
    min_dist, max_dist, med_dist = map(float, line[19:25:2])

    # goto location quality info line
    line = lines["QML_OriginUncertainty"]

    hor_unc, min_hor_unc, max_hor_unc, hor_unc_azim = \
        map(float, line.split()[1:9:2])

    # assign origin info
    event = Event()
    cat = Catalog(events=[event])
    o = Origin()
    event.origins = [o]
    o.origin_uncertainty = OriginUncertainty()
    o.quality = OriginQuality()
    ou = o.origin_uncertainty
    oq = o.quality
    o.comments.append(Comment(text=stats_info_string))

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    event.creation_info = CreationInfo(creation_time=creation_time,
                                       version=version)
    o.creation_info = CreationInfo(creation_time=creation_time,
                                   version=version)

    # negative values can appear on diagonal of covariance matrix due to a
    # precision problem in NLLoc implementation when location coordinates are
    # large compared to the covariances.
    o.longitude = x
    try:
        o.longitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_xx))
    except ValueError:
        if covariance_xx < 0:
            msg = ("Negative value in XX value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.latitude = y
    try:
        o.latitude_errors.uncertainty = kilometer2degrees(sqrt(covariance_yy))
    except ValueError:
        if covariance_yy < 0:
            msg = ("Negative value in YY value of covariance matrix, not "
                   "setting longitude error (epicentral uncertainties will "
                   "still be set in origin uncertainty).")
            warnings.warn(msg)
        else:
            raise
    o.depth = z * 1e3  # meters!
    o.depth_errors.uncertainty = sqrt(covariance_zz) * 1e3  # meters!
    o.depth_errors.confidence_level = 68
    o.depth_type = str("from location")
    o.time = time

    ou.horizontal_uncertainty = hor_unc
    ou.min_horizontal_uncertainty = min_hor_unc
    ou.max_horizontal_uncertainty = max_hor_unc
    # values of -1 seem to be used for unset values, set to None
    for field in ("horizontal_uncertainty", "min_horizontal_uncertainty",
                  "max_horizontal_uncertainty"):
        if ou.get(field, -1) == -1:
            ou[field] = None
        else:
            ou[field] *= 1e3  # meters!
    ou.azimuth_max_horizontal_uncertainty = hor_unc_azim
    ou.preferred_description = str("uncertainty ellipse")
    ou.confidence_level = 68  # NonLinLoc in general uses 1-sigma (68%) level

    oq.standard_error = stderr
    oq.azimuthal_gap = az_gap
    oq.secondary_azimuthal_gap = sec_az_gap
    oq.used_phase_count = used_phase_count
    oq.used_station_count = used_station_count
    oq.associated_phase_count = assoc_phase_count
    oq.associated_station_count = assoc_station_count
    oq.depth_phase_count = depth_phase_count
    oq.ground_truth_level = gt_level
    oq.minimum_distance = kilometer2degrees(min_dist)
    oq.maximum_distance = kilometer2degrees(max_dist)
    oq.median_distance = kilometer2degrees(med_dist)

    # go through all phase info lines
    for line in phases_lines:
        line = line.split()
        arrival = Arrival()
        o.arrivals.append(arrival)
        station = str(line[0])
        phase = str(line[4])
        arrival.phase = phase
        arrival.distance = kilometer2degrees(float(line[21]))
        arrival.azimuth = float(line[23])
        arrival.takeoff_angle = float(line[24])
        arrival.time_residual = float(line[16])
        arrival.time_weight = float(line[17])
        pick = Pick()
        wid = WaveformStreamID(station_code=station)
        date, hourmin, sec = map(str, line[6:9])
        t = UTCDateTime().strptime(date + hourmin, "%Y%m%d%H%M") + float(sec)
        pick.waveform_id = wid
        pick.time = t
        pick.time_errors.uncertainty = float(line[10])
        pick.phase_hint = phase
        pick.onset = ONSETS.get(line[3].lower(), None)
        pick.polarity = POLARITIES.get(line[5].lower(), None)
        # try to determine original pick for each arrival
        for pick_ in original_picks:
            wid = pick_.waveform_id
            if station == wid.station_code and phase == pick_.phase_hint:
                pick = pick_
                break
        else:
            # warn if original picks were specified and we could not associate
            # the arrival correctly
            if original_picks:
                msg = ("Could not determine corresponding original pick for "
                       "arrival. "
                       "Falling back to pick information in NonLinLoc "
                       "hypocenter-phase file.")
                warnings.warn(msg)
        event.picks.append(pick)
        arrival.pick_id = pick.resource_id

    return cat
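
The coordinate_converter contract (three arrays in, a (lon, lat, depth_km)
tuple out) is easy to get wrong. A sketch for a NonLinLoc run on a local grid
with x/y in kilometers, using pyproj as an assumed dependency and a purely
illustrative UTM zone; "loc.hyp" is a hypothetical file name:

from pyproj import Transformer

_transformer = Transformer.from_crs("EPSG:32760", "EPSG:4326", always_xy=True)

def my_converter(x, y, z):
    # NonLinLoc x/y are in kilometers here; pyproj expects meters for UTM.
    lon, lat = _transformer.transform(x * 1e3, y * 1e3)
    return lon, lat, z  # depth is already in kilometers

cat = read_nlloc_hyp("loc.hyp", coordinate_converter=my_converter)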
Example #22
 def _make_comment(self, text):
     id_ = self._construct_id(['comment'], add_hash=True)
     comment = Comment(text=text, resource_id=id_)
     return comment
Example #24
def detections_2_cat(detections, template_dict, stream, temp_prepick, max_lag, cc_thresh,
                     extract_pre_pick=3.0, extract_post_pick=7.0, write_wav=False, debug=0):
    r"""Function to create a catalog from a list of detections, adjusting template pick \
    times using cross correlation with data stream at the time of detection.

    :type detections: list of DETECTION objects
    :param detections: Detections which we want to extract and locate.
    :type template_dict: dict
    :param template_dict: Dictionary of template name: template stream for the entire \
        catalog. Template names must be in the format found in the DETECTION objects.
    :type stream: obspy.Stream
    :param stream: stream encompassing time span of the detections. Will be used for pick \
        refinement by cross correlation. Should be fed a stream processed in the same way \
        as the streams in template dict (and in the same way that they were processed \
        during matched filtering). The waveforms will not be processed here.
    :type write_wav: bool or str
    :param write_wav: If false, will not write detection waveforms to miniseed files. \
        Otherwise, specify a directory to write the templates to. Will use name \
        template_name_detection_time.mseed.
    :returns: :class:`obspy.core.event.Catalog`
    """

    from obspy import UTCDateTime, Catalog, Stream
    from obspy.core.event import ResourceIdentifier, Event, Pick, CreationInfo, Comment, WaveformStreamID
    from obspy.signal.cross_correlation import xcorr
    from eqcorrscan.utils import plotting

    #XXX TODO Scripts haven't been saving the actual detection objects so we
    #XXX TODO cannot make use of DETECTION.chans. Would be useful.

    # Copy stream out of the way
    st = stream.copy()
    # Create nested dictionary of delays template_name: stachan: delay
    # dict.items() works in both python 2 and 3 but is memory inefficient in 2 as both vars are
    # read into memory as lists
    delays = {}
    for name, temp in template_dict.items():
        sorted_temp = temp.sort(['starttime'])
        stachans = [(tr.stats.station, tr.stats.channel, tr.stats.network)
                    for tr in sorted_temp]
        mintime = sorted_temp[0].stats.starttime
        delays[name] = {(tr.stats.station, tr.stats.channel): tr.stats.starttime - mintime
                        for tr in sorted_temp}
    # Loop over all detections, saving each as a new event in a catalog
    new_cat = Catalog()
    for detection in detections:
        if write_wav:
            new_stream = Stream()
        if hasattr(detection, 'event'):
            new_event = detection.event
        else:
            rid = ResourceIdentifier(id=detection.template_name + '_' +\
                                        detection.detect_time.strftime('%Y%m%dT%H%M%S.%f'),
                                     prefix='smi:local')
            new_event = Event(resource_id=rid)
            cr_i = CreationInfo(author='EQcorrscan',
                                creation_time=UTCDateTime())
            new_event.creation_info = cr_i
            thresh_str = 'threshold=' + str(detection.threshold)
            ccc_str = 'detect_val=' + str(detection.detect_val)
            det_time_str = 'det_time=%s' % str(detection.detect_time)
            if detection.chans:
                used_chans = 'channels used: ' + \
                             ' '.join([str(pair) for pair in detection.chans])
                new_event.comments.append(Comment(text=used_chans))
            new_event.comments.append(Comment(text=thresh_str))
            new_event.comments.append(Comment(text=ccc_str))
            new_event.comments.append(Comment(text=det_time_str))
        template = template_dict[detection.template_name]
        temp_len = template[0].stats.npts * template[0].stats.sampling_rate
        if template.sort(['starttime'])[0].stats.starttime == detection.detect_time:
            print('Template %s detected itself at %s.' % (detection.template_name, str(detection.detect_time)))
            new_event.resource_id = ResourceIdentifier(id=detection.template_name + '_self',
                                                       prefix='smi:local')
        if debug >= 2:
            print('Plotting detection for template: %s' % detection.template_name)
            plt_st = Stream([st.select(station=tr.stats.station,
                                       channel=tr.stats.channel)[0].slice(detection.detect_time-extract_pre_pick,
                                                                          detection.detect_time+extract_post_pick)
                             for tr in template if len(st.select(station=tr.stats.station,
                                                                 channel=tr.stats.channel)) > 0])
            plotting.detection_multiplot(plt_st, template, [detection.detect_time.datetime])
        # Loop over each trace in the template, correcting picks for new event if need be
        for tr in template:
            sta = tr.stats.station
            chan = tr.stats.channel
            if len(st.select(station=sta, channel=chan)) != 0:
                st_tr = st.select(station=sta, channel=chan)[0]
            else:
                print('No stream for %s: %s' % (sta, chan))
                continue
            st_tr_pick = detection.detect_time + delays[detection.template_name][(sta, chan)] + temp_prepick
            i, absval, full_corr = xcorr(
                tr, st_tr.slice(st_tr_pick - temp_prepick,
                                st_tr_pick - temp_prepick + temp_len),
                shift_len=max_lag, full_xcorr=True)
            ccval = max(full_corr)
            index = np.argmax(full_corr) - max_lag
            pk_str = 'ccval=' + str(ccval)
            if abs(index) == max_lag:
                msg = 'Correlation correction at max_lag. Consider increasing max_lag.'
                warnings.warn(msg)
            if debug >= 3:
                print('Plotting full correlation function')
                print('index: %d' % index)
                print('max_ccval: %.2f' % ccval)
                plt.plot(full_corr)
                plt.show()
                plt.close()
            if ccval > cc_thresh:
                print('Threshold exceeded at %s: %s' % (sta, chan))
                pick_tm = st_tr_pick + (index / tr.stats.sampling_rate)
            else:
                print('Correlation at %s: %s not good enough to correct pick' % (sta, chan))
                pick_tm = st_tr_pick
            if tr.stats.channel[-1] in ['Z']:
                phase_hint = 'P'
            elif tr.stats.channel[-1] in ['N', 'E', '1', '2']:
                phase_hint = 'S'
            else:
                phase_hint = None
            wv_id = WaveformStreamID(network_code=tr.stats.network,
                                     station_code=tr.stats.station,
                                     channel_code=tr.stats.channel)
            new_event.picks.append(Pick(time=pick_tm, waveform_id=wv_id, phase_hint=phase_hint,
                                        comments=[Comment(text=pk_str)]))
            if write_wav:
                new_stream.append(st_tr.slice(starttime=pick_tm - extract_pre_pick,
                                              endtime=pick_tm + extract_post_pick))
        # Append to new catalog
        new_cat += new_event
        if write_wav:
            filename = '%s%s.mseed' % (write_wav, str(new_event.resource_id))
            print('Writing new stream for detection to %s' % filename)
            new_stream.write(filename, format='MSEED')
    return new_cat
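A minimal standalone sketch of the lag correction applied above: the argmax of the full cross-correlation is converted to a signed sample shift about the window centre, then to a time correction in seconds (the correlation values and sampling rate below are synthetic):

import numpy as np

max_lag = 20                             # samples, as passed to xcorr above
full_corr = np.zeros(2 * max_lag + 1)    # full correlation function
full_corr[25] = 0.8                      # pretend the peak sits 5 samples late
index = np.argmax(full_corr) - max_lag   # signed shift: +5 samples
sampling_rate = 100.0                    # Hz (hypothetical)
print(index, index / sampling_rate)      # 5, i.e. +0.05 s added to the pick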
Example #25
def xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
                      horizontal_chans=['E', 'N', '1', '2'],
                      vertical_chans=['Z'], cores=1, interpolate=False,
                      plot=False, plotdir=None):
    """
    Compute cross-correlation picks for detections in a family.

    :type family: `eqcorrscan.core.match_filter.family.Family`
    :param family: Family to calculate correlation picks for.
    :type stream: `obspy.core.stream.Stream`
    :param stream:
        Data stream containing data for all (or a subset of) detections in
        the Family
    :type shift_len: float
    :param shift_len:
        Shift length allowed for the pick in seconds, will be plus/minus this
        amount - default=0.2
    :type min_cc: float
    :param min_cc:
        Minimum cross-correlation value to be considered a pick, default=0.4.
    :type horizontal_chans: list
    :param horizontal_chans:
        List of channel endings for horizontal-channels, on which S-picks will
        be made.
    :type vertical_chans: list
    :param vertical_chans:
        List of channel endings for vertical-channels, on which P-picks will
        be made.
    :type cores: int
    :param cores:
        Number of cores to use in parallel processing, defaults to one.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type plot: bool
    :param plot:
        To generate a plot for every detection or not, defaults to False
    :type plotdir: str
    :param plotdir:
        Path to plotting folder, plots will be output here.

    :return: Dictionary of picked events keyed by detection id.
    """
    picked_dict = {}
    delta = family.template.st[0].stats.delta
    detect_streams_dict = _prepare_data(
        family=family, detect_data=stream, shift_len=shift_len)
    detection_ids = list(detect_streams_dict.keys())
    detect_streams = [detect_streams_dict[detection_id]
                      for detection_id in detection_ids]
    if len(detect_streams) == 0:
        Logger.warning("No appropriate data found, check your family and "
                       "detections - make sure seed ids match")
        return picked_dict
    if len(detect_streams) != len(family):
        Logger.warning("Not all detections have matching data. "
                       "Proceeding anyway. HINT: Make sure SEED IDs match")
    # Correlation function needs a list of streams, we need to maintain order.
    ccc, chans = _concatenate_and_correlate(
        streams=detect_streams, template=family.template.st, cores=cores)
    for i, detection_id in enumerate(detection_ids):
        detection = [d for d in family.detections if d.id == detection_id][0]
        correlations = ccc[i]
        picked_chans = chans[i]
        detect_stream = detect_streams_dict[detection_id]
        checksum, cccsum, used_chans = 0.0, 0.0, 0
        event = Event()
        for correlation, stachan in zip(correlations, picked_chans):
            if not stachan.used:
                continue
            tr = detect_stream.select(
                station=stachan.channel[0], channel=stachan.channel[1])[0]
            if interpolate:
                shift, cc_max = _xcorr_interp(correlation, dt=delta)
            else:
                cc_max = np.amax(correlation)
                shift = np.argmax(correlation) * delta
            if np.isnan(cc_max):  # pragma: no cover
                Logger.error(
                    'Problematic trace, no cross correlation possible')
                continue
            picktime = tr.stats.starttime + shift
            checksum += cc_max
            used_chans += 1
            if cc_max < min_cc:
                Logger.debug('Correlation of {0} is below threshold, not '
                             'using'.format(cc_max))
                continue
            cccsum += cc_max
            phase = None
            if stachan.channel[1][-1] in vertical_chans:
                phase = 'P'
            elif stachan.channel[1][-1] in horizontal_chans:
                phase = 'S'
            _waveform_id = WaveformStreamID(seed_string=tr.id)
            event.picks.append(Pick(
                waveform_id=_waveform_id, time=picktime,
                method_id=ResourceIdentifier('EQcorrscan'), phase_hint=phase,
                creation_info='eqcorrscan.core.lag_calc',
                evaluation_mode='automatic',
                comments=[Comment(text='cc_max={0}'.format(cc_max))]))
        event.resource_id = ResourceIdentifier(detection_id)
        event.comments.append(Comment(text="detect_val={0}".format(cccsum)))
        # Add template-name as comment to events
        event.comments.append(Comment(
            text="Detected using template: {0}".format(family.template.name)))
        if used_chans == detection.no_chans:  # pragma: no cover
            if detection.detect_val is not None and\
               checksum - detection.detect_val < -(0.3 * detection.detect_val):
                msg = ('lag-calc has decreased cccsum from %f to %f'
                       % (detection.detect_val, checksum))
                Logger.error(msg)
                continue
        else:
            Logger.warning(
                'Cannot check if cccsum is better, used {0} channels for '
                'detection, but {1} are used here'.format(
                    detection.no_chans, used_chans))
        picked_dict.update({detection_id: event})
    if plot:  # pragma: no cover
        for i, event in enumerate(picked_dict.values()):
            if len(event.picks) == 0:
                continue
            plot_stream = detect_streams[i].copy()
            template_plot = family.template.st.copy()
            pick_stachans = [(pick.waveform_id.station_code,
                              pick.waveform_id.channel_code)
                             for pick in event.picks]
            for tr in plot_stream:
                if (tr.stats.station, tr.stats.channel) \
                        not in pick_stachans:
                    plot_stream.remove(tr)
            for tr in template_plot:
                if (tr.stats.station, tr.stats.channel) \
                        not in pick_stachans:
                    template_plot.remove(tr)
            if plotdir is not None:
                if not os.path.isdir(plotdir):
                    os.makedirs(plotdir)
                savefile = "{plotdir}/{rid}.png".format(
                    plotdir=plotdir, rid=event.resource_id.id)
                plot_repicked(template=template_plot, picks=event.picks,
                              det_stream=plot_stream, show=False, save=True,
                              savefile=savefile)
            else:
                plot_repicked(template=template_plot, picks=event.picks,
                              det_stream=plot_stream, show=True)
    return picked_dict
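A hedged usage sketch for xcorr_pick_family: the Family is assumed to come from an earlier EQcorrscan matched-filter run, and the waveform file name is hypothetical.

from obspy import read

# 'family' is assumed to come from a prior Tribe.detect / match_filter run.
stream = read('detection_day.ms')  # hypothetical day-long data file
picked = xcorr_pick_family(family, stream, shift_len=0.2, min_cc=0.4,
                           interpolate=True)
for detection_id, event in picked.items():
    print(detection_id, len(event.picks))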
Example #26
def __read_single_fnetmt_entry(line, **kwargs):
    """
    Reads a single F-net moment tensor solution to a
    :class:`~obspy.core.event.Event` object.

    :param line: String containing moment tensor information.
    :type line: str.
    """

    a = line.split()
    try:
        ot = UTCDateTime.strptime(a[0], '%Y/%m/%d,%H:%M:%S.%f')
    except ValueError:
        ot = UTCDateTime.strptime(a[0], '%Y/%m/%d,%H:%M:%S')
    lat, lon, depjma, magjma = map(float, a[1:5])
    depjma *= 1000
    region = a[5]
    strike = tuple(map(int, a[6].split(';')))
    dip = tuple(map(int, a[7].split(';')))
    rake = tuple(map(int, a[8].split(';')))
    mo = float(a[9])
    depmt = float(a[10]) * 1000
    magmt = float(a[11])
    var_red = float(a[12])
    mxx, mxy, mxz, myy, myz, mzz, unit = map(float, a[13:20])

    event_name = util.gen_sc3_id(ot)
    e = Event(event_type="earthquake")
    e.resource_id = _get_resource_id(event_name, 'event')

    # Standard JMA solution
    o_jma = Origin(time=ot,
                   latitude=lat,
                   longitude=lon,
                   depth=depjma,
                   depth_type="from location",
                   region=region)
    o_jma.resource_id = _get_resource_id(event_name, 'origin', 'JMA')
    m_jma = Magnitude(mag=magjma,
                      magnitude_type='ML',
                      origin_id=o_jma.resource_id)
    m_jma.resource_id = _get_resource_id(event_name, 'magnitude', 'JMA')
    # MT solution
    o_mt = Origin(time=ot,
                  latitude=lat,
                  longitude=lon,
                  depth=depmt,
                  region=region,
                  depth_type="from moment tensor inversion")
    o_mt.resource_id = _get_resource_id(event_name, 'origin', 'MT')
    m_mt = Magnitude(mag=magmt,
                     magnitude_type='Mw',
                     origin_id=o_mt.resource_id)
    m_mt.resource_id = _get_resource_id(event_name, 'magnitude', 'MT')
    foc_mec = FocalMechanism(triggering_origin_id=o_jma.resource_id)
    foc_mec.resource_id = _get_resource_id(event_name, "focal_mechanism")
    nod1 = NodalPlane(strike=strike[0], dip=dip[0], rake=rake[0])
    nod2 = NodalPlane(strike=strike[1], dip=dip[1], rake=rake[1])
    nod = NodalPlanes(nodal_plane_1=nod1, nodal_plane_2=nod2)
    foc_mec.nodal_planes = nod

    tensor = Tensor(m_rr=mxx, m_tt=myy, m_pp=mzz, m_rt=mxy, m_rp=mxz, m_tp=myz)
    cm = Comment(text="Basis system: North,East,Down "
                      "(Jost and Herrmann 1989)")
    cm.resource_id = _get_resource_id(event_name, 'comment', 'mt')
    mt = MomentTensor(derived_origin_id=o_mt.resource_id,
                      moment_magnitude_id=m_mt.resource_id,
                      scalar_moment=mo,
                      comments=[cm],
                      tensor=tensor,
                      variance_reduction=var_red)
    mt.resource_id = _get_resource_id(event_name, 'moment_tensor')
    foc_mec.moment_tensor = mt
    e.origins = [o_jma, o_mt]
    e.magnitudes = [m_jma, m_mt]
    e.focal_mechanisms = [foc_mec]
    e.preferred_magnitude_id = m_mt.resource_id.id
    e.preferred_origin_id = o_mt.resource_id.id
    e.preferred_focal_mechanism_id = foc_mec.resource_id.id
    return e
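A short, hedged sketch of consuming the returned Event; 'line' stands for one F-net moment-tensor record and is not spelled out here.

ev = __read_single_fnetmt_entry(line)  # 'line' read from an F-net file
print(ev.preferred_origin().depth)     # MT-inversion depth, in metres
print(ev.preferred_magnitude().mag)    # Mw of the moment-tensor solution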
Example #27
def _channel_loop(detection,
                  template,
                  min_cc,
                  detection_id,
                  interpolate,
                  i,
                  pre_lag_ccsum=None,
                  detect_chans=0):
    """
    Inner loop for correlating and assigning picks.

    Utility function to take a stream of data for the detected event and write
    maximum correlation to absolute time as picks in an obspy.core.event.Event
    object.
    Only outputs picks for picks above min_cc.

    :type detection: obspy.core.stream.Stream
    :param detection:
        Stream of data for the slave event detected using template.
    :type template: obspy.core.stream.Stream
    :param template: Stream of data as the template for the detection.
    :type min_cc: float
    :param min_cc: Minimum cross-correlation value to allow a pick to be made.
    :type detection_id: str
    :param detection_id: Detection ID to associate the event with.
    :type interpolate: bool
    :param interpolate:
        Interpolate the correlation function to achieve sub-sample precision.
    :type i: int
    :param i:
        Used to track which process has occurred when running in parallel.
    :type pre_lag_ccsum: float
    :param pre_lag_ccsum:
        Cross-correlation sum before lag-calc, will check that the
        cross-correlation sum is increased by lag-calc (using all channels,
        ignoring min_cc)
    :type detect_chans: int
    :param detect_chans:
        Number of channels originally used in detections, must match the number
        used here to allow for cccsum checking.

    :returns:
        Event object containing network, station, channel and pick information.
    :rtype: :class:`obspy.core.event.Event`
    """
    event = Event()
    s_stachans = {}
    used_s_sta = []
    cccsum = 0
    checksum = 0
    used_chans = 0
    for tr in template:
        temp_net = tr.stats.network
        temp_sta = tr.stats.station
        temp_chan = tr.stats.channel
        image = detection.select(station=temp_sta, channel=temp_chan)
        if image:
            if interpolate:
                try:
                    ccc = normxcorr2(tr.data, image[0].data)
                    shift, cc_max = _xcorr_interp(ccc=ccc,
                                                  dt=image[0].stats.delta)
                except IndexError:
                    log.error('Could not interpolate ccc, not smooth')
                    ccc = normxcorr2(tr.data, image[0].data)
                    cc_max = np.amax(ccc)
                    shift = np.argmax(ccc) * image[0].stats.delta
                # Convert the maximum cross-correlation time to an actual time
                picktime = image[0].stats.starttime + shift
            else:
                # Convert the maximum cross-correlation time to an actual time
                ccc = normxcorr2(tr.data, image[0].data)
                cc_max = np.amax(ccc)
                picktime = image[0].stats.starttime + (np.argmax(ccc) *
                                                       image[0].stats.delta)
            log.debug('********DEBUG: Maximum cross-corr=%s' % cc_max)
            checksum += cc_max
            used_chans += 1
            if cc_max < min_cc:
                continue
            cccsum += cc_max
            # Perhaps weight each pick by the cc val or cc val^2?
            # weight = np.amax(ccc) ** 2
            if temp_chan[-1:] == 'Z':
                phase = 'P'
            # Only take the S-pick with the best correlation
            elif temp_chan[-1:] in ['E', 'N']:
                phase = 'S'
                if temp_sta not in s_stachans and np.amax(ccc) > min_cc:
                    s_stachans[temp_sta] = ((temp_chan, np.amax(ccc),
                                             picktime))
                elif temp_sta in s_stachans and np.amax(ccc) > min_cc:
                    if np.amax(ccc) > s_stachans[temp_sta][1]:
                        picktime = picktime
                    else:
                        picktime = s_stachans[temp_sta][2]
                        temp_chan = s_stachans[temp_sta][0]
                elif np.amax(ccc) < min_cc and temp_sta not in used_s_sta:
                    used_s_sta.append(temp_sta)
                else:
                    continue
            else:
                phase = None
            _waveform_id = WaveformStreamID(network_code=temp_net,
                                            station_code=temp_sta,
                                            channel_code=temp_chan)
            event.picks.append(
                Pick(waveform_id=_waveform_id,
                     time=picktime,
                     method_id=ResourceIdentifier('EQcorrscan'),
                     phase_hint=phase,
                     creation_info='eqcorrscan.core.lag_calc',
                     comments=[Comment(text='cc_max=%s' % cc_max)]))
    event.resource_id = detection_id
    ccc_str = ("detect_val=%s" % cccsum)
    event.comments.append(Comment(text=ccc_str))
    if used_chans == detect_chans:
        if pre_lag_ccsum is not None and checksum - pre_lag_ccsum < -0.05:
            msg = ('lag-calc has decreased cccsum from %f to %f - '
                   'report this error' % (pre_lag_ccsum, checksum))
            raise LagCalcError(msg)
    else:
        warnings.warn('Cannot check if cccsum is better, used %i channels '
                      'for detection, but %i are used here' %
                      (detect_chans, used_chans))
    return i, event
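The interpolate=True branch above defers to _xcorr_interp for sub-sample precision. As an illustration only, here is the standard three-point parabolic peak refinement that such routines typically implement (a generic sketch, not EQcorrscan's exact _xcorr_interp):

import numpy as np

def parabolic_peak(ccc, dt):
    # Fit a parabola through the maximum and its two neighbours.
    i = int(np.argmax(ccc))
    if i == 0 or i == len(ccc) - 1:  # peak on the edge: no refinement
        return i * dt, ccc[i]
    y0, y1, y2 = ccc[i - 1], ccc[i], ccc[i + 1]
    denom = y0 - 2.0 * y1 + y2
    offset = 0.5 * (y0 - y2) / denom if denom != 0 else 0.0
    return (i + offset) * dt, y1 - 0.25 * (y0 - y2) * offset

ccc = np.array([0.1, 0.6, 0.9, 0.7, 0.2])
print(parabolic_peak(ccc, dt=0.01))  # shift lands just after sample 2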
Example #28
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH
    
    Use the 'only_fm_picks' flag to only include the picks HASH used for the FocalMechanism.
    This flag will replace the 'picks' and 'arrivals' lists of existing events with new ones.
    
    Inputs
    -------
    hp    : hashpy.HashPype instance
    
    event : obspy.core.event.Event
    
    only_fm_picks : bool, whether to overwrite the picks/arrivals lists
    
    
    Returns
    -------
    obspy.core.event.Event
    
    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.id
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.get_referred_object()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best" as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(
                text=hp.qual[s],
                resource_id=ResourceIdentifier(
                    focal_mech.resource_id.id + '/comment/quality')))
        # ----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = focal_mech.resource_id.id
    return event
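A hedged sketch of serialising the result; the HashPype setup is omitted and 'hp' is assumed to be a configured, already-run hashpy.HashPype instance.

from obspy.core.event import Catalog

event = outputOBSPY(hp, only_fm_picks=True)
Catalog(events=[event]).write('hash_solution.xml', format='QUAKEML')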
Example #29
def cross_net(stream, env=False, master=False):
    """
    Generate picks using a simple envelope cross-correlation.

    Picks are made for each channel based on optimal moveout defined by
    maximum cross-correlation with master trace.  Master trace will be the
    first trace in the stream if not set.  Requires good inter-station
    coherence.

    :type stream: obspy.core.stream.Stream
    :param stream: Stream to pick
    :type env: bool
    :param env: To compute cross-correlations on the envelope or not.
    :type master: obspy.core.trace.Trace
    :param master:
        Trace to use as master, if False, will use the first trace in stream.

    :returns: :class:`obspy.core.event.event.Event`

    .. rubric:: Example

    >>> from obspy import read
    >>> from eqcorrscan.utils.picker import cross_net
    >>> st = read()
    >>> event = cross_net(st, env=True)
    >>> print(event.creation_info.author)
    EQcorrscan

    .. warning::
        This routine is not designed for accurate picking, rather it can be
        used for a first-pass at picks to obtain simple locations. Based on
        the waveform-envelope cross-correlation method.
    """
    event = Event()
    event.origins.append(Origin())
    event.creation_info = CreationInfo(author='EQcorrscan',
                                       creation_time=UTCDateTime())
    event.comments.append(Comment(text='cross_net'))
    samp_rate = stream[0].stats.sampling_rate
    if not env:
        Logger.info('Using the raw data')
        st = stream.copy()
        st.resample(samp_rate)
    else:
        st = stream.copy()
        Logger.info('Computing envelope')
        for tr in st:
            tr.resample(samp_rate)
            tr.data = envelope(tr.data)
    if not master:
        master = st[0]
    master.data = np.nan_to_num(master.data)
    for i, tr in enumerate(st):
        tr.data = np.nan_to_num(tr.data)
        Logger.debug('Comparing {0} with the master'.format(tr.id))
        shift_len = int(0.3 * len(tr))
        Logger.debug('Shift length is set to ' + str(shift_len) + ' samples')
        index, cc = xcorr(master, tr, shift_len)
        wav_id = WaveformStreamID(station_code=tr.stats.station,
                                  channel_code=tr.stats.channel,
                                  network_code=tr.stats.network)
        event.picks.append(
            Pick(time=tr.stats.starttime + (index / tr.stats.sampling_rate),
                 waveform_id=wav_id,
                 phase_hint='S',
                 onset='emergent'))
        Logger.debug(event.picks[i])
    # Set an arbitrary origin time one second before the earliest pick
    event.origins[0].time = min([pick.time for pick in event.picks]) - 1
    # event.origins[0].latitude = float('nan')
    # event.origins[0].longitude = float('nan')
    del st
    return event
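A minimal illustration of what env=True does per trace: obspy.signal.filter.envelope returns the Hilbert-transform envelope that the picker then cross-correlates (uses ObsPy's bundled example data):

from obspy import read
from obspy.signal.filter import envelope

tr = read()[0]                # ObsPy's bundled example trace
env_data = envelope(tr.data)  # same length, non-negative envelope
print(len(env_data) == len(tr.data), env_data.min() >= 0.0)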
Example #30
def _internal_read_single_cmtsolution(buf):
    """
    Reads a single CMTSOLUTION file to a :class:`~obspy.core.event.Catalog`
    object.

    :param buf: File to read.
    :type buf: open file or file-like object
    """
    # The first line encodes the preliminary epicenter.
    line = buf.readline()

    hypocenter_catalog = line[:5].strip().decode()

    origin_time = line[5:].strip().split()[:6]
    values = list(map(int, origin_time[:-1])) + \
        [float(origin_time[-1])]
    try:
        origin_time = UTCDateTime(*values)
    except (TypeError, ValueError):
        warnings.warn("Could not determine origin time from line: %s. Will "
                      "be set to zero." % line)
        origin_time = UTCDateTime(0)
    line = line[28:].split()
    latitude, longitude, depth, body_wave_mag, surface_wave_mag = \
        map(float, line[:5])

    # The rest encodes the centroid solution.
    event_name = buf.readline().strip().split()[-1].decode()

    preliminary_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="prelim"),
        time=origin_time,
        longitude=longitude,
        latitude=latitude,
        # Depth is in meters.
        depth=depth * 1000.0,
        origin_type="hypocenter",
        region=_fe.get_region(longitude=longitude, latitude=latitude),
        evaluation_status="preliminary")

    preliminary_bw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_bw"),
        mag=body_wave_mag,
        magnitude_type="Mb",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    preliminary_sw_magnitude = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="prelim_sw"),
        mag=surface_wave_mag,
        magnitude_type="MS",
        evaluation_status="preliminary",
        origin_id=preliminary_origin.resource_id)

    values = [
        "time_shift", "half_duration", "latitude", "longitude", "depth",
        "m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"
    ]
    cmt_values = {
        _i: float(buf.readline().strip().split()[-1])
        for _i in values
    }

    # Moment magnitude calculation in dyne * cm.
    m_0 = 1.0 / math.sqrt(2.0) * math.sqrt(
        cmt_values["m_rr"]**2 + cmt_values["m_tt"]**2 + cmt_values["m_pp"]**2 +
        2.0 * cmt_values["m_rt"]**2 + 2.0 * cmt_values["m_rp"]**2 +
        2.0 * cmt_values["m_tp"]**2)
    m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)

    # Convert to meters.
    cmt_values["depth"] *= 1000.0
    # Convert to Newton meter.
    values = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    for value in values:
        cmt_values[value] /= 1E7

    cmt_origin = Origin(
        resource_id=_get_resource_id(event_name, "origin", tag="cmt"),
        time=origin_time + cmt_values["time_shift"],
        longitude=cmt_values["longitude"],
        latitude=cmt_values["latitude"],
        depth=cmt_values["depth"],
        origin_type="centroid",
        # Could rarely be different than the epicentral region.
        region=_fe.get_region(longitude=cmt_values["longitude"],
                              latitude=cmt_values["latitude"])
        # No evaluation status as it could be any of several and the file
        # format does not provide that information.
    )

    cmt_mag = Magnitude(
        resource_id=_get_resource_id(event_name, "magnitude", tag="mw"),
        # Round to 2 digits.
        mag=round(m_w, 2),
        magnitude_type="mw",
        origin_id=cmt_origin.resource_id)

    foc_mec = FocalMechanism(
        resource_id=_get_resource_id(event_name, "focal_mechanism"),
        # The preliminary origin most likely triggered the focal mechanism
        # determination.
        triggering_origin_id=preliminary_origin.resource_id)

    tensor = Tensor(m_rr=cmt_values["m_rr"],
                    m_pp=cmt_values["m_pp"],
                    m_tt=cmt_values["m_tt"],
                    m_rt=cmt_values["m_rt"],
                    m_rp=cmt_values["m_rp"],
                    m_tp=cmt_values["m_tp"])

    # Source time function is a triangle, according to the SPECFEM manual.
    stf = SourceTimeFunction(
        type="triangle",
        # The duration is twice the half duration.
        duration=2.0 * cmt_values["half_duration"])

    mt = MomentTensor(
        resource_id=_get_resource_id(event_name, "moment_tensor"),
        derived_origin_id=cmt_origin.resource_id,
        moment_magnitude_id=cmt_mag.resource_id,
        # Convert to Nm.
        scalar_moment=m_0 / 1E7,
        tensor=tensor,
        source_time_function=stf)

    # Assemble everything.
    foc_mec.moment_tensor = mt

    ev = Event(resource_id=_get_resource_id(event_name, "event"),
               event_type="earthquake")
    ev.event_descriptions.append(
        EventDescription(text=event_name, type="earthquake name"))
    ev.comments.append(
        Comment(text="Hypocenter catalog: %s" % hypocenter_catalog,
                force_resource_id=False))

    ev.origins.append(cmt_origin)
    ev.origins.append(preliminary_origin)
    ev.magnitudes.append(cmt_mag)
    ev.magnitudes.append(preliminary_bw_magnitude)
    ev.magnitudes.append(preliminary_sw_magnitude)
    ev.focal_mechanisms.append(foc_mec)

    # Set the preferred items.
    ev.preferred_origin_id = cmt_origin.resource_id.id
    ev.preferred_magnitude_id = cmt_mag.resource_id.id
    ev.preferred_focal_mechanism_id = foc_mec.resource_id.id

    ev.scope_resource_ids()

    return ev
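A worked instance of the moment-magnitude formula above, with the scalar moment in dyne * cm as in the file (the value is hypothetical):

import math

m_0 = 1.0e24  # dyne * cm, hypothetical scalar moment
m_w = 2.0 / 3.0 * (math.log10(m_0) - 16.1)
print(round(m_w, 2))  # 5.27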
Example #31
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
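Assuming this is the reader ObsPy registers for the NDK format, the usual entry point is read_events, which auto-detects the format (file name hypothetical):

from obspy import read_events

cat = read_events('catalogue.ndk')
print(len(cat), cat[0].preferred_magnitude().mag)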
Example #32
def _detect(detector,
            st,
            threshold,
            trig_int,
            moveout=0,
            min_trig=0,
            process=True,
            extract_detections=False,
            cores=1):
    """
    Detect within continuous data using the subspace method.

    Not to be called directly, use the detector.detect method.

    :type detector: eqcorrscan.core.subspace.Detector
    :param detector: Detector to use.
    :type st: obspy.core.stream.Stream
    :param st: Un-processed stream to detect within using the subspace \
        detector
    :type threshold: float
    :param threshold: Threshold value for detections between 0-1
    :type trig_int: float
    :param trig_int: Minimum trigger interval in seconds.
    :type moveout: float
    :param moveout: Maximum allowable moveout window for non-multiplexed,
        network detection.  See note.
    :type min_trig: int
    :param min_trig: Minimum number of stations exceeding threshold for \
        non-multiplexed, network detection. See note.
    :type process: bool
    :param process: Whether or not to process the stream according to the \
        parameters defined by the detector.  Default is to process the \
        data (True).
    :type extract_detections: bool
    :param extract_detections: Whether to extract waveforms for each \
        detection or not, if true will return detections and streams.
    :type cores: int
    :param cores: Number of cores to use for processing, defaults to 1.

    :return: list of detections
    :rtype: list of eqcorrscan.core.match_filter.Detection
    """
    detections = []
    # First process the stream
    if process:
        Logger.info('Processing Stream')
        stream, stachans = _subspace_process(
            streams=[st.copy()],
            lowcut=detector.lowcut,
            highcut=detector.highcut,
            filt_order=detector.filt_order,
            sampling_rate=detector.sampling_rate,
            multiplex=detector.multiplex,
            stachans=detector.stachans,
            parallel=True,
            align=False,
            shift_len=None,
            reject=False,
            cores=cores)
    else:
        # Check the sampling rate at the very least
        for tr in st:
            if not tr.stats.sampling_rate == detector.sampling_rate:
                raise ValueError('Sampling rates do not match.')
        stream = [st]
        stachans = detector.stachans
    outtic = time.perf_counter()
    # If multiplexed, how many samples do we increment by?
    if detector.multiplex:
        Nc = len(detector.stachans)
    else:
        Nc = 1
    # Here do all ffts
    fft_vars = _do_ffts(detector, stream, Nc)
    Logger.info('Computing detection statistics')
    Logger.info('Preallocating stats matrix')
    stats = np.zeros(
        (len(stream[0]), (len(stream[0][0]) // Nc) - (fft_vars[4] // Nc) + 1))
    for det_freq, data_freq_sq, data_freq, i in zip(fft_vars[0], fft_vars[1],
                                                    fft_vars[2],
                                                    np.arange(len(stream[0]))):
        # Calculate det_statistic in frequency domain
        stats[i] = _det_stat_freq(det_freq, data_freq_sq, data_freq,
                                  fft_vars[3], Nc, fft_vars[4], fft_vars[5])
        Logger.info('Stats matrix is shape %s' % str(stats[i].shape))
    trig_int_samples = detector.sampling_rate * trig_int
    Logger.info('Finding peaks')
    peaks = []
    for i in range(len(stream[0])):
        peaks.append(
            findpeaks.find_peaks2_short(arr=stats[i],
                                        thresh=threshold,
                                        trig_int=trig_int_samples))
    if not detector.multiplex:
        # Conduct network coincidence triggering
        peaks = findpeaks.coin_trig(peaks=peaks,
                                    samp_rate=detector.sampling_rate,
                                    moveout=moveout,
                                    min_trig=min_trig,
                                    stachans=stachans,
                                    trig_int=trig_int)
    else:
        peaks = peaks[0]
    if len(peaks) > 0:
        for peak in peaks:
            detecttime = st[0].stats.starttime + \
                (peak[1] / detector.sampling_rate)
            rid = ResourceIdentifier(id=detector.name + '_' + str(detecttime),
                                     prefix='smi:local')
            ev = Event(resource_id=rid)
            cr_i = CreationInfo(author='EQcorrscan',
                                creation_time=UTCDateTime())
            ev.creation_info = cr_i
            # All detection info in Comments for lack of a better idea
            thresh_str = 'threshold=' + str(threshold)
            ccc_str = 'detect_val=' + str(peak[0])
            used_chans = 'channels used: ' +\
                ' '.join([str(pair) for pair in detector.stachans])
            ev.comments.append(Comment(text=thresh_str))
            ev.comments.append(Comment(text=ccc_str))
            ev.comments.append(Comment(text=used_chans))
            for stachan in detector.stachans:
                tr = st.select(station=stachan[0], channel=stachan[1])
                if tr:
                    net_code = tr[0].stats.network
                else:
                    net_code = ''
                pick_tm = detecttime
                wv_id = WaveformStreamID(network_code=net_code,
                                         station_code=stachan[0],
                                         channel_code=stachan[1])
                ev.picks.append(Pick(time=pick_tm, waveform_id=wv_id))
            detections.append(
                Detection(template_name=detector.name,
                          detect_time=detecttime,
                          no_chans=len(detector.stachans),
                          detect_val=peak[0],
                          threshold=threshold,
                          typeofdet='subspace',
                          threshold_type='abs',
                          threshold_input=threshold,
                          chans=detector.stachans,
                          event=ev))
    outtoc = time.perf_counter()
    Logger.info('Detection took %s seconds' % str(outtoc - outtic))
    if extract_detections:
        detection_streams = extract_from_stream(st, detections)
        return detections, detection_streams
    return detections
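As the docstring notes, this helper is reached through the detector's public detect method; a hedged sketch, with detector construction and stream loading omitted:

# 'detector' is assumed to be a constructed eqcorrscan.core.subspace.Detector
# and 'st' an unprocessed obspy Stream covering the search period.
detections = detector.detect(st=st, threshold=0.5, trig_int=3.0, process=True)
for det in detections:
    print(det.detect_time, det.detect_val)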
Example #33
def brightness(stations,
               nodes,
               lags,
               stream,
               threshold,
               thresh_type,
               template_length,
               template_saveloc,
               coherence_thresh,
               coherence_stations=['all'],
               coherence_clip=False,
               gap=2.0,
               clip_level=100,
               instance=0,
               pre_pick=0.2,
               plotsave=True,
               cores=1):
    r"""Function to calculate the brightness function in terms of energy for \
    a day of data over the entire network for a given grid of nodes.

    Note data in stream must be all of the same length and have the same
    sampling rates.

    :type stations: list
    :param stations: List of station names, in the form where stations[i] \
        refers to nodes[i][:] and lags[i][:]
    :type nodes: list, tuple
    :param nodes: List of node points where nodes[i] refers to stations[i] \
        and nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is \
        longitude in degrees, nodes[:][:][2] is depth in km.
    :type lags: :class: 'numpy.array'
    :param lags: Array of arrays where lags[i][:] refers to stations[i]. \
        lags[i][j] should be the delay to the nodes[i][j] for stations[i] in \
        seconds.
    :type stream: :class: `obspy.Stream`
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold: Threshold value for detection of template within the \
        brightness function
    :type thresh_type: str
    :param thresh_type: Either MAD or abs where MAD is the Median Absolute \
        Deviation and abs is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple of floats
    :param coherence_thresh: Threshold for removing incoherant peaks in the \
            network response, those below this will not be used as templates. \
            Must be in the form of (a,b) where the coherence is given by: \
            a-kchan/b where kchan is the number of channels used to compute \
            the coherence
    :type coherence_stations: list
    :param coherence_stations: List of stations to use in the coherence \
            thresholding - defaults to 'all' which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip: Start and end in seconds of data to window around, \
            defaults to False, which uses all the data given.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotsave: bool
    :param plotsave: Save or show plots, if False will try and show the plots \
            on screen - as this is designed for bulk use this is set to \
            True to save any plots rather than show them if you create \
            them - changes the backend of matplotlib, so if it is set to \
            False you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type clip_level: float
    :param clip_level: Multiplier applied to the mean deviation of the energy \
                    as an upper limit, used to remove spikes (earthquakes, \
                    lightning, electrical spikes) from the energy stack.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections

    :return: list of templates as :class: `obspy.Stream` objects
    """
    from eqcorrscan.core.template_gen import _template_gen
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    # from joblib import Parallel, delayed
    from multiprocessing import Pool, cpu_count
    from copy import deepcopy
    from obspy import read as obsread
    from obspy.core.event import Catalog, Event, Pick, WaveformStreamID, Origin
    from obspy.core.event import EventDescription, CreationInfo, Comment
    import obspy
    import matplotlib.pyplot as plt
    from eqcorrscan.utils import EQcorrscan_plotting as plotting
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high gain -
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise OverflowError('Too many streams, either re-code and cope with '
                            'more memory usage, or less precision, or '
                            'reduce data volume')
    detections = []
    detect_lags = []
    parallel = True
    plotvar = True
    mem_issue = False
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    if not parallel:
        for i in range(0, len(nodes)):
            print(i)
            if not mem_issue:
                j, a = _node_loop(stations, lags[:, i], stream, plot=True)
                if 'energy' not in locals():
                    energy = a
                else:
                    energy = np.concatenate((energy, a), axis=0)
                print('energy: ' + str(np.shape(energy)))
            else:
                j, filename = _node_loop(stations, lags[:, i], stream, i,
                                         mem_issue)
        energy = np.array(energy)
        print(np.shape(energy))
    else:
        # Parallel run
        num_cores = cores
        if num_cores > len(nodes):
            num_cores = len(nodes)
        if num_cores > cpu_count():
            num_cores = cpu_count()
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_node_loop,
                             args=(stations, lags[:, i], stream, i, clip_level,
                                   mem_issue, instance))
            for i in range(len(nodes))
        ]
        pool.close()
        if not mem_issue:
            print('Computing the cumulative network response from memory')
            energy = [p.get() for p in results]
            pool.join()
            energy.sort(key=lambda tup: tup[0])
            energy = [node[1] for node in energy]
            energy = np.concatenate(energy, axis=0)
            print(energy.shape)
        else:
            pool.join()
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indeces = np.argmax(energy, axis=0)  # Indices of maximum energy
        print(indeces.shape)
        cum_net_resp = np.array([np.nan] * len(indeces))
        cum_net_resp[0] = energy[indeces[0]][0]
        peak_nodes = [nodes[indeces[0]]]
        for i in range(1, len(indeces)):
            cum_net_resp[i] = energy[indeces[i]][i]
            peak_nodes.append(nodes[indeces[i]])
        del energy, indeces
    else:
        print('Reading the temp files and computing network response')
        node_splits = len(nodes) // num_cores
        indeces = [range(node_splits)]
        for i in range(1, num_cores - 1):
            indeces.append(range(node_splits * i, node_splits * (i + 1)))
        indeces.append(range(node_splits * (i + 1), len(nodes)))
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_cum_net_resp, args=(indeces[i], instance))
            for i in range(num_cores)
        ]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indeces = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indeces = np.argmax(cum_net_resp, axis=0)
        print(indeces.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indeces[i]][i] for i in range(len(indeces))])
        peak_nodes = [
            nodes[node_indeces[indeces[i]][i]] for i in range(len(indeces))
        ]
        del indeces, node_indeces
    if plotvar:
        cum_net_trace = deepcopy(stream[0])
        cum_net_trace.data = cum_net_resp
        cum_net_trace.stats.station = 'NR'
        cum_net_trace.stats.channel = ''
        cum_net_trace.stats.network = 'Z'
        cum_net_trace.stats.location = ''
        cum_net_trace.stats.starttime = stream[0].stats.starttime
        cum_net_trace = obspy.Stream(cum_net_trace)
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
        # np.save('cum_net_resp.npy',cum_net_resp)
        #     cum_net_trace.plot(size=(800,600), equal_scale=False,\
        #                        outfile='NR_timeseries.eps')

    # Find detections within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections into templates')
        # Generate a catalog of detections
        detections_cat = Catalog()
        for j, detection in enumerate(detections):
            print('Converting detection ' + str(j) + ' of ' +
                  str(len(detections)))
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            print(node)
            # Look up node in nodes and find the associated lags
            index = nodes.index(node)
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code='NA')
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            print('Generating template for detection: ' + str(j))
            template = (_template_gen(event.picks, copy_of_stream,
                                      template_length, 'all'))
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read.
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
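            # Hypothetical example: with coherence_thresh == (0.5, 10.0) and
            # kchan == 2, coh_thresh == 0.5 - 2 / 10.0 == 0.3, i.e. the
            # required coherence relaxes as more channels contribute.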
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherent = True
            else:
                print('Template was incoherent, coherence level: ' +
                      str(temp_coher))
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                print('No template generated: coherence below threshold')
    if plotvar:
        all_detections = [(cum_net_trace[-1].stats.starttime +
                           detection.detect_time).datetime
                          for detection in detections]
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             save=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
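
The Comment attached to each detection origin above is just free text on an
obspy Origin. A minimal, self-contained sketch of that pattern (the lags and
counts here are made up for illustration):

from obspy.core.event import Comment, Event, Origin

event = Event()
event.origins.append(Origin())
detect_lags = [0.1, 0.2, 0.3]  # hypothetical lags, one per station
ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
event.origins[0].comments.append(ksta)
print(event.origins[0].comments[0].text)  # Number of stations=3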
Beispiel #34
0
def _read_focmec_lst(lines):
    """
    Read given data into an :class:`~obspy.core.event.Event` object.

    Unfortunately, "lst" is not a well defined file format but what it outputs
    depends on input data, program parameters, program version and also
    resulting focal mechanisms. But it has way more information than the "out"
    format, so it's worth the additional effort to try and parse all flavors of
    it.

    :type lines: list
    :param lines: List of decoded unicode strings with data from a FOCMEC lst
        file.
    """
    event, _ = _read_common_header(lines)
    # ignore separator lines at the very end of the file
    separator_indices = [
        i for i, line in enumerate(lines)
        if _is_lst_block_start(line) and i < len(lines) - 1
    ]
    if not separator_indices:
        return event
    header = lines[:separator_indices[0]]
    # get how many polarities are used
    polarity_count, _ = _get_polarity_count(header)
    # compute azimuthal gap
    for i, line in enumerate(header):
        if line.split()[:3] == ['Statn', 'Azimuth', 'TOA']:
            break
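    # Note: this assumes the header always contains the 'Statn Azimuth TOA'
    # table header; after the loop, `i` holds the index of that line.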
    azimuths = []
    emergent_ignored = False
    try:
        for line in header[i + 1:]:
            # some lst files have a comment about not using emergent
            # polarities right in the middle of the polarity block
            if line.strip().lower() == 'not including emergent polarity picks':
                emergent_ignored = True
                continue
            # the polarity info block ends with the polarity summary line
            if _match_polarity_summary_line(line):
                break
            sta, azimuth, takeoff_angle, key = line.split()[:4]
            # these are all keys that identify a station polarity in FOCMEC,
            # because here we do not take into account amplitude ratios for the
            # azimuthal gap
            if key in POLARITIES:
                azimuths.append((float(azimuth), key))
    except IndexError:
        pass
    # if emergent picks were ignored in the output, keep only impulsive
    # polarities
    azimuths = sorted(azimuths)
    azimuths = [
        azimuth_ for azimuth_, key_ in azimuths
        if not emergent_ignored or key_ in POLARITIES_IMPULSIVE
    ]
    if polarity_count is not None and len(azimuths) != polarity_count:
        msg = ('Unexpected mismatch in number of polarity lines found ({:d}) '
               'and used polarities indicated by header ({:d})').format(
                   len(azimuths), polarity_count)
        warnings.warn(msg)
    if len(azimuths) > 1:
        # numpy diff on the sorted azimuth list does not compare the first
        # and last entries (wrapping through North), so add that gap manually
        azimuthal_gap = np.diff(azimuths).max()
        azimuthal_gap = max(azimuthal_gap, azimuths[0] + 360 - azimuths[-1])
    else:
        azimuthal_gap = None

    event.comments.append(Comment(text='\n'.join(header)))
    blocks = []
    for i in separator_indices[::-1]:
        blocks.append(lines[i + 1:])
        lines = lines[:i]
    blocks = blocks[::-1]
    for block in blocks:
        focmec, lines = _read_focmec_lst_one_block(block, polarity_count)
        if focmec is None:
            continue
        focmec.azimuthal_gap = azimuthal_gap
        focmec.creation_info = CreationInfo(
            version='FOCMEC', creation_time=event.creation_info.creation_time)
        event.focal_mechanisms.append(focmec)
    return event
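
The azimuthal-gap computation above reduces to a diff over the sorted
azimuths plus the wrap-around gap through North. A short sketch on made-up
azimuths (all values hypothetical):

import numpy as np

azimuths = sorted([10.0, 100.0, 250.0, 300.0])
# largest gap between neighbouring azimuths
azimuthal_gap = np.diff(azimuths).max()
# compare against the gap that wraps through North
azimuthal_gap = max(azimuthal_gap, azimuths[0] + 360 - azimuths[-1])
print(azimuthal_gap)  # 150.0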