Example #1
from typing import Union

from obspy.core.event import Catalog, Event


def remove_unreferenced(catalog: Union[Catalog, Event]) -> Catalog:
    """ Remove un-referenced arrivals, amplitudes and station_magnitudes. """
    if isinstance(catalog, Event):
        catalog = Catalog([catalog])
    catalog_out = Catalog()
    for _event in catalog:
        event = _event.copy()
        pick_ids = {p.resource_id for p in event.picks}
        # Remove unreferenced arrivals
        for origin in event.origins:
            origin.arrivals = [
                arr for arr in origin.arrivals if arr.pick_id in pick_ids]
        # Remove unreferenced amplitudes
        event.amplitudes = [
            amp for amp in event.amplitudes if amp.pick_id in pick_ids]
        amplitude_ids = {a.resource_id for a in event.amplitudes}
        # Remove now unreferenced station magnitudes
        event.station_magnitudes = [
            sta_mag for sta_mag in event.station_magnitudes
            if sta_mag.amplitude_id in amplitude_ids]
        station_magnitude_ids = {
            sta_mag.resource_id for sta_mag in event.station_magnitudes}
        # Remove unreferenced station_magnitude_contributions
        for magnitude in event.magnitudes:
            magnitude.station_magnitude_contributions = [
                sta_mag_contrib
                for sta_mag_contrib in magnitude.station_magnitude_contributions
                if sta_mag_contrib.station_magnitude_id in station_magnitude_ids]
        catalog_out.append(event)

    return catalog_out
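A minimal usage sketch for the function above, assuming my_catalog.xml is a hypothetical QuakeML file whose events carry picks and arrivals:

from obspy import read_events

cat = read_events("my_catalog.xml")  # hypothetical QuakeML file with picks
# Drop the first pick of the first event, then prune everything that
# referenced it (arrivals, amplitudes, station magnitudes, contributions)
cat[0].picks = cat[0].picks[1:]
clean_cat = remove_unreferenced(cat)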
Example #2
def rand_cat_sample(cat, n_events, cat2=None):
    """Randomly sample n_events from cat (and, in parallel, from cat2 if given)."""
    import numpy as np
    from obspy import Catalog
    rand_cat = Catalog()
    indices = np.random.choice(range(len(cat)), n_events, replace=False)
    rand_cat.events = [cat[i] for i in indices]
    if cat2:
        # Sample the same indices from the parallel catalog and return both
        rand_cat2 = Catalog()
        rand_cat2.events = [cat2[i] for i in indices]
        return rand_cat, rand_cat2
    return rand_cat
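A quick usage sketch with ObsPy's bundled example catalog (three events), sampling two of them:

from obspy import read_events

cat = read_events()  # ObsPy's bundled example catalog (3 events)
subset = rand_cat_sample(cat, n_events=2)
print(subset)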
Example #3
def bbox_two_cat(cat1, cat2, bbox, depth_thresh):
    """Filter two index-parallel catalogs by a lon/lat bounding box and depth."""
    new_cat1 = Catalog()
    new_cat2 = Catalog()
    for i, ev in enumerate(cat1):
        # bbox is ((lon_min, lon_max), (lat_min, lat_max)); depth_thresh in km
        if min(bbox[0]) <= ev.origins[-1].longitude <= max(bbox[0]) \
                and min(bbox[1]) <= ev.origins[-1].latitude <= max(bbox[1]) \
                and ev.origins[-1].depth <= depth_thresh * 1000:
            new_cat1.events.append(ev)
            new_cat2.events.append(cat2[i])
    return new_cat1, new_cat2
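A hedged call sketch; it assumes obspy's Catalog is imported alongside the function above and that the two catalogs are index-parallel. The box and threshold values are arbitrary:

from obspy import read_events

cat_a = read_events()  # stand-ins for two index-parallel catalogs
cat_b = cat_a.copy()
bbox = ((170.0, 180.0), (-42.0, -36.0))  # (lon_min, lon_max), (lat_min, lat_max)
in_a, in_b = bbox_two_cat(cat_a, cat_b, bbox, depth_thresh=30)  # depth in km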
Example #4
def find_common_events(catP, catS):
    """
    Takes parallel catalogs, one with P only, the other with added S phases
    :param catP: Catalog with only p-picks
    :param catS: Catalog with S-picks added
    :return: two parallel catalogs including events with S-picks and their corresponding P-only versions
    """
    comm_cat_S = Catalog()
    comm_cat_P = Catalog()
    for i, ev in enumerate(catS):
        if len([pk for pk in ev.picks if pk.phase_hint == 'S']) > 0:
            comm_cat_S.events.append(ev)
            comm_cat_P.events.append(catP[i])
    return comm_cat_P, comm_cat_S
Example #5
    def download_eventcat(self):
        event_cat_done = False

        while not event_cat_done:
            try:
                # Check length of request and split if longer than 20yrs.
                a = 20 * 365.25 * 24 * 3600  # 20 years in seconds
                if self.endtime - self.starttime > a:
                    # Request is too big, break it down into several requests

                    starttimes = [self.starttime, self.starttime + a]
                    while self.endtime - starttimes[-1] > a:
                        starttimes.append(starttimes[-1] + a)
                    endtimes = []
                    endtimes.extend(starttimes[1:])
                    endtimes.append(self.endtime)

                    # Query
                    self.evtcat = Catalog()
                    for st, et in zip(starttimes, endtimes):
                        self.evtcat.extend(
                            self.webclient.get_events(
                                starttime=st,
                                endtime=et,
                                minlatitude=self.eMINLAT,
                                maxlatitude=self.eMAXLAT,
                                minlongitude=self.eMINLON,
                                maxlongitude=self.eMAXLON,
                                minmagnitude=self.minmag,
                                maxmagnitude=10,
                                maxdepth=self.maxdepth))
                    event_cat_done = True

                else:
                    self.evtcat = self.webclient.get_events(
                        starttime=self.starttime,
                        endtime=self.endtime,
                        minlatitude=self.eMINLAT,
                        maxlatitude=self.eMAXLAT,
                        minlongitude=self.eMINLON,
                        maxlongitude=self.eMAXLON,
                        minmagnitude=self.minmag,
                        maxmagnitude=10,
                        maxdepth=self.maxdepth)

                    event_cat_done = True

            except IncompleteRead:
                # Server interrupted connection, just try again
                msg = "Server interrupted connection, restarting download..."
                warn(msg, UserWarning)
                print(msg)
                continue

        os.makedirs(self.evtloc, exist_ok=True)
        # check if there is a better format for event catalog
        self.evtcat.write(
            os.path.join(self.evtloc,
                         datetime.now().strftime("%Y%m%dT%H%M%S")),
            format="QUAKEML")
Example #6
def run_hashpy(catalog, config, outfile):
    """
    Wrapper on hashpy for calculating HASH focal mechanisms
    :param catalog: :class: obspy.core.event.Catalog
    :param config: Configuration dict for hashpy
    :param outfile: Path for the output QuakeML file
    :return:
    """
    new_cat = Catalog()
    for ev in catalog:
        eid = str(ev.resource_id).split('/')[-1]
        # Set up hashpy object
        hp = HashPype(**config)
        hp.input(ev, format="OBSPY")
        hp.load_velocity_models()
        hp.generate_trial_data()
        try:
            hp.calculate_takeoff_angles()
        except Exception:
            print('Error in toa calc for eid: {}'.format(eid))
            continue
        pass1 = hp.check_minimum_polarity()
        pass2 = hp.check_maximum_gap()
        if pass1 and pass2:
            try:
                hp.calculate_hash_focalmech()
                hp.calculate_quality()
            except Exception:
                print('Error in fm calc for eid: {}'.format(eid))
                continue
        else:
            print("Minimum polarity and/or maximum gap check failed")
            continue
        new_cat += hp.output(format="OBSPY")
    new_cat.write(outfile, format="QUAKEML")
    return
Example #7
    def cluster(self, method, **kwargs):
        """
        Cluster the tribe.

        Cluster templates within a tribe: returns multiple tribes each of
        which could be stacked.

        :type method: str
        :param method:
            Method of clustering, see :mod:`eqcorrscan.utils.clustering`

        :return: List of tribes.
        """
        from eqcorrscan.utils import clustering
        tribes = []
        func = getattr(clustering, method)
        if method in ['space_cluster', 'space_time_cluster']:
            cat = Catalog([t.event for t in self.templates])
            groups = func(cat, **kwargs)
            for group in groups:
                new_tribe = Tribe()
                for event in group:
                    new_tribe.templates.extend([t for t in self.templates
                                                if t.event == event])
                tribes.append(new_tribe)
        return tribes
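In use, the keyword arguments are forwarded to the chosen clustering function; a sketch assuming tribe is an existing Tribe whose template events have origins (required for distance clustering):

# d_thresh (km) and show are passed through to clustering.space_cluster
tribes = tribe.cluster(method="space_cluster", d_thresh=10.0, show=False)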
Example #8
    def test_listener(self):
        listener = CatalogListener(
            client=Client("GEONET"),
            catalog=Catalog(),
            interval=20,
            template_bank=TemplateBank(base_path=self.test_path),
            catalog_lookup_kwargs=dict(latitude=-38.8,
                                       longitude=175.8,
                                       maxradius=3.),
            keep=600,
        )
        # Use a period of known seismicity: 2019-07-06, swarm under Taupo
        seconds_before_now = UTCDateTime.now() - UTCDateTime(2019, 7, 6, 9, 12)
        listener._test_start_step = seconds_before_now
        listener.background_run(filter_func=filter_events,
                                auto_pick=False,
                                make_templates=False,
                                template_kwargs=dict(lowcut=2.,
                                                     highcut=15.,
                                                     samp_rate=50.,
                                                     filt_order=4,
                                                     prepick=0.5,
                                                     length=3,
                                                     swin="P"))
        self.assertTrue(listener.busy)
        time.sleep(120)
        listener.background_stop()
        self.assertFalse(listener.busy)
        self.assertEqual(len(listener.old_events), 1)
Example #9
    def test_read_wavename(self):
        testing_path = os.path.join(self.testing_path, '01-0411-15L.S201309')
        wavefiles = readwavename(testing_path)
        self.assertEqual(len(wavefiles), 1)
        # Test that full paths are handled
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Check the read-write s-file functionality
        with TemporaryWorkingDirectory():
            sfile = _write_nordic(test_cat[0],
                                  filename=None,
                                  userid='TEST',
                                  evtype='L',
                                  outdir='.',
                                  wavefiles=['walrus/test'],
                                  explosion=True,
                                  overwrite=True)
            self.assertEqual(readwavename(sfile), ['test'])
        # Check that multiple wavefiles are read properly
        with TemporaryWorkingDirectory():
            sfile = _write_nordic(test_cat[0],
                                  filename=None,
                                  userid='TEST',
                                  evtype='L',
                                  outdir='.',
                                  wavefiles=['walrus/test', 'albert'],
                                  explosion=True,
                                  overwrite=True)
            self.assertEqual(readwavename(sfile), ['test', 'albert'])
Example #10
    def put_templates(
        self,
        templates: Union[list, Tribe],
        update_index: bool = True,
    ) -> None:
        """
        Save templates to the database.

        Parameters
        ----------
        templates
            Templates to put into the database
        update_index
            Flag to indicate whether or not to update the entire event index
            after writing the new events.
        """
        for t in templates:
            assert isinstance(t, Template)
        catalog = Catalog([t.event for t in templates])
        self.put_events(catalog, update_index=update_index)
        inner_put_template = partial(
            _put_template,
            path_structure=self.path_structure,
            template_name_structure=self.template_name_structure,
            bank_path=self.bank_path)
        _ = [_ for _ in self.executor.map(inner_put_template, templates)]
Example #11
    def _read_from_folder(self, dirname):
        """
        Internal folder reader.

        :type dirname: str
        :param dirname: Folder to read from.
        """
        templates = _par_read(dirname=dirname, compressed=False)
        t_files = glob.glob(dirname + os.sep + '*.ms')
        tribe_cat_file = glob.glob(os.path.join(dirname, "tribe_cat.*"))
        if len(tribe_cat_file) != 0:
            tribe_cat = read_events(tribe_cat_file[0])
        else:
            tribe_cat = Catalog()
        previous_template_names = [t.name for t in self.templates]
        # Iterate over a copy since templates may be removed from the list below
        for template in list(templates):
            if template.name in previous_template_names:
                # Don't read in for templates that we already have.
                continue
            for event in tribe_cat:
                for comment in event.comments:
                    if comment.text == 'eqcorrscan_template_' + template.name:
                        template.event = event
            t_file = [t for t in t_files
                      if t.split(os.sep)[-1] == template.name + '.ms']
            if len(t_file) == 0:
                Logger.error('No waveform for template: ' + template.name)
                templates.remove(template)
                continue
            elif len(t_file) > 1:
                Logger.warning('Multiple waveforms found, using: ' + t_file[0])
            template.st = read(t_file[0])
        self.templates.extend(templates)
        return
Example #12
    def test_fail_writing(self):
        """
        Test a deliberate fail.
        """
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        test_ev = test_cat[0]
        test_cat.append(full_test_event())
        with self.assertRaises(NordicParsingError):
            # Raises error due to multiple events in catalog
            _write_nordic(test_cat, filename=None, userid='TEST',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        with self.assertRaises(NordicParsingError):
            # Raises error due to too long userid
            _write_nordic(test_ev, filename=None, userid='TESTICLE',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        with self.assertRaises(NordicParsingError):
            # Raises error due to unrecognised event type
            _write_nordic(test_ev, filename=None, userid='TEST',
                          evtype='U', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        with self.assertRaises(NordicParsingError):
            # Raises error due to no output directory
            _write_nordic(test_ev, filename=None, userid='TEST',
                          evtype='L', outdir='albatross',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        invalid_origin = test_ev.copy()
        invalid_origin.origins = []
        with self.assertRaises(NordicParsingError):
            _write_nordic(invalid_origin, filename=None, userid='TEST',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        invalid_origin = test_ev.copy()
        invalid_origin.origins[0].time = None
        with self.assertRaises(NordicParsingError):
            _write_nordic(invalid_origin, filename=None, userid='TEST',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        # Write a near empty origin
        valid_origin = test_ev.copy()
        valid_origin.origins[0].latitude = None
        valid_origin.origins[0].longitude = None
        valid_origin.origins[0].depth = None
        with NamedTemporaryFile() as tf:
            _write_nordic(valid_origin, filename=tf.name, userid='TEST',
                          evtype='L', outdir='.', wavefiles='test',
                          explosion=True, overwrite=True)
            self.assertTrue(os.path.isfile(tf.name))
Example #13
    def write(self, filename, compress=True, catalog_format="QUAKEML"):
        """
        Write the tribe to a file using tar archive formatting.

        :type filename: str
        :param filename:
            Filename to write to, if it exists it will be appended to.
        :type compress: bool
        :param compress:
            Whether to compress the tar archive or not, if False then will
            just be files in a folder.
        :type catalog_format: str
        :param catalog_format:
            What format to write the detection-catalog with. Only Nordic,
            SC3ML, QUAKEML are supported. Note that not all information is
            written for all formats (QUAKEML is the most complete, but is
            slow for IO).

        .. rubric:: Example

        >>> tribe = Tribe(templates=[Template(name='c', st=read())])
        >>> tribe.write('test_tribe')
        Tribe of 1 templates
        """
        from eqcorrscan.core.match_filter import CAT_EXT_MAP

        if catalog_format not in CAT_EXT_MAP.keys():
            raise TypeError("{0} is not supported".format(catalog_format))
        dirname, ext = os.path.splitext(filename)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        self._par_write(dirname)
        tribe_cat = Catalog()
        for t in self.templates:
            if t.event is not None:
                # Check that the name in the comment matches the template name
                for comment in t.event.comments:
                    if comment.text and comment.text.startswith(
                            "eqcorrscan_template_"):
                        comment.text = "eqcorrscan_template_{0}".format(t.name)
                tribe_cat.append(t.event)
        if len(tribe_cat) > 0:
            tribe_cat.write(
                os.path.join(dirname, 'tribe_cat.{0}'.format(
                    CAT_EXT_MAP[catalog_format])), format=catalog_format)
        for template in self.templates:
            template.st.write(
                os.path.join(dirname, '{0}.ms'.format(template.name)),
                format='MSEED')
        if compress:
            if not filename.endswith(".tgz"):
                Logger.info("Appending '.tgz' to filename.")
                filename += ".tgz"
            with tarfile.open(filename, "w:gz") as tar:
                tar.add(dirname, arcname=os.path.basename(dirname))
            shutil.rmtree(dirname)
        return self
Example #14
def get_global_earthquakes(client, starttime, endtime):
    try:
        return client.get_events(
            starttime=starttime,
            endtime=endtime,
            minmagnitude=6)
    except Exception as e:
        print(e)
        return Catalog()
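A usage sketch against a public FDSN server (network access required):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("IRIS")
cat = get_global_earthquakes(
    client, UTCDateTime(2020, 1, 1), UTCDateTime(2020, 2, 1))
print(len(cat), "M6+ events")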
Example #15
def mseed_2_Party(wav_dir, cat, temp_cat, lowcut, highcut, filt_order,
                  process_length, prepick):
    """
    Take waveforms and catalog and create a Party object
    :param wav_dir:
    :param cat:
    :return:
    """

    partay = Party()
    # Get templates first
    temp_tup = [(ev, str(ev.resource_id).split('/')[-1].split('_')[0])
                for ev in cat
                if str(ev.resource_id).split('/')[-1].split('_')[-1] == 'self']
    temp_evs, temp_ids = zip(*temp_tup)
    temp_evs = list(temp_evs)
    wav_files = ['%s/%s.mseed' % (wav_dir, str(ev.resource_id).split('/')[-1])
                 for ev in temp_evs]
    temp_wavs = [read(wav) for wav in wav_files if os.path.isfile(wav)]
    for temp_wav, temp_ev in zip(temp_wavs, temp_evs):
        # Create a Template object, assign it to a Family and then to the Party
        tid = str(temp_ev.resource_id).split('/')[-1].split('_')[0]
        if len([ev for ev in temp_cat
                if str(ev.resource_id).split('/')[-1] == tid]) > 0:
            temp_ev = [ev for ev in temp_cat
                           if str(ev.resource_id).split('/')[-1] == tid][0]
        tmp = Template(name=tid, st=temp_wav, lowcut=lowcut, highcut=highcut,
                       samp_rate=temp_wav[0].stats.sampling_rate,
                       filt_order=filt_order, process_length=process_length,
                       prepick=prepick, event=temp_ev)
        fam_det_evs = [ev for ev in cat
                       if str(ev.resource_id).split('/')[-1].split('_')[-1] != 'self'
                       and str(ev.resource_id).split('/')[-1].split('_')[0] == tid]
        fam_dets = [Detection(template_name=str(ev.resource_id).split('/')[-1].split('_')[0],
                              detect_time=UTCDateTime([com.text.split('=')[-1]
                                                       for com in ev.comments
                                                       if com.text.split('=')[0] == 'det_time'][0]),
                              no_chans=len(ev.picks),
                              chans=[pk.waveform_id.station_code
                                     for pk in ev.picks],
                              detect_val=float([com.text.split('=')[-1]
                                                for com in ev.comments
                                                if com.text.split('=')[0] == 'detect_val'][0]),
                              threshold=float([com.text.split('=')[-1]
                                               for com in ev.comments
                                               if com.text.split('=')[0] == 'threshold'][0]),
                              typeofdet='corr',
                              threshold_type='MAD',
                              threshold_input=8.0,
                              event=ev, id=str(ev.resource_id).split('/')[-1])
                    for ev in fam_det_evs]
        fam_cat = Catalog(events=[det.event for det in fam_dets])
        fam = Family(template=tmp, detections=fam_dets, catalog=fam_cat)
        partay.families.append(fam)
    return partay
Example #16
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        # Column is in milliseconds; OriginQuality.standard_error is in seconds
        oq.standard_error = rms / 1e3
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
Example #17
def filter_catalog(catalog, lon0, lat0, minrad, maxrad):
    temp_events = []
    for ev in catalog:
        ev_lat = ev.origins[0]['latitude']
        ev_lon = ev.origins[0]['longitude']
        distaz = gps2dist_azimuth(ev_lat, ev_lon, lat0, lon0)
        dist_m = distaz[0]
        dist_deg = kilometer2degrees(dist_m / 1000.0)
        if minrad <= dist_deg <= maxrad:
            temp_events.append(ev)
    cat = Catalog(events=temp_events)
    return cat
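A usage sketch with the example catalog; gps2dist_azimuth and kilometer2degrees come from obspy.geodetics, and the reference point is arbitrary:

from obspy import read_events
from obspy.geodetics import gps2dist_azimuth, kilometer2degrees

cat = read_events()  # ObsPy's bundled example catalog
# Keep events between 0 and 20 degrees from the reference point (lon0, lat0)
near = filter_catalog(cat, lon0=80.0, lat0=42.0, minrad=0.0, maxrad=20.0)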
Example #18
def remove_bad_picks(cat, st_dev):
    """Remove events that have one or more arrivals above the residual threshold."""
    filtered_cat = Catalog()
    for event in cat:
        pref_o = event.preferred_origin()
        bad_arrivals = [x for x in pref_o.arrivals if x.time_residual > st_dev]
        if not bad_arrivals:
            filtered_cat.append(event)
    return filtered_cat
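A hedged call sketch; it assumes cat is a located catalog whose preferred origins carry arrivals with time_residual set, and the 0.5 s threshold is arbitrary:

# Keep only events whose arrivals all have residuals <= 0.5 s
filtered = remove_bad_picks(cat, st_dev=0.5)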
Example #19
def read_quakemigrate(run_dir, units, run_subname="", local_mag_ph="S"):
    """
    Reads the .event and .picks outputs, and .amps outputs if available, from a
    QuakeMigrate run into an obspy Catalog object.

    NOTE: if a station_corrections dict was used to calculate the
    network-averaged local magnitude, this information will not be included in
    the obspy event object. There might therefore be a discrepancy between the
    mean of the StationMagnitudes and the event magnitude.

    Parameters
    ----------
    run_dir : str
        Path to QuakeMigrate run directory.
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths and
        uncertainties in the .event files).
    run_subname : str, optional
        Run_subname string (if applicable).
    local_mag_ph : {"S", "P"}, optional
        Amplitude measurement used to calculate local magnitudes. (Default "S")

    Returns
    -------
    cat : `obspy.Catalog` object
        Catalog containing events in the specified QuakeMigrate run directory.

    """

    locate_dir = pathlib.Path(run_dir) / "locate" / run_subname
    events_dir = locate_dir / "events"

    event_files = []
    if events_dir.is_dir():
        try:
            files = events_dir.glob("*.event")
            first = next(files)
            event_files = chain([first], files)
        except StopIteration:
            # No .event files found
            pass

    cat = Catalog()

    for eventf in event_files:
        event = _read_single_event(eventf, locate_dir, units, local_mag_ph)
        if event is None:
            continue
        else:
            cat.append(event)

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__

    return cat
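A call sketch; the run directory and output path are hypothetical:

cat = read_quakemigrate("/data/QM_runs/example_run", units="km")
cat.write("qm_run_catalog.xml", format="QUAKEML")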
Example #20
    def __init__(self):
        # handle imports in init to avoid circular imports
        import obspy.core.event as ev
        from obspy import UTCDateTime, Catalog

        self.ev = ev
        self.UTCDateTime = UTCDateTime
        self.Catalog = Catalog
        self.ResourceIdentifier = ev.ResourceIdentifier
        # create events and bind to self
        self.time = UTCDateTime("2016-05-04T12:00:01")
        events = [self._create_event()]
        self.catalog = Catalog(events=events)
Example #21
def get_japan_earthquakes(client, starttime, endtime):
    try:
        return client.get_events(
            starttime=starttime,
            endtime=endtime,
            latitude=35.6895,
            longitude=139.6917,
            maxradius=15,
            maxmagnitude=6)
    except Exception as e:
        print(e)
        return Catalog()
Example #22
def space_cluster(catalog, d_thresh, show=True):
    """
    Cluster a catalog by distance only.

    Will compute the\
    matrix of physical distances between events and utilize the\
    scipy.clustering.hierarchy module to perform the clustering.

    :type catalog: obspy.Catalog
    :param catalog: Catalog of events to clustered
    :type d_thresh: float
    :param d_thresh: Maximum inter-event distance threshold

    :returns: list of Catalog classes
    """
    from scipy.spatial.distance import squareform
    from scipy.cluster.hierarchy import linkage, dendrogram, fcluster
    import matplotlib.pyplot as plt
    from obspy import Catalog

    # Compute the distance matrix and linkage
    dist_mat = dist_mat_km(catalog)
    dist_vec = squareform(dist_mat)
    Z = linkage(dist_vec, method='average')

    # Cluster the linkage using the given threshold as the cutoff
    indices = fcluster(Z, t=d_thresh, criterion='distance')
    group_ids = list(set(indices))
    indices = [(indices[i], i) for i in range(len(indices))]

    if show:
        # Plot the dendrogram...if it's not way too huge
        dendrogram(Z, color_threshold=d_thresh, distance_sort='ascending')
        plt.show()

    # Sort by group id
    indices.sort(key=lambda tup: tup[0])
    groups = []
    for group_id in group_ids:
        group = Catalog()
        for ind in indices:
            if ind[0] == group_id:
                group.append(catalog[ind[1]])
            elif ind[0] > group_id:
                # Because we have sorted by group id, when the index is greater
                # than the group_id we can break the inner loop.
                # Patch applied by CJC 05/11/2015
                groups.append(group)
                break
    groups.append(group)
    return groups
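A sketch of calling it directly, assuming dist_mat_km from eqcorrscan.utils.clustering is in scope (as it is in the function's home module); the 100 km cutoff is arbitrary:

from obspy import read_events

cat = read_events()  # ObsPy's bundled example catalog
groups = space_cluster(cat, d_thresh=100.0, show=False)
print(len(groups), "spatial groups")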
Example #23
def run(**kwargs):
    config = read_config(config_file=kwargs.get("config_file", None))
    debug = kwargs.get("debug", False)
    update_bank = kwargs.get("update_bank", True)
    listener_starttime = kwargs.get("listener_starttime", None)
    if debug:
        config.log_level = "DEBUG"
        print("Using the following configuration:\n{0}".format(config))
    config.setup_logging()
    Logger.debug("Running in debug mode - expect lots of output!")

    client = config.rt_match_filter.get_client()

    trigger_func = partial(
        magnitude_rate_trigger_func,
        magnitude_threshold=config.reactor.magnitude_threshold,
        rate_threshold=config.reactor.rate_threshold,
        rate_bin=config.reactor.rate_radius,
        minimum_events_in_bin=config.reactor.minimum_events_in_bin)

    template_bank = TemplateBank(
        config.database_manager.event_path,
        name_structure=config.database_manager.name_structure,
        event_format=config.database_manager.event_format,
        path_structure=config.database_manager.path_structure,
        event_ext=config.database_manager.event_ext,
        executor=ProcessPoolExecutor())

    if update_bank:
        Logger.info("Updating bank before running")
        template_bank.update_index()

    listener = CatalogListener(
        client=client,
        catalog_lookup_kwargs=config.reactor.catalog_lookup_kwargs,
        template_bank=template_bank,
        interval=600,
        keep=86400.,
        catalog=Catalog(),
        waveform_client=config.rt_match_filter.get_waveform_client())

    reactor = Reactor(client=client,
                      listener=listener,
                      trigger_func=trigger_func,
                      template_database=template_bank,
                      config=config,
                      listener_starttime=listener_starttime)
    reactor.run(max_run_length=config.reactor.max_run_length)
    return
Example #24
    def __init__(
        self,
        client,
        template_bank: TemplateBank,
        catalog: Catalog = None,
        catalog_lookup_kwargs: dict = None,
        interval: float = 10,
        keep: float = 86400,
        waveform_client=None,
    ):
        self.client = client
        self.waveform_client = waveform_client or client
        if catalog is None:
            catalog = Catalog()
        self.set_old_events(
            [EventInfo(ev.resource_id.id, event_time(ev)) for ev in catalog])
        self.template_bank = template_bank
        self.catalog_lookup_kwargs = catalog_lookup_kwargs or dict()
        self.interval = interval
        self.keep = keep
        self.threads = []
        self.triggered_events = Catalog()
        self.busy = False
        self.previous_time = UTCDateTime.now()
Example #25
def consolidate_qmls(directory, outfile=False):
    """
    Take a directory of single-event qml files (from the function above) and
    consolidate them into one, year-long Catalog.write() qml file.
    :param directory: Glob pattern matching the qml files (passed to glob)
    :param outfile: Defaults to False, else is path to new outfile
    :return: obspy.core.Catalog
    """
    qmls = glob(directory)
    cat = Catalog()
    for qml in qmls:
        cat += read_events(qml)
    if outfile:
        cat.write(outfile)
    return cat
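A call sketch; the glob pattern and output path are hypothetical:

cat = consolidate_qmls("/data/qmls/2019/*.xml", outfile="2019_catalog.xml")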
Example #26
def space_time_cluster(catalog, t_thresh, d_thresh):
    """
    Cluster detections in space and time.

    Use to separate repeaters from other events.  Clusters by distance
    first, then removes events in those groups that are at different times.

    :type catalog: obspy.core.event.Catalog
    :param catalog: Catalog of events to be clustered
    :type t_thresh: float
    :param t_thresh: Maximum inter-event time threshold in seconds
    :type d_thresh: float
    :param d_thresh: Maximum inter-event distance in km

    :returns: list of :class:`obspy.core.event.Catalog` objects
    :rtype: list

    >>> from eqcorrscan.utils.clustering import space_time_cluster
    >>> from obspy.clients.fdsn import Client
    >>> from obspy import UTCDateTime
    >>> client = Client("https://earthquake.usgs.gov")
    >>> starttime = UTCDateTime("2002-01-01")
    >>> endtime = UTCDateTime("2002-02-01")
    >>> cat = client.get_events(starttime=starttime, endtime=endtime,
    ...                         minmagnitude=6)
    >>> groups = space_time_cluster(catalog=cat, t_thresh=86400, d_thresh=1000)
    """
    initial_spatial_groups = catalog_cluster(catalog=catalog,
                                             thresh=d_thresh,
                                             metric="distance",
                                             show=False)
    # Need initial_spatial_groups to be lists at the moment
    initial_spatial_lists = []
    for group in initial_spatial_groups:
        initial_spatial_lists.append(list(group))
    # Check within these groups and throw them out if they are not close in
    # time.
    groups = []
    for group in initial_spatial_lists:
        if len(group) > 1:
            sub_time_cluster = catalog_cluster(catalog=group,
                                               thresh=t_thresh,
                                               metric="time",
                                               show=False)
            groups.extend(sub_time_cluster)
        else:
            groups.append(group)
    return [Catalog(group) for group in groups]
Example #27
    def setUpClass(cls):
        sfiles = glob.glob(
            os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         'test_data/REA/TEST_/*.S??????'))
        cls.catalog = Catalog()
        cls.streams = []
        for sfile in sfiles:
            cls.catalog += read_events(sfile)
            wavefile = readwavename(sfile)[0]
            stream_path = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'test_data/WAV/TEST_', wavefile)
            stream = read(stream_path)
            for tr in stream:
                tr.stats.channel = tr.stats.channel[0] + tr.stats.channel[-1]
            cls.streams.append(stream)
Example #28
def space_time_cluster(catalog, t_thresh, d_thresh):
    """
    Cluster detections in space and time.

    Use to separate repeaters from other events.  Clusters by distance
    first, then removes events in those groups that are at different times.

    :type catalog: obspy.core.event.Catalog
    :param catalog: Catalog of events to be clustered
    :type t_thresh: float
    :param t_thresh: Maximum inter-event time threshold in seconds
    :type d_thresh: float
    :param d_thresh: Maximum inter-event distance in km

    :returns: list of :class:`obspy.core.event.Catalog` objects
    :rtype: list

    >>> from eqcorrscan.utils.clustering import space_time_cluster
    >>> from obspy.clients.fdsn import Client
    >>> from obspy import UTCDateTime
    >>> client = Client("IRIS")
    >>> starttime = UTCDateTime("2002-01-01")
    >>> endtime = UTCDateTime("2002-02-01")
    >>> cat = client.get_events(starttime=starttime, endtime=endtime,
    ...                         minmagnitude=6, catalog="ISC")
    >>> groups = space_time_cluster(catalog=cat, t_thresh=86400, d_thresh=1000)
    """
    initial_spatial_groups = space_cluster(catalog=catalog,
                                           d_thresh=d_thresh,
                                           show=False)
    # Need initial_spatial_groups to be lists at the moment
    initial_spatial_lists = []
    for group in initial_spatial_groups:
        initial_spatial_lists.append(list(group))
    # Check within these groups and throw them out if they are not close in
    # time.
    groups = []
    for group in initial_spatial_lists:
        for master in group:
            # Iterate over a copy since events may be removed from group below
            for event in list(group):
                if abs(event.preferred_origin().time -
                       master.preferred_origin().time) > t_thresh:
                    # If greater, then just put the event in on its own
                    groups.append([event])
                    group.remove(event)
        groups.append(group)
    return [Catalog(group) for group in groups]
Example #29
def cat_from_event_ids(cat_name, event_ids, csv_file):
    """
    Create an ObsPy catalog with moment tensors from a list of event ids
    """
    c = Client("GEONET")
    cat = Catalog()
    for eventid in event_ids:
        try:
            cat += c.get_events(eventid=eventid)[0]
        except FDSNNoDataException:
            print(f"No event for {eventid}")

    new_cat = check_moment_tensor(csv_file, cat)
    print("catalog has {} events".format(len(new_cat)))

    cat_w_mt = append_mt(new_cat, csv_file)
    cat_w_mt.write(f"{cat_name}_w_mt.xml", format="QUAKEML")
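A hedged call sketch; the GeoNet event ids and CSV path are hypothetical:

cat_from_event_ids(
    cat_name="alpine_fault",
    event_ids=["2016p858000", "2016p858951"],  # hypothetical GeoNet ids
    csv_file="moment_tensors.csv")             # hypothetical MT table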
Example #30
    def get_events(self, starttime=UTCDateTime(2000, 1, 1),
                   endtime=UTCDateTime.now(), **kwargs):

        chunk_length = 365 * 86400  # Query length in seconds
        events = Catalog()
        while starttime <= endtime:
            print(starttime)
            # Clamp the final chunk so the query never runs past endtime
            chunk_end = min(starttime + chunk_length, endtime)
            events += self.client.get_events(starttime=starttime,
                                             endtime=chunk_end,
                                             **kwargs)
            starttime += chunk_length

        self.events = _cat2df(events)
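A call sketch for the method above, assuming self.client is an FDSN client and downloader is a hypothetical instance of the enclosing class; extra kwargs are forwarded to client.get_events:

from obspy import UTCDateTime

downloader.get_events(
    starttime=UTCDateTime(2015, 1, 1),
    endtime=UTCDateTime(2018, 1, 1),
    minmagnitude=5.5)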