Example #1
    def download_eventcat(self):
        event_cat_done = False

        while not event_cat_done:
            try:
                # Check the length of the request and split it if longer than 20 years.
                a = 20 * 365.25 * 24 * 3600  # 20 years in seconds
                if self.endtime - self.starttime > a:
                    # Request is too big, break it down into several requests

                    starttimes = [self.starttime, self.starttime + a]
                    while self.endtime - starttimes[-1] > a:
                        starttimes.append(starttimes[-1] + a)
                    endtimes = []
                    endtimes.extend(starttimes[1:])
                    endtimes.append(self.endtime)

                    # Query
                    self.evtcat = Catalog()
                    for st, et in zip(starttimes, endtimes):
                        self.evtcat.extend(
                            self.webclient.get_events(
                                starttime=st,
                                endtime=et,
                                minlatitude=self.eMINLAT,
                                maxlatitude=self.eMAXLAT,
                                minlongitude=self.eMINLON,
                                maxlongitude=self.eMAXLON,
                                minmagnitude=self.minmag,
                                maxmagnitude=10,
                                maxdepth=self.maxdepth))
                    event_cat_done = True

                else:
                    self.evtcat = self.webclient.get_events(
                        starttime=self.starttime,
                        endtime=self.endtime,
                        minlatitude=self.eMINLAT,
                        maxlatitude=self.eMAXLAT,
                        minlongitude=self.eMINLON,
                        maxlongitude=self.eMAXLON,
                        minmagnitude=self.minmag,
                        maxmagnitude=10,
                        maxdepth=self.maxdepth)

                    event_cat_done = True

            except IncompleteRead:
                # Server interrupted connection, just try again
                msg = "Server interrupted connection, restarting download..."
                warn(msg, UserWarning)
                print(msg)
                continue

        os.makedirs(self.evtloc, exist_ok=True)
        # check if there is a better format for event catalog
        self.evtcat.write(os.path.join(
            self.evtloc,
            datetime.now().strftime("%Y%m%dT%H%M%S")),
                          format="QUAKEML")
Example #2
def remove_unreferenced(catalog: Union[Catalog, Event]) -> Catalog:
    """ Remove un-referenced arrivals, amplitudes and station_magnitudes. """
    if isinstance(catalog, Event):
        catalog = Catalog([catalog])
    catalog_out = Catalog()
    for _event in catalog:
        event = _event.copy()
        pick_ids = {p.resource_id for p in event.picks}
        # Remove unreferenced arrivals
        for origin in event.origins:
            origin.arrivals = [
                arr for arr in origin.arrivals if arr.pick_id in pick_ids]
        # Remove unreferenced amplitudes
        event.amplitudes = [
            amp for amp in event.amplitudes if amp.pick_id in pick_ids]
        amplitude_ids = {a.resource_id for a in event.amplitudes}
        # Remove now unreferenced station magnitudes
        event.station_magnitudes = [
            sta_mag for sta_mag in event.station_magnitudes
            if sta_mag.amplitude_id in amplitude_ids]
        station_magnitude_ids = {
            sta_mag.resource_id for sta_mag in event.station_magnitudes}
        # Remove unreferenced station_magnitude_contributions
        for magnitude in event.magnitudes:
            magnitude.station_magnitude_contributions = [
                sta_mag_contrib
                for sta_mag_contrib in magnitude.station_magnitude_contributions
                if sta_mag_contrib.station_magnitude_id in station_magnitude_ids]
        catalog_out.append(event)

    return catalog_out
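
A possible usage sketch (the QuakeML path is hypothetical):

from obspy import read_events

cat = read_events("my_catalog.xml")  # hypothetical input file
clean_cat = remove_unreferenced(cat)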
Example #3
def run_hashpy(catalog, config, outfile):
    """
    Wrapper on hashpy for calculating HASH focal mechanisms
    :param catalog: :class: obspy.core.event.Catalog
    :param config: Configuration dict for hashpy
    :return:
    """
    new_cat = Catalog()
    for ev in catalog:
        eid = str(ev.resource_id).split('/')[-1]
        # Set up hashpy object
        hp = HashPype(**config)
        hp.input(ev, format="OBSPY")
        hp.load_velocity_models()
        hp.generate_trial_data()
        try:
            hp.calculate_takeoff_angles()
        except Exception:
            print('Error in toa calc for eid: {}'.format(eid))
            continue
        pass1 = hp.check_minimum_polarity()
        pass2 = hp.check_maximum_gap()
        if pass1 and pass2:
            try:
                hp.calculate_hash_focalmech()
                hp.calculate_quality()
            except Exception:
                print('Error in fm calc for eid: {}'.format(eid))
                continue
        else:
            print("Minimum polarity and/or maximum gap check failed")
            continue
        new_cat += hp.output(format="OBSPY")
    new_cat.write(outfile, format="QUAKEML")
    return
Example #4
    def test_fail_writing(self):
        """
        Test a deliberate fail.
        """
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        test_ev = test_cat[0]
        test_cat.append(full_test_event())
        with self.assertRaises(NordicParsingError):
            # Raises error due to multiple events in catalog
            _write_nordic(test_cat, filename=None, userid='TEST',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        with self.assertRaises(NordicParsingError):
            # Raises error due to too long userid
            _write_nordic(test_ev, filename=None, userid='TESTICLE',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        with self.assertRaises(NordicParsingError):
            # Raises error due to unrecognised event type
            _write_nordic(test_ev, filename=None, userid='TEST',
                          evtype='U', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        with self.assertRaises(NordicParsingError):
            # Raises error due to no output directory
            _write_nordic(test_ev, filename=None, userid='TEST',
                          evtype='L', outdir='albatross',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        invalid_origin = test_ev.copy()

        invalid_origin.origins = []
        with self.assertRaises(NordicParsingError):
            _write_nordic(invalid_origin, filename=None, userid='TEST',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        invalid_origin = test_ev.copy()
        invalid_origin.origins[0].time = None
        with self.assertRaises(NordicParsingError):
            _write_nordic(invalid_origin, filename=None, userid='TEST',
                          evtype='L', outdir='.',
                          wavefiles='test', explosion=True,
                          overwrite=True)
        # Write a near empty origin
        valid_origin = test_ev.copy()
        valid_origin.origins[0].latitude = None
        valid_origin.origins[0].longitude = None
        valid_origin.origins[0].depth = None
        with NamedTemporaryFile() as tf:
            _write_nordic(valid_origin, filename=tf.name, userid='TEST',
                          evtype='L', outdir='.', wavefiles='test',
                          explosion=True, overwrite=True)
            self.assertTrue(os.path.isfile(tf.name))
Example #5
def bbox_two_cat(cat1, cat2, bbox, depth_thresh):
    """Filter two parallel catalogs to events inside a lon/lat bounding box
    and shallower than depth_thresh (km)."""
    new_cat1 = Catalog()
    new_cat2 = Catalog()
    for i, ev in enumerate(cat1):
        if min(bbox[0]) <= ev.origins[-1].longitude <= max(bbox[0]) \
                and min(bbox[1]) <= ev.origins[-1].latitude <= max(bbox[1]) \
                and ev.origins[-1].depth <= depth_thresh * 1000:
            new_cat1.events.append(ev)
            new_cat2.events.append(cat2[i])
    return new_cat1, new_cat2
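
Note that bbox is indexed as bbox[0] for longitudes and bbox[1] for latitudes, and depth_thresh is in km (origin depths are in metres). A hypothetical call:

bbox = ((172.5, 174.0), (-43.8, -42.9))  # (lons, lats), illustrative values
new_cat1, new_cat2 = bbox_two_cat(cat1, cat2, bbox, depth_thresh=15.0)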
Example #6
def read_quakemigrate(run_dir, units, run_subname="", local_mag_ph="S"):
    """
    Reads the .event and .picks outputs, and .amps outputs if available, from a
    QuakeMigrate run into an obspy Catalog object.

    NOTE: if a station_corrections dict was used to calculate the
    network-averaged local magnitude, this information will not be included in
    the obspy event object. There might therefore be a discrepancy between the
    mean of the StationMagnitudes and the event magnitude.

    Parameters
    ----------
    run_dir : str
        Path to QuakeMigrate run directory.
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of depths and
        uncertainties in the .event files).
    run_subname : str, optional
        Run_subname string (if applicable).
    local_mag_ph : {"S", "P"}, optional
        Amplitude measurement used to calculate local magnitudes. (Default "S")

    Returns
    -------
    cat : `obspy.Catalog` object
        Catalog containing events in the specified QuakeMigrate run directory.

    """

    locate_dir = pathlib.Path(run_dir) / "locate" / run_subname
    events_dir = locate_dir / "events"

    event_files = []
    if events_dir.is_dir():
        try:
            # Peek at the generator to check there is at least one file
            event_files = events_dir.glob("*.event")
            first = next(event_files)
            event_files = chain([first], event_files)
        except StopIteration:
            event_files = []

    cat = Catalog()

    for eventf in event_files:
        event = _read_single_event(eventf, locate_dir, units, local_mag_ph)
        if event is None:
            continue
        else:
            cat.append(event)

    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__

    return cat
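
A usage sketch, assuming a QuakeMigrate run directory (the path is hypothetical) with a LUT defined in km:

cat = read_quakemigrate("/path/to/qm_run", units="km")
print(cat)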
Example #7
def remove_bad_picks(cat, st_dev):
    # For removing events with 1 or more bad picks
    filtered_cat = Catalog()
    for event in cat:
        pref_o = event.preferred_origin()
        bad_arrivals = [x for x in pref_o.arrivals if x.time_residual > st_dev]
        if bad_arrivals:
            continue
        filtered_cat.append(event)
    return filtered_cat
Example #8
def space_cluster(catalog, d_thresh, show=True):
    """
    Cluster a catalog by distance only.

    Will compute the\
    matrix of physical distances between events and utilize the\
    scipy.clusering.hierarchy module to perform the clustering.

    :type catalog: obspy.Catalog
    :param catalog: Catalog of events to clustered
    :type d_thresh: float
    :param d_thresh: Maximum inter-event distance threshold

    :returns: list of Catalog classes
    """
    from scipy.spatial.distance import squareform
    from scipy.cluster.hierarchy import linkage, dendrogram, fcluster
    import matplotlib.pyplot as plt
    from obspy import Catalog

    # Compute the distance matrix and linkage
    dist_mat = dist_mat_km(catalog)
    dist_vec = squareform(dist_mat)
    Z = linkage(dist_vec, method='average')

    # Cluster the linkage using the given threshold as the cutoff
    indices = fcluster(Z, t=d_thresh, criterion='distance')
    group_ids = list(set(indices))
    indices = [(indices[i], i) for i in range(len(indices))]

    if show:
        # Plot the dendrogram...if it's not way too huge
        dendrogram(Z, color_threshold=d_thresh,
                   distance_sort='ascending')
        plt.show()

    # Sort by group id
    indices.sort(key=lambda tup: tup[0])
    groups = []
    for group_id in group_ids:
        group = Catalog()
        for ind in indices:
            if ind[0] == group_id:
                group.append(catalog[ind[1]])
            elif ind[0] > group_id:
                # Because we have sorted by group id, when the index is greater
                # than the group_id we can break the inner loop.
                # Patch applied by CJC 05/11/2015
                groups.append(group)
                break
    groups.append(group)
    return groups
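
A usage sketch with an assumed 5 km threshold (catalog is any obspy Catalog):

groups = space_cluster(catalog, d_thresh=5.0, show=False)
print([len(group) for group in groups])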
Example #9
def find_common_events(catP, catS):
    """
    Takes parallel catalogs, one with P only, the other with added S phases
    :param catP: Catalog with only p-picks
    :param catS: Catalog with S-picks added
    :return: two parallel catalogs including events with S-picks and their
        corresponding P-only versions
    """
    comm_cat_S = Catalog()
    comm_cat_P = Catalog()
    for i, ev in enumerate(catS):
        if any(pk.phase_hint == 'S' for pk in ev.picks):
            comm_cat_S.events.append(ev)
            comm_cat_P.events.append(catP[i])
    return comm_cat_P, comm_cat_S
Example #10
File: data_prep.py Project: cjhopp/scripts
def consolidate_qmls(directory, outfile=False):
    """
    Take directory of single-event qmls from above function and consolidate
    into one, year-long Catalog.write() qml file.
    :param directory: Directory of qml files
    :param outfile: Defaults to False, else is path to new outfile
    :return: obspy.core.Catalog
    """
    qmls = glob(directory)
    cat = Catalog()
    for qml in qmls:
        cat += read_events(qml)
    if outfile:
        cat.write(outfile, format='QUAKEML')
    return cat
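
Because glob() treats its argument as a pattern, a hypothetical call looks like:

cat = consolidate_qmls("qmls/2019/*.xml", outfile="catalog_2019.xml")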
Example #11
    def _read_from_folder(self, dirname):
        """
        Internal folder reader.

        :type dirname: str
        :param dirname: Folder to read from.
        """
        templates = _par_read(dirname=dirname, compressed=False)
        t_files = glob.glob(dirname + os.sep + '*.ms')
        tribe_cat_file = glob.glob(os.path.join(dirname, "tribe_cat.*"))
        if len(tribe_cat_file) != 0:
            tribe_cat = read_events(tribe_cat_file[0])
        else:
            tribe_cat = Catalog()
        previous_template_names = [t.name for t in self.templates]
        for template in list(templates):  # iterate a copy; items may be removed
            if template.name in previous_template_names:
                # Don't read in for templates that we already have.
                continue
            for event in tribe_cat:
                for comment in event.comments:
                    if comment.text == 'eqcorrscan_template_' + template.name:
                        template.event = event
            t_file = [t for t in t_files
                      if t.split(os.sep)[-1] == template.name + '.ms']
            if len(t_file) == 0:
                Logger.error('No waveform for template: ' + template.name)
                templates.remove(template)
                continue
            elif len(t_file) > 1:
                Logger.warning('Multiple waveforms found, using: ' + t_file[0])
            template.st = read(t_file[0])
        self.templates.extend(templates)
        return
Example #12
    def test_read_wavename(self):
        testing_path = os.path.join(self.testing_path, '01-0411-15L.S201309')
        wavefiles = readwavename(testing_path)
        self.assertEqual(len(wavefiles), 1)
        # Test that full paths are handled
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Check the read-write s-file functionality
        with TemporaryWorkingDirectory():
            sfile = _write_nordic(test_cat[0],
                                  filename=None,
                                  userid='TEST',
                                  evtype='L',
                                  outdir='.',
                                  wavefiles=['walrus/test'],
                                  explosion=True,
                                  overwrite=True)
            self.assertEqual(readwavename(sfile), ['test'])
        # Check that multiple wavefiles are read properly
        with TemporaryWorkingDirectory():
            sfile = _write_nordic(test_cat[0],
                                  filename=None,
                                  userid='TEST',
                                  evtype='L',
                                  outdir='.',
                                  wavefiles=['walrus/test', 'albert'],
                                  explosion=True,
                                  overwrite=True)
            self.assertEqual(readwavename(sfile), ['test', 'albert'])
Example #13
    def test_listener(self):
        listener = CatalogListener(
            client=Client("GEONET"),
            catalog=Catalog(),
            interval=20,
            template_bank=TemplateBank(base_path=self.test_path),
            catalog_lookup_kwargs=dict(latitude=-38.8,
                                       longitude=175.8,
                                       maxradius=3.),
            keep=600,
        )
        # Use a period of known seismicity: 2019-07-06, swarm under Taupo
        seconds_before_now = UTCDateTime.now() - UTCDateTime(2019, 7, 6, 9, 12)
        listener._test_start_step = seconds_before_now
        listener.background_run(filter_func=filter_events,
                                auto_pick=False,
                                make_templates=False,
                                template_kwargs=dict(lowcut=2.,
                                                     highcut=15.,
                                                     samp_rate=50.,
                                                     filt_order=4,
                                                     prepick=0.5,
                                                     length=3,
                                                     swin="P"))
        self.assertTrue(listener.busy)
        time.sleep(120)
        listener.background_stop()
        self.assertFalse(listener.busy)
        self.assertEqual(len(listener.old_events), 1)
Example #14
    def put_templates(
        self,
        templates: Union[list, Tribe],
        update_index: bool = True,
    ) -> None:
        """
        Save templates to the database.

        Parameters
        ----------
        templates
            Templates to put into the database
        update_index
            Flag to indicate whether or not to update the entire event index
            after writing the new events.
        """
        for t in templates:
            assert isinstance(t, Template)
        catalog = Catalog([t.event for t in templates])
        self.put_events(catalog, update_index=update_index)
        inner_put_template = partial(
            _put_template,
            path_structure=self.path_structure,
            template_name_structure=self.template_name_structure,
            bank_path=self.bank_path)
        _ = [_ for _ in self.executor.map(inner_put_template, templates)]
Example #15
    def cluster(self, method, **kwargs):
        """
        Cluster the tribe.

        Cluster templates within a tribe: returns multiple tribes each of
        which could be stacked.

        :type method: str
        :param method:
            Method of clustering, see :mod:`eqcorrscan.utils.clustering`

        :return: List of tribes.
        """
        from eqcorrscan.utils import clustering
        tribes = []
        func = getattr(clustering, method)
        if method in ['space_cluster', 'space_time_cluster']:
            cat = Catalog([t.event for t in self.templates])
            groups = func(cat, **kwargs)
            for group in groups:
                new_tribe = Tribe()
                for event in group:
                    new_tribe.templates.extend([t for t in self.templates
                                                if t.event == event])
                tribes.append(new_tribe)
        return tribes
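
A hypothetical call; extra kwargs are forwarded to the chosen clustering function (here space_cluster's d_thresh, in km):

tribes = tribe.cluster(method="space_cluster", d_thresh=5.0, show=False)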
Example #16
    def test_more_than_three_mags(self):
        cat = Catalog()
        cat += full_test_event()
        cat[0].magnitudes.append(Magnitude(
            mag=0.9, magnitude_type='MS', creation_info=CreationInfo('TES'),
            origin_id=cat[0].origins[0].resource_id))
        with NamedTemporaryFile(suffix='.out') as tf:
            # raises UserWarning: mb is not convertible
            with warnings.catch_warnings():
                warnings.simplefilter('ignore', UserWarning)
                cat.write(tf.name, format='nordic')
            # raises "UserWarning: AIN in header, currently unsupported"
            with warnings.catch_warnings():
                warnings.simplefilter('ignore', UserWarning)
                cat_back = read_events(tf.name)
            for event_1, event_2 in zip(cat, cat_back):
                self.assertTrue(
                    len(event_1.magnitudes) == len(event_2.magnitudes))
                _assert_similarity(event_1, event_2)
Example #17
def get_global_earthquakes(client, starttime, endtime):
    try:
        return client.get_events(
            starttime=starttime,
            endtime=endtime,
            minmagnitude=6)
    except Exception as e:
        print(e)
        return Catalog()
Example #18
File: data_prep.py Project: cjhopp/scripts
def mseed_2_Party(wav_dir, cat, temp_cat, lowcut, highcut, filt_order,
                  process_length, prepick):
    """
    Take waveforms and catalog and create a Party object
    :param wav_dir:
    :param cat:
    :return:
    """

    partay = Party()
    # Get templates first
    temp_tup = [(ev, str(ev.resource_id).split('/')[-1].split('_')[0])
                for ev in cat
                if str(ev.resource_id).split('/')[-1].split('_')[-1] == 'self']
    temp_evs, temp_ids = zip(*temp_tup)
    temp_evs = list(temp_evs)
    wav_files = ['%s/%s.mseed' % (wav_dir, str(ev.resource_id).split('/')[-1])
                 for ev in temp_evs]
    temp_wavs = [read(wav) for wav in wav_files if os.path.isfile(wav)]
    for temp_wav, temp_ev in zip(temp_wavs, temp_evs):
        # Create a Template object, assign it to a Family and then to the Party
        tid = str(temp_ev.resource_id).split('/')[-1].split('_')[0]
        if len([ev for ev in temp_cat
                if str(ev.resource_id).split('/')[-1] == tid]) > 0:
            temp_ev = [ev for ev in temp_cat
                           if str(ev.resource_id).split('/')[-1] == tid][0]
        tmp = Template(name=tid, st=temp_wav, lowcut=lowcut, highcut=highcut,
                       samp_rate=temp_wav[0].stats.sampling_rate,
                       filt_order=filt_order, process_length=process_length,
                       prepick=prepick, event=temp_ev)
        fam_det_evs = [
            ev for ev in cat
            if str(ev.resource_id).split('/')[-1].split('_')[-1] != 'self'
            and str(ev.resource_id).split('/')[-1].split('_')[0] == tid]
        fam_dets = [
            Detection(
                template_name=str(ev.resource_id).split('/')[-1].split('_')[0],
                detect_time=UTCDateTime(
                    [com.text.split('=')[-1] for com in ev.comments
                     if com.text.split('=')[0] == 'det_time'][0]),
                no_chans=len(ev.picks),
                chans=[pk.waveform_id.station_code for pk in ev.picks],
                detect_val=float(
                    [com.text.split('=')[-1] for com in ev.comments
                     if com.text.split('=')[0] == 'detect_val'][0]),
                threshold=float(
                    [com.text.split('=')[-1] for com in ev.comments
                     if com.text.split('=')[0] == 'threshold'][0]),
                typeofdet='corr',
                threshold_type='MAD',
                threshold_input=8.0,
                event=ev,
                id=str(ev.resource_id).split('/')[-1])
            for ev in fam_det_evs]
        fam_cat = Catalog(events=[det.event for det in fam_dets])
        fam = Family(template=tmp, detections=fam_dets, catalog=fam_cat)
        partay.families.append(fam)
    return partay
Example #19
File: events.py Project: cjhopp/scripts
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        oq.standard_error = rms * 1e3
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
Example #20
def get_catalog(detections):
    """
    Generate an :class:`obspy.core.event.Catalog` from a list of
    :class:`DETECTION`'s.

    :type detections: list
    :param detections: list of :class:`eqcorrscan.core.match_filter.DETECTION`

    :returns: Catalog of detected events.
    :rtype: :class:`obspy.core.event.Catalog`

    .. warning::
        Will only work if the detections have an event associated with them.
        This will not be the case if detections have been written to csv
        format using :func:`eqcorrscan.core.match_filter.DETECTION.write`
        and read back in.
    """
    catalog = Catalog()
    for detection in detections:
        catalog.append(detection.event)
    return catalog
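
A usage sketch, assuming detections taken from an eqcorrscan Family that still carry their events:

catalog = get_catalog(party[0].detections)  # party[0] is a Family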
Example #21
def filter_catalog(catalog, lon0, lat0, minrad, maxrad):
    temp_events = []
    for ev in catalog:
        ev_lat = ev.origins[0]['latitude']
        ev_lon = ev.origins[0]['longitude']
        distaz = gps2dist_azimuth(ev_lat, ev_lon, lat0, lon0)
        dist_m = distaz[0]
        dist_deg = kilometer2degrees(dist_m / 1000.0)
        if minrad <= dist_deg <= maxrad:
            temp_events.append(ev)
    cat = Catalog(events=temp_events)
    return cat
Example #22
def get_japan_earthquakes(client, starttime, endtime):
    try:
        return client.get_events(
            starttime=starttime,
            endtime=endtime,
            latitude=35.6895,
            longitude=139.6917,
            maxradius=15,
            maxmagnitude=6)
    except Exception as e:
        print(e)
        return Catalog()
Example #23
    def __init__(
        self,
        catalog: Catalog,
        inventory: Inventory,
        phases: Optional[Collection[str]] = None,
        restrict_to_arrivals: bool = True,
    ):
        # check inputs
        catalog = catalog.copy()
        events = Catalog(
            events=[catalog]) if isinstance(catalog, Event) else catalog
        # Convert inventory to a dataframe if it isn't already
        inv_df = obsplus.stations_to_df(inventory)
        inv_df.set_index("seed_id", inplace=True)
        # get a df of all input data, perform sanity checks
        event_station_df = SpatialCalculator()(events, inv_df)
        # Calculate hypocentral distance
        event_station_df["hyp_distance_m"] = np.sqrt(
            event_station_df["distance_m"]**2 +
            event_station_df["vertical_distance_m"]**2)
        event_station_df.index.names = ["event_id", "seed_id"]
        # we need additional info from the stations, get it and join.
        self.event_station_df = self._join_station_info(
            inv_df, event_station_df)

        df = self._get_meta_df(events,
                               phases=phases,
                               restrict_to_arrivals=restrict_to_arrivals)
        self.data = df
        # init cache
        self._cache = {}
Example #24
def cat_stat_ev_avg(cat):
    filtered_cat = Catalog()
    avg_arr_res = []
    for event in cat:
        pref_o = event.preferred_origin()
        # Calculate average arrival time residual for origin
        avg_arr_res.append(
            sum(x.time_residual for x in pref_o.arrivals) /
            len(pref_o.arrivals))
    mean_avg_ev_res = np.mean(avg_arr_res)
    std_avg_ev_res = np.std(avg_arr_res)
    print('Catalog mean avg event arr. time residual of: %0.3f' %
          mean_avg_ev_res)
    print('Catalog st_dev avg event arr residual of: %0.3f' % std_avg_ev_res)
    for event in cat:
        pref_o = event.preferred_origin()
        avg_arr_res = sum(
            x.time_residual for x in pref_o.arrivals) / len(pref_o.arrivals)
        if avg_arr_res < std_avg_ev_res:
            filtered_cat.append(event)
        else:
            continue
    return filtered_cat, avg_arr_res, mean_avg_ev_res, std_avg_ev_res
Example #25
def append_mt(cat, csv_fid=None):
    """
    Append GeoNet moment tensor information to the events in catalog
    Assumes that all the events have moment tensor information, meaning 
    check_moment_tensor() should be run prior to this

    :type cat: obspy.Catalog
    :param cat: catalog of events to add moment tensor information to
    :type csv_fid: str
    :param csv_fid: csv file containing moment tensor information
    """
    cat_out = Catalog()
    events = []
    for event in cat:
        event_id = event.resource_id.id.split('/')[1]
        event_out, _ = geonet_mt(event_id=event_id, units="dynecm",
                                 event=event, csv_fid=csv_fid)
        events.append(event_out)

    cat_out.extend(events)

    return cat_out
Example #26
    def __init__(self):
        # handle imports in init to avoid circular imports
        import obspy.core.event as ev
        from obspy import UTCDateTime, Catalog

        self.ev = ev
        self.UTCDateTime = UTCDateTime
        self.Catalog = Catalog
        self.ResourceIdentifier = ev.ResourceIdentifier
        # create events and bind to self
        self.time = UTCDateTime("2016-05-04T12:00:01")
        events = [self._create_event()]
        self.catalog = Catalog(events=events)
Example #27
File: client.py Project: fstettner/obspy
    def get_events(self, **kwargs):
        """
        Fetches a catalog with event information. Parameters to narrow down
        the request are the same as for :meth:`get_list`.

        .. warning::
            Only works when connecting to a SeisHub server of version 1.4.0
            or higher (serving event data as QuakeML).

        :rtype: :class:`~obspy.core.event.Catalog`
        :returns: Catalog containing event information matching the request.

        The number of resulting events is by default limited to 50 entries by
        a SeisHub server. You may raise this by setting the ``limit`` option
        to a maximum value of 2500. Numbers above 2500 will result in an
        exception.
        """
        resource_names = [item["resource_name"] for item in self.get_list(**kwargs)]
        cat = Catalog()
        for resource_name in resource_names:
            cat.extend(read_events(self.get_resource(resource_name)))
        return cat
Example #28
def rand_cat_sample(cat, n_events, cat2=False):
    from obspy import Catalog
    rand_cat = Catalog()
    indices = np.random.choice(range(len(cat)), n_events, replace=False)
    rand_cat.events = [cat[i] for i in indices]
    if cat2:
        rand_cat2 = Catalog()
        rand_cat2.events = [cat2[i] for i in indices]
        return rand_cat, rand_cat2
    return rand_cat
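
A usage sketch drawing 100 common random events from two parallel catalogs (variable names are hypothetical):

subset, subset2 = rand_cat_sample(cat, n_events=100, cat2=parallel_cat)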
Example #29
    def write(self, filename, compress=True, catalog_format="QUAKEML"):
        """
        Write the tribe to a file using tar archive formatting.

        :type filename: str
        :param filename:
            Filename to write to, if it exists it will be appended to.
        :type compress: bool
        :param compress:
            Whether to compress the tar archive or not, if False then will
            just be files in a folder.
        :type catalog_format: str
        :param catalog_format:
            What format to write the detection-catalog with. Only Nordic,
            SC3ML, QUAKEML are supported. Note that not all information is
            written for all formats (QUAKEML is the most complete, but is
            slow for IO).

        .. rubric:: Example

        >>> tribe = Tribe(templates=[Template(name='c', st=read())])
        >>> tribe.write('test_tribe')
        Tribe of 1 templates
        """
        from eqcorrscan.core.match_filter import CAT_EXT_MAP

        if catalog_format not in CAT_EXT_MAP.keys():
            raise TypeError("{0} is not supported".format(catalog_format))
        dirname, ext = os.path.splitext(filename)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        self._par_write(dirname)
        tribe_cat = Catalog()
        for t in self.templates:
            if t.event is not None:
                # Check that the name in the comment matches the template name
                for comment in t.event.comments:
                    if comment.text and comment.text.startswith(
                            "eqcorrscan_template_"):
                        comment.text = "eqcorrscan_template_{0}".format(t.name)
                tribe_cat.append(t.event)
        if len(tribe_cat) > 0:
            tribe_cat.write(
                os.path.join(dirname, 'tribe_cat.{0}'.format(
                    CAT_EXT_MAP[catalog_format])), format=catalog_format)
        for template in self.templates:
            template.st.write(
                os.path.join(dirname, '{0}.ms'.format(template.name)),
                format='MSEED')
        if compress:
            if not filename.endswith(".tgz"):
                Logger.info("Appending '.tgz' to filename.")
                filename += ".tgz"
            with tarfile.open(filename, "w:gz") as tar:
                tar.add(dirname, arcname=os.path.basename(dirname))
            shutil.rmtree(dirname)
        return self
Example #30
def run(**kwargs):
    config = read_config(config_file=kwargs.get("config_file", None))
    debug = kwargs.get("debug", False)
    update_bank = kwargs.get("update_bank", True)
    listener_starttime = kwargs.get("listener_starttime", None)
    if debug:
        config.log_level = "DEBUG"
        print("Using the following configuration:\n{0}".format(config))
    config.setup_logging()
    Logger.debug("Running in debug mode - expect lots of output!")

    client = config.rt_match_filter.get_client()

    trigger_func = partial(
        magnitude_rate_trigger_func,
        magnitude_threshold=config.reactor.magnitude_threshold,
        rate_threshold=config.reactor.rate_threshold,
        rate_bin=config.reactor.rate_radius,
        minimum_events_in_bin=config.reactor.minimum_events_in_bin)

    template_bank = TemplateBank(
        config.database_manager.event_path,
        name_structure=config.database_manager.name_structure,
        event_format=config.database_manager.event_format,
        path_structure=config.database_manager.path_structure,
        event_ext=config.database_manager.event_ext,
        executor=ProcessPoolExecutor())

    if update_bank:
        Logger.info("Updating bank before running")
        template_bank.update_index()

    listener = CatalogListener(
        client=client,
        catalog_lookup_kwargs=config.reactor.catalog_lookup_kwargs,
        template_bank=template_bank,
        interval=600,
        keep=86400.,
        catalog=Catalog(),
        waveform_client=config.rt_match_filter.get_waveform_client())

    reactor = Reactor(client=client,
                      listener=listener,
                      trigger_func=trigger_func,
                      template_database=template_bank,
                      config=config,
                      listener_starttime=listener_starttime)
    reactor.run(max_run_length=config.reactor.max_run_length)
    return
Example #31
File: core.py Project: QuLogic/obspy
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and depth
        in meters (longitude, latitude, depth in kilometers).
        If left ``None``, the geographical coordinates in the "GEOGRAPHIC" line
        of NonLinLoc output are used.
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        ``pick_id`` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rb") as fh:
                data = fh.read()
            data = data.decode("UTF-8")
        except Exception:
            try:
                data = filename.decode("UTF-8")
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode("UTF-8")

    # split lines and remove empty ones
    lines = [line for line in data.splitlines() if line.strip()]

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    cat = Catalog()
    lines_start = [i for i, line in enumerate(lines)
                   if line.startswith("NLLOC ")]
    lines_end = [i for i, line in enumerate(lines)
                 if line.startswith("END_NLLOC")]
    if len(lines_start) != len(lines_end):
        msg = ("NLLOC HYP file '{}' seems corrupt, number of 'NLLOC' lines "
               "does not match number of 'END_NLLOC' lines").format(filename)
        raise Exception(msg)
    start_end_indices = []
    for start, end in zip(lines_start, lines_end):
        start_end_indices.append(start)
        start_end_indices.append(end)
    if any(np.diff(start_end_indices) < 1):
        msg = ("NLLOC HYP file '{}' seems corrupt, inconsistent "
               "positioning of 'NLLOC' and 'END_NLLOC' lines "
               "detected.").format(filename)
        raise Exception(msg)
    for start, end in zip(lines_start, lines_end):
        event = _read_single_hypocenter(
            lines[start:end + 1], coordinate_converter=coordinate_converter,
            original_picks=original_picks)
        cat.append(event)
    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    return cat
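
In practice this reader is registered as an ObsPy plugin, so it is normally invoked through read_events (the filename is hypothetical):

from obspy import read_events

cat = read_events("last.hyp", format="NLLOC_HYP")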
Example #32
File: core.py Project: xiansch/obspy
def read_nlloc_hyp(filename, coordinate_converter=None, picks=None, **kwargs):
    """
    Reads a NonLinLoc Hypocenter-Phase file to a
    :class:`~obspy.core.event.Catalog` object.

    .. note::

        Coordinate conversion from coordinate frame of NonLinLoc model files /
        location run to WGS84 has to be specified explicitly by the user if
        necessary.

    .. note::

        An example can be found on the :mod:`~obspy.io.nlloc` submodule front
        page in the documentation pages.

    :param filename: File or file-like object in text mode.
    :type coordinate_converter: func
    :param coordinate_converter: Function to convert (x, y, z)
        coordinates of NonLinLoc output to geographical coordinates and depth
        in meters (longitude, latitude, depth in kilometers).
        If left ``None``, the geographical coordinates in the "GEOGRAPHIC" line
        of NonLinLoc output are used.
        The function should accept three arguments x, y, z (each of type
        :class:`numpy.ndarray`) and return a tuple of three
        :class:`numpy.ndarray` (lon, lat, depth in kilometers).
    :type picks: list of :class:`~obspy.core.event.Pick`
    :param picks: Original picks used to generate the NonLinLoc location.
        If provided, the output event will include the original picks and the
        arrivals in the output origin will link to them correctly (with their
        ``pick_id`` attribute). If not provided, the output event will include
        (the rather basic) pick information that can be reconstructed from the
        NonLinLoc hypocenter-phase file.
    :rtype: :class:`~obspy.core.event.Catalog`
    """
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # split lines and remove empty ones
    lines = [line for line in data.splitlines() if line.strip()]

    # remember picks originally used in location, if provided
    original_picks = picks
    if original_picks is None:
        original_picks = []

    cat = Catalog()
    while lines:
        while not lines[0].startswith("NLLOC "):
            line = lines.pop(0)
            msg = ("Ignoring an unexpected line in NLLOC_HYP "
                   "file:\n'{}'".format(line))
            warnings.warn(msg)
        for i, line in enumerate(lines):
            if line.startswith("END_NLLOC"):
                break
        else:
            msg = ("NLLOC HYP file seems corrupt,"
                   " could not detect 'END_NLLOC' line.")
            raise RuntimeError(msg)
        event = _read_single_hypocenter(
            lines[:i+1], coordinate_converter=coordinate_converter,
            original_picks=original_picks)
        cat.append(event)
        lines = lines[i+1:]
    cat.creation_info.creation_time = UTCDateTime()
    cat.creation_info.version = "ObsPy %s" % __version__
    return cat