Example No. 1
def rand_cat_sample(cat, n_events, cat2=False):
    """Randomly sample n_events from a catalog, without replacement."""
    import numpy as np
    from obspy import Catalog
    rand_cat = Catalog()
    indices = np.random.choice(range(len(cat)), n_events, replace=False)
    rand_cat.events = [cat[i] for i in indices]
    if cat2:
        # Also return a second, independent Catalog built from the
        # same indices
        rand_cat2 = Catalog()
        rand_cat2.events = [cat[i] for i in indices]
        return rand_cat, rand_cat2
    return rand_cat
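
A minimal usage sketch; the catalog file name below is hypothetical:

from obspy import read_events

cat = read_events("my_catalog.xml")  # hypothetical input file
subset = rand_cat_sample(cat, n_events=50)
print(len(subset))  # -> 50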
Example No. 2
        def write_quakeml(list_of_networks):
            """Build a QuakeML catalog of shots for each network.

            ``outfile``, ``sys``, and ``NoDataError`` come from the
            enclosing scope.
            """
            events = []
            catalog = Catalog()
            for network in list_of_networks:
                for shot_line in network.shot_lines:
                    for shot in shot_line.shots:
                        origins = []
                        magnitudes = []
                        iris_custom_ns = "http://www.fdsn.org/xml/event/1/iris"
                        origin = obspy.core.event.origin.Origin()
                        origin.time = shot.start_time
                        origin.latitude = shot.lat
                        origin.longitude = shot.lon
                        origin.extra = {
                            'Elevation': {
                                'value': str(shot.elev),
                                'namespace': iris_custom_ns
                            }
                        }
                        if shot.depth != 0:
                            origin.depth = shot.depth
                        origins.append(origin)
                        magnitudes.append(
                            obspy.core.event.magnitude.Magnitude(
                                mag=shot.mag, magnitude_type=shot.mag_units))

                        identifier = obspy.core.event.base.ResourceIdentifier(
                            id=str(network.code) + "." +
                            str(shot_line.name[-3:]) + "." + str(shot.shot_id))
                        event = (obspy.core.event.Event(
                            resource_id=identifier,
                            event_type="Controlled Explosion",
                            origins=origins,
                            magnitudes=magnitudes))
                        event.extra = {
                            'Network': {
                                'value': str(network.code),
                                'type': 'attribute',
                                'namespace': iris_custom_ns
                            },
                            'ReportNum': {
                                'value': str(network.reportnum),
                                'type': 'attribute',
                                'namespace': iris_custom_ns
                            },
                            'ShotLine': {
                                'value': str(shot_line.name[-3:]),
                                'type': 'attribute',
                                'namespace': iris_custom_ns
                            },
                            'Shot_id': {
                                'value': str(shot.shot_id),
                                'type': 'attribute',
                                'namespace': iris_custom_ns
                            }
                        }
                        events.append(event)

            # events has accumulated the shots from every network
            catalog.events = events

            if catalog.events:
                if outfile:
                    target = outfile
                else:
                    target = sys.stdout

                catalog.write(target,
                              "QUAKEML",
                              nsmap={"iris": iris_custom_ns})
            else:
                raise NoDataError("Request resulted in no data being returned")
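
A self-contained sketch of the custom-namespace pattern used above; the event values here are made up:

from obspy import Catalog, UTCDateTime
from obspy.core.event import Event, Magnitude, Origin

ns = "http://www.fdsn.org/xml/event/1/iris"
event = Event(
    event_type="controlled explosion",
    origins=[Origin(time=UTCDateTime(2020, 1, 1),
                    latitude=40.0, longitude=-110.0)],
    magnitudes=[Magnitude(mag=1.2)])
# 'extra' entries are serialized into the custom namespace on write
event.extra = {'Shot_id': {'value': '1001', 'type': 'attribute',
                           'namespace': ns}}
cat = Catalog(events=[event])
cat.write("shots.xml", format="QUAKEML", nsmap={"iris": ns})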
Example No. 3
        # `ev` and `temp_dir` come from the enclosing loop over detections.
        # The detect time can be extracted from the first pick minus the
        # template pre-pick time (0.1 s here).
        det_time = min([pk.time for pk in ev.picks]) - 0.1
        wav_id = ResourceIdentifier(
            str(ev.resource_id).split('/')[-1].split('_')[0] + '_' +
            str(det_time))
        find_file = temp_dir.rstrip('*') + str(wav_id) + '.mseed'
        if os.path.isfile(find_file):
            new_fname = temp_dir.rstrip('*') + str(
                ev.resource_id).split('/')[-1].split('_')[0] + '_self.mseed'
            print('Renaming file: %s to %s' % (find_file, new_fname))
            os.rename(find_file, new_fname)
# Take subset of catalog for testing purposes
test_cat = cat[:100].copy()
rand_cat = Catalog()
rand_cat.events = [
    cat[i] for i in np.random.choice(range(len(cat)), 200, replace=False)
]

# Write dt.cc files...
# Suppress UserWarnings raised while writing correlations
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=UserWarning)
    catalog_to_dd.write_correlations(cat,
                                     template_dict,
                                     extract_len=3,
                                     pre_pick=0.5,
                                     shift_len=0.3,
                                     lowcut=1.0,
                                     highcut=20.0,
                                     max_sep=4,
                                     min_link=6)
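
The block above silences UserWarnings rather than stdout; if printed output also needs suppressing, a standard-library sketch (noisy_call is a hypothetical stand-in) is:

import io
import warnings
from contextlib import redirect_stdout

# Silence UserWarnings and anything printed to stdout for the duration
with warnings.catch_warnings(), redirect_stdout(io.StringIO()):
    warnings.filterwarnings("ignore", category=UserWarning)
    noisy_call()  # hypothetical stand-in for the suppressed call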
Example No. 4
    def process_new_events(self, new_events: Catalog) -> None:
        """
        Process any new events in the system.

        Check if new events should be in one of the already running
        tribes and add them. Check all other events for possible triggers and
        spin-up a detector instance for triggers.

        Parameters
        ----------
        new_events
            Catalog of new-events to be assessed.
        """
        for triggering_event_id, tribe_region in self._running_regions.items():
            try:
                add_events = get_events(new_events, **tribe_region)
            except Exception:
                # This only occurs when there are no events in the region
                # and is fixed by PR #177 on Obsplus.
                add_events = Catalog()
            # Don't trigger on events now running in another tribe.
            new_events.events = [e for e in new_events if e not in add_events]
            # TODO: Implement region growth based on new events added.
            added_ids = {
                e.resource_id.id for e in add_events
            }.difference(self.running_template_ids)
            if added_ids:
                tribe = self.template_database.get_templates(eventid=added_ids)
                tribe = check_tribe_quality(
                    tribe,
                    min_stations=self.config.rt_match_filter.min_stations,
                    **self.config.template)
                if len(tribe) > 0:
                    Logger.info(
                        f"Adding {len(tribe)} events to {triggering_event_id}")
                    template_dir = os.path.join(
                        _get_triggered_working_dir(triggering_event_id),
                        "new_templates")
                    if not os.path.isdir(template_dir):
                        os.makedirs(template_dir)
                    for template in tribe:
                        template.write(
                            filename=os.path.join(template_dir, template.name))
                    Logger.info(f"Written new templates to {template_dir}")
                    self._running_templates[triggering_event_id].update(
                        added_ids)
        trigger_events = self.trigger_func(new_events)
        # Sanitize trigger-events: sort by magnitude so that, of multiple
        # events that would otherwise run together, the largest triggers
        # first rather than all of them.
        for trigger_event in trigger_events:
            # Make sure they all have a magnitude
            if (len(trigger_event.magnitudes) == 0
                    or trigger_event.magnitudes[0] is None):
                trigger_event.magnitudes = [Magnitude(mag=-999)]
        trigger_events.events.sort(
            key=lambda e: (e.preferred_magnitude() or e.magnitudes[0]).mag,
            reverse=True)
        for trigger_event in trigger_events:
            if trigger_event in self._triggered_events:
                continue
            if trigger_event.resource_id.id in self.running_template_ids:
                Logger.info(
                    f"Not spinning up {trigger_event}: it is already running")
                continue
            Logger.warning(
                "Listener triggered by event {0}".format(trigger_event))
            if len(self._running_regions) >= self.available_cores:
                Logger.error("No more available processors")
                continue
            self._triggered_events.append(trigger_event)
            self.spin_up(trigger_event)
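
The trigger_func used above is injected elsewhere; a minimal sketch of the expected interface (Catalog in, Catalog of triggering events out), with a made-up magnitude threshold:

from obspy import Catalog


def magnitude_trigger_func(catalog, threshold=4.0):
    """Hypothetical trigger function: keep events at or above threshold."""
    triggers = Catalog()
    triggers.events = [
        ev for ev in catalog
        if ev.magnitudes and ev.magnitudes[0].mag is not None
        and ev.magnitudes[0].mag >= threshold]
    return triggers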
Example No. 5
def bounding_box(cat, bbox, depth_thresh):
    """Trim a catalog to events inside a lon/lat box, shallower than a depth.

    bbox is (lon_bounds, lat_bounds) in decimal degrees (order within each
    pair does not matter); depth_thresh is in km.
    """
    new_cat = Catalog()
    new_cat.events = [
        ev for ev in cat
        if min(bbox[0]) <= ev.origins[-1].longitude <= max(bbox[0])
        and min(bbox[1]) <= ev.origins[-1].latitude <= max(bbox[1])
        and ev.origins[-1].depth <= depth_thresh * 1000]  # ObsPy depth in m
    return new_cat
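
A usage sketch; the catalog file and box bounds below are made up:

from obspy import read_events

cat = read_events("my_catalog.xml")  # hypothetical catalog file
# Events inside a 1 x 1 degree box, shallower than 10 km
shallow = bounding_box(cat, bbox=((166.0, 167.0), (-39.0, -38.0)),
                       depth_thresh=10)
print(len(shallow))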
Example No. 6
def plot_event_well_dist(catalog, well_file, flow_start, diffs,
                         temp_list='all', method='scatter', starttime=None,
                         endtime=None, title=None, show=True):
    """
    Function to plot events with distance from well as a function of time.
    :param cat: catalog of events
    :param well_file: text file of xyz feedzone pts
    :param flow_start: Start UTCdt of well flow to model
    :param diffs: list of diffusion values to plot
    :param temp_list: list of templates for which we'll plot detections
    :param method: plot the 'scatter' or daily 'average' distance or both
    :return: matplotlib.pyplot.Axes
    """
    # format_well_data, dist_calc, and date_generator are helpers from the
    # surrounding module
    well_pts = format_well_data(well_file)
    # Keep only events from templates in temp_list
    cat = Catalog()
    filt_cat = Catalog()
    if starttime and endtime:
        filt_cat.events = [ev for ev in catalog
                           if starttime <= ev.origins[-1].time < endtime]
    else:
        filt_cat = catalog
    cat.events = [ev for ev in filt_cat if
                  str(ev.resource_id).split('/')[-1].split('_')[0] in
                  temp_list or temp_list == 'all']
    time_dist_tups = []
    cat_start = min([ev.origins[-1].time.datetime for ev in cat])
    cat_end = max([ev.origins[-1].time.datetime for ev in cat])
    for ev in cat:
        if ev.origins[-1]:
            dist = min([dist_calc((ev.origins[-1].latitude,
                                   ev.origins[-1].longitude,
                                   ev.origins[-1].depth / 1000.),
                                  pt) for pt in well_pts])
            time_dist_tups.append((ev.origins[-1].time.datetime,
                                  dist))
    times, dists = zip(*time_dist_tups)
    # Make DataFrame for boxplotting
    dist_df = pd.DataFrame()
    dist_df['dists'] = pd.Series(dists, index=times)
    # Add a daily grouping column to the DataFrame (clunky, but workable)
    dist_df['day_num'] = [date2num(dto.replace(hour=12, minute=0, second=0,
                                               microsecond=0).to_pydatetime())
                          for dto in dist_df.index]
    dist_df['dto_num'] = [date2num(dt) for dt in dist_df.index]
    # Now create the pressure envelopes
    # Creating hourly datetime increments
    start = pd.Timestamp(flow_start.datetime)
    end = pd.Timestamp(cat_end)
    t = pd.to_datetime(pd.date_range(start, end, freq='H'))
    t = [date2num(d) for d in t]
    # Now the diffusion-envelope y values: distance of the diffusion front
    # from the well at each hourly step, scaled to km
    diff_ys = []
    for d in diffs:
        diff_ys.append([np.sqrt(60 * d * i / 4000 * np.pi)
                        for i in range(len(t))])
    # Plot 'em up
    fig, ax = plt.subplots(figsize=(7, 6))
    # First boxplots
    u_days = list(set(dist_df.day_num))
    bins = [dist_df.loc[dist_df['day_num'] == d]['dists'].values
            for d in u_days]
    positions = u_days
    bplots = ax.boxplot(bins, positions=positions, patch_artist=True,
                        flierprops={'markersize': 0}, manage_xticks=False)
    for patch in bplots['boxes']:
        patch.set_facecolor('lightblue')
        patch.set_alpha(0.5)
    # First diffusions
    for i, diff_y in enumerate(diff_ys):
        ax.plot(t, diff_y,
                label='Diffusion envelope, D={} $m^2/s$'.format(str(diffs[i])))
    # Now events
    if method != 'scatter':
        dates = []
        day_avg_dist = []
        for date in date_generator(cat_start, cat_end):
            dates.append(date)
            tdds = [tdd[1] for tdd in time_dist_tups if tdd[0] > date
                    and tdd[0] < date + timedelta(days=1)]
            day_avg_dist.append(np.mean(tdds))
    if method == 'scatter':
        ax.scatter(times, dists, color='gray', label='Event', s=10, alpha=0.5)
    elif method == 'average':
        ax.plot(dates, day_avg_dist)
    elif method == 'both':
        ax.scatter(times, dists)
        ax.plot(dates, day_avg_dist, color='r')
    # Plot formatting
    fig.autofmt_xdate()
    ax.legend()
    ax.set_ylim([0, 6])
    if title:
        ax.set_title(title, fontsize=19)
    else:
        ax.set_title('Fluid diffusion envelopes and earthquake distance')
    if starttime:
        ax.set_xlim([date2num(starttime.datetime), max(t)])
    else:
        ax.set_xlim([min(t), max(t)])
    ax.set_xlabel('Date')
    ax.set_ylabel('Distance (km)')
    fig.tight_layout()
    if show:
        fig.show()
    return ax
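
A usage sketch; the file names, date, and diffusivity values below are made up:

from obspy import read_events, UTCDateTime

cat = read_events("induced_events.xml")  # hypothetical catalog file
ax = plot_event_well_dist(
    cat,
    well_file="feedzones.txt",           # hypothetical xyz feedzone file
    flow_start=UTCDateTime(2015, 6, 1),
    diffs=[0.1, 0.5, 1.0],               # diffusivities in m^2/s
    method='both')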