Example #1
def average_rate(catalog: Union[List[Detection], Catalog],
                 starttime: Optional[UTCDateTime] = None,
                 endtime: Optional[UTCDateTime] = None) -> float:
    """
    Compute the mean rate of occurrence of events in a catalog (events per day).

    Parameters
    ----------
    catalog
        Catalog of events, or list of detections
    starttime
        Start-time to calculate rate for, if not set will use the time of the
        first event in the catalog
    endtime
        End-time to calculate rate for, if not set will use the time of the
        last event in the catalog

    Returns
    -------
    Average rate over duration of catalog. Units: events / day
    """
    assert isinstance(catalog, (Catalog, list))
    if len(catalog) <= 1:
        return 0.
    if isinstance(catalog, Catalog):
        event_times = sorted([event_time(e) for e in catalog])
    else:
        assert all(isinstance(d, Detection) for d in catalog)
        event_times = sorted([d.detect_time for d in catalog])
    starttime = starttime or event_times[0]
    endtime = endtime or event_times[-1]
    duration = (endtime - starttime) / 86400.
    if duration == 0:
        # All events are co-incident (or starttime == endtime): the rate is
        # undefined, so return 0 rather than dividing by zero.
        return 0.
    return len(event_times) / duration
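A minimal usage sketch (the FDSN client and the GEONET endpoint are illustrative assumptions; only average_rate above is taken from the example):

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("GEONET")  # hypothetical data centre, swap for your own
catalog = client.get_events(
    starttime=UTCDateTime(2016, 11, 13),
    endtime=UTCDateTime(2016, 11, 20))

# Rate over the catalog's own time-span:
print(f"{average_rate(catalog):.1f} events/day")

# Rate over a fixed window, independent of the first and last events:
print(average_rate(
    catalog,
    starttime=UTCDateTime(2016, 11, 13),
    endtime=UTCDateTime(2016, 11, 14)))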
Example #2
    def __init__(
        self,
        client,
        template_bank: TemplateBank,
        catalog: Optional[Catalog] = None,
        catalog_lookup_kwargs: Optional[dict] = None,
        interval: float = 10,
        keep: float = 86400,
        waveform_client=None,
    ):
        self.client = client
        # Use the event client for waveforms if no separate client is given
        self.waveform_client = waveform_client or client
        if catalog is None:
            catalog = Catalog()
        # Seed the cache of already-seen events from the given catalog
        self.set_old_events(
            [EventInfo(ev.resource_id.id, event_time(ev)) for ev in catalog])
        self.template_bank = template_bank
        self.catalog_lookup_kwargs = catalog_lookup_kwargs or dict()
        self.interval = interval
        self.keep = keep
        self.threads = []
        self.triggered_events = Catalog()
        self.busy = False
        self.previous_time = UTCDateTime.now()
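The class this __init__ belongs to is not shown; a hedged construction sketch, assuming it is the catalog listener (called CatalogListener here for illustration) and that TemplateBank takes a base path:

from obspy.clients.fdsn import Client

listener = CatalogListener(
    client=Client("GEONET"),               # event-metadata client (assumed endpoint)
    template_bank=TemplateBank("templates"),  # hypothetical database path
    interval=20,                           # poll for new events every 20 s
    keep=86400,                            # drop cached events older than a day
)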
Example #3
    def run(
        self,
        make_templates: bool = True,
        template_kwargs: Optional[dict] = None,
        min_stations: int = 0,
        auto_event: bool = True,
        auto_picks: bool = True,
        event_type: Optional[Union[list, str]] = None,
        filter_func: Optional[Callable] = None,
        starttime: Optional[UTCDateTime] = None,
        **filter_kwargs,
    ) -> None:
        """
        Run the listener. New events will be added to the template_bank.

        Parameters
        ----------
        make_templates
            Whether to add new templates to the database (True) or not.
        template_kwargs
            Dictionary of keyword arguments for making templates, requires
            at-least: lowcut, highcut, samp_rate, filt_order, prepick, length,
            swin.
        min_stations
            Minimum number of stations for an event to be added to the
            TemplateBank.
        auto_event
            If True, both automatic and manually reviewed events will be
            included. If False, only manually reviewed events will be
            included.
        auto_picks
            If True, both automatic and manual picks will be included. If
            False, only manually reviewed picks will be included. Note that
            this is done **before** counting the number of stations.
        event_type
            List of event types to keep.
        filter_func
            Function used for filtering. If left as None, this will use the
            `catalog_listener.filter_events` function.
        starttime
            When to start to get events from, defaults to the last time
            the listener was run.
        filter_kwargs
            If the `filter_func` has changed then this should be the
            additional kwargs for the user-defined filter_func.
        """
        self.busy = True
        if starttime is None:
            self.previous_time -= self._test_start_step
        else:
            self.previous_time = starttime
        template_kwargs = template_kwargs or dict()
        loop_duration = 0  # Timer for loop, used in synthesising speed-ups
        while self.busy:
            tic = time.time()  # Timer for loop, used in synthesising speed-ups
            if self._test_start_step > 0:
                # Still processing past data
                self._test_start_step -= loop_duration * self._speed_up
                self._test_start_step += loop_duration
                # Account for UTCDateTime.now() already including loop_
                # duration once.
            elif self._test_start_step < 0:
                # We have gone into the future!
                raise NotImplementedError(
                    "Trying to access future data: spoilers not allowed")
            now = UTCDateTime.now() - self._test_start_step
            # Remove old events from cache
            self._remove_old_events(now)
            Logger.debug("Checking for new events between {0} and {1}".format(
                self.previous_time, now))
            try:
                # Check for new events - add in a pad of five times the
                # checking interval to ensure that slow-to-update events are
                # included.
                new_events = self.client.get_events(
                    starttime=self.previous_time - (5 * self.interval),
                    endtime=now,
                    **self.catalog_lookup_kwargs)
            except Exception as e:
                # Guard against exceptions with empty or non-string args
                msg = str(e.args[0]) if e.args else ""
                if "No data available for request" in msg:
                    Logger.debug("No new data")
                else:  # pragma: no cover
                    Logger.error(
                        "Could not download data between {0} and {1}".format(
                            self.previous_time, now))
                    Logger.error(e)
                time.sleep(self.sleep_interval)
                # Timer for loop, used in synthesising speed-ups
                toc = time.time()
                loop_duration = toc - tic
                continue
            if new_events is not None:
                if filter_func is not None:
                    new_events = filter_func(new_events,
                                             min_stations=min_stations,
                                             auto_picks=auto_picks,
                                             auto_event=auto_event,
                                             event_type=event_type,
                                             **filter_kwargs)
                old_event_ids = [tup[0] for tup in self.old_events]
                new_events = Catalog([
                    ev for ev in new_events
                    if ev.resource_id.id not in old_event_ids
                ])
                Logger.info("{0} new events between {1} and {2}".format(
                    len(new_events), self.previous_time, now))
                if len(new_events) > 0:
                    Logger.info("Adding {0} new events to the database".format(
                        len(new_events)))
                    for event in new_events:
                        try:
                            origin = (event.preferred_origin()
                                      or event.origins[0])
                        except IndexError:
                            continue
                        try:
                            magnitude = (event.preferred_magnitude()
                                         or event.magnitudes[0])
                        except IndexError:
                            continue
                        Logger.info("Event {0}: M {1:.1f}, lat: {2:.2f}, "
                                    "long: {3:.2f}, depth: {4:.2f}km".format(
                                        event.resource_id.id, magnitude.mag,
                                        origin.latitude, origin.longitude,
                                        origin.depth / 1000.))
                    event_info = [
                        EventInfo(ev.resource_id.id, event_time(ev))
                        for ev in new_events
                    ]
                    if make_templates:
                        self.template_bank.make_templates(
                            new_events,
                            client=self.waveform_client,
                            **template_kwargs)
                    else:
                        self.template_bank.put_events(new_events)
                    # Putting the events in the bank clears the catalog.
                    Logger.info(
                        f"Putting events into old_events: \n{event_info}")
                    # Append one-by-one: extending the shared list in one
                    # call has proven unstable.
                    for ev_info in event_info:
                        self.append(ev_info)
                    Logger.info("Old events current state: {0}".format(
                        self.old_events))
            self.previous_time = now
            # Sleep in steps to make death responsive
            _sleep_step = min(10.0, self.sleep_interval)
            _slept = 0.0
            while _slept < self.sleep_interval:
                _tic = time.time()
                # Need to sleep to allow other threads to run
                time.sleep(_sleep_step)
                if not self.busy:
                    break
                _toc = time.time()
                _slept += _toc - _tic
            toc = time.time()  # Timer for loop, used in synthesising speed-ups
            loop_duration = toc - tic
        return
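Because the main loop re-checks self.busy between short sleeps ("sleep in steps to make death responsive"), the listener can be stopped promptly from another thread. A hedged sketch, assuming listener is an instance of the class above and that the template_kwargs values are illustrative:

import threading
import time

listener_thread = threading.Thread(
    target=listener.run,
    kwargs=dict(
        make_templates=True,
        template_kwargs=dict(
            lowcut=2.0, highcut=15.0, samp_rate=50.0, filt_order=4,
            prepick=0.5, length=3.0, swin="all")),  # values are illustrative
    daemon=True)
listener_thread.start()

time.sleep(600)        # ...listen for ten minutes...
listener.busy = False  # the run loop exits at its next busy-flag check
listener_thread.join()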
Example #4
def run(working_dir: str, cores: int = 1, log_to_screen: bool = False):
    os.chdir(working_dir)
    Logger.debug("Reading config")
    config = read_config('rt_eqcorrscan_config.yml')
    config.setup_logging(
        screen=log_to_screen, file=True,
        filename="{0}/rt_eqcorrscan_{1}.log".format(
            working_dir,
            os.path.split(working_dir)[-1]))
    # Enforce a local-wave-bank for the spun-up real-time instance
    config.rt_match_filter.local_wave_bank = "streaming_wavebank"

    triggering_event = read_events('triggering_event.xml')[0]
    Logger.debug(f"Triggered by {triggering_event}")
    min_stations = config.rt_match_filter.get("min_stations", None)
    Logger.info("Reading the Tribe")
    tribe = Tribe().read("tribe.tgz")
    # Remove file to avoid re-reading it
    os.remove("tribe.tgz")

    Logger.info("Read in {0} templates".format(len(tribe)))
    if len(tribe) == 0:
        Logger.warning("No appropriate templates found")
        return
    Logger.info("Checking tribe quality: removing templates with "
                "fewer than {0} stations".format(min_stations))
    tribe = check_tribe_quality(
        tribe, min_stations=min_stations, **config.template)
    Logger.info("Tribe now contains {0} templates".format(len(tribe)))
    if len(tribe) == 0:
        return

    client = config.rt_match_filter.get_client()
    rt_client = config.streaming.get_streaming_client()

    inventory = get_inventory(
        client, tribe, triggering_event=triggering_event,
        max_distance=config.rt_match_filter.max_distance,
        n_stations=config.rt_match_filter.n_stations)
    if len(inventory) == 0:
        Logger.critical(
            f"No inventory within {config.rt_match_filter.max_distance} "
            f"km of the trigger matching your templates, not running")
        return
    detect_interval = config.rt_match_filter.get(
        "detect_interval", 60)
    plot = config.rt_match_filter.get("plot", False)
    real_time_tribe = RealTimeTribe(
        tribe=tribe, inventory=inventory, rt_client=rt_client,
        detect_interval=detect_interval, plot=plot,
        plot_options=config.plot,
        name=triggering_event.resource_id.id.split('/')[-1],
        wavebank=config.rt_match_filter.local_wave_bank)
    if real_time_tribe.expected_seed_ids is None:
        Logger.error("No matching channels in inventory and templates")
        return

    # Disable parallel processing for subprocess
    real_time_tribe._parallel_processing = False
    # Set the maximum correlation core-count
    if config.rt_match_filter.get("max_correlation_cores", None):
        cores = min(cores, config.rt_match_filter.max_correlation_cores)
    real_time_tribe.max_correlation_cores = cores

    Logger.info("Created real-time tribe with inventory:\n{0}".format(
        inventory))

    # TODO: How will this work? Currently notifiers are not implemented
    # real_time_tribe.notifier = None

    backfill_to = event_time(triggering_event) - 180
    backfill_client = config.rt_match_filter.get_waveform_client()

    if backfill_client and real_time_tribe.wavebank:
        # Download the required data and write it to disk.
        endtime = UTCDateTime.now()
        Logger.info(
            f"Backfilling between {backfill_to} and {endtime}")
        st = Stream()
        for network in inventory:
            for station in network:
                for channel in station:
                    Logger.info(
                        f"Downloading for {network.code}.{station.code}."
                        f"{channel.location_code}.{channel.code}")
                    try:
                        st += backfill_client.get_waveforms(
                            network=network.code, station=station.code,
                            location=channel.location_code,
                            channel=channel.code,
                            starttime=backfill_to,
                            endtime=endtime)
                    except Exception as e:
                        Logger.error(e)
                        continue
        st = st.merge()
        Logger.info(f"Downloaded {len(st)} traces for backfill")
        if len(st) == 0:
            Logger.warning("No backfill available, skipping")
        else:
            st = st.split()  # Cannot write masked data
            real_time_tribe.wavebank.put_waveforms(st)
            backfill_stations = {tr.stats.station for tr in st}
            # Only backfill with templates that share enough stations with
            # the downloaded data.
            backfill_templates = [
                t for t in real_time_tribe.templates
                if len({tr.stats.station for tr in t.st}.intersection(
                    backfill_stations)) >= (min_stations or 0)]
            Logger.info("Computing backfill detections")
            real_time_tribe.backfill(
                templates=backfill_templates,
                threshold=config.rt_match_filter.threshold,
                threshold_type=config.rt_match_filter.threshold_type,
                trig_int=config.rt_match_filter.trig_int,
                hypocentral_separation=(
                    config.rt_match_filter.hypocentral_separation),
                keep_detections=86400,
                detect_directory="{name}/detections",
                plot_detections=config.rt_match_filter.plot_detections)

    Logger.info("Starting real-time run")

    real_time_tribe.run(
        threshold=config.rt_match_filter.threshold,
        threshold_type=config.rt_match_filter.threshold_type,
        trig_int=config.rt_match_filter.trig_int,
        hypocentral_separation=config.rt_match_filter.hypocentral_separation,
        min_stations=min_stations,
        keep_detections=86400,
        detect_directory="{name}/detections",
        plot_detections=config.rt_match_filter.plot_detections,
        save_waveforms=config.rt_match_filter.save_waveforms,
        max_run_length=config.rt_match_filter.max_run_length,
        minimum_rate=config.rt_match_filter.minimum_rate,
        backfill_to=backfill_to)
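A hedged invocation sketch: run expects working_dir to already contain the rt_eqcorrscan_config.yml, triggering_event.xml and tribe.tgz files it reads (the path is a placeholder):

run(working_dir="/path/to/spun_up_run", cores=4, log_to_screen=True)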
Example #5
    def test_extract_zero_time(self):
        self.assertEqual(UTCDateTime(0), event_time(Event()))
Example #6
    def test_extract_pick_time(self):
        no_origin = self.event.copy()
        no_origin.origins = []
        no_origin.preferred_origin_id = None
        self.assertEqual(self.event.picks[0].time, event_time(no_origin))
Example #7
    def test_extract_origin_time(self):
        self.assertEqual(self.event.origins[0].time, event_time(self.event))
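Together these three tests pin down the fallback order of event_time: origin time if the event has one, else a pick time (the tests use the first pick; the earliest pick is assumed equivalent here), else UTCDateTime(0). A sketch consistent with the tests, inferred from them rather than taken from the library's actual source:

from obspy import UTCDateTime
from obspy.core.event import Event


def event_time(event: Event) -> UTCDateTime:
    """Best-guess time of an event: origin, else earliest pick, else epoch."""
    origin = event.preferred_origin() or (
        event.origins[0] if event.origins else None)
    if origin is not None and origin.time is not None:
        return origin.time
    if event.picks:
        return min(pick.time for pick in event.picks)
    return UTCDateTime(0)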
Example #8
    def spin_up(
        self,
        triggering_event: Event,
        run: bool = True,
    ) -> Union[Party, tuple]:
        """
        Run the reactor's response function.

        Parameters
        ----------
        triggering_event
            Event that triggered this run - needs to have at-least an origin.
        run
            Whether to run the real-time tribe immediately (True),
            or return it (False).
        """
        min_stations = self.listener_kwargs.get("min_stations", None)
        region = estimate_region(triggering_event)
        if region is None:
            return None, None
        region.update(self.template_lookup_kwargs)
        Logger.info("Getting templates within {0}".format(region))
        tribe = self.template_database.get_templates(**region)
        Logger.info("Read in {0} templates".format(len(tribe)))
        tribe.templates = [
            t for t in tribe if t.name not in self.running_template_ids
        ]
        if len(tribe) == 0:
            Logger.warning(
                "No appropriate templates for event: {0}".format(region))
            return None, None
        Logger.info("Checking tribe quality: removing templates with "
                    "fewer than {0} stations".format(min_stations))
        tribe = check_tribe_quality(tribe,
                                    min_stations=min_stations,
                                    **self.listener_kwargs["template_kwargs"])
        Logger.info("Tribe now contains {0} templates".format(len(tribe)))
        inventory = get_inventory(self.client,
                                  tribe,
                                  triggering_event=triggering_event,
                                  max_distance=self.max_station_distance,
                                  n_stations=self.n_stations)
        detect_interval = self.real_time_tribe_kwargs.get(
            "detect_interval", 60)
        plot = self.real_time_tribe_kwargs.get("plot", False)
        if plot and self._plotting is not None:
            Logger.warning(
                "Cannot plot for more than one real-time-tribe at once.")
            plot = False
        elif plot:
            self._plotting = triggering_event.resource_id
        real_time_tribe = RealTimeTribe(
            tribe=tribe,
            inventory=inventory,
            rt_client=self.rt_client.copy(),
            detect_interval=detect_interval,
            plot=plot,
            plot_options=self.plot_kwargs,
            name=triggering_event.resource_id.id.split('/')[-1])
        Logger.info(
            "Created real-time tribe with inventory:\n{0}".format(inventory))
        real_time_tribe.notifier = self.notifier

        real_time_tribe_kwargs = {
            "backfill_to": event_time(triggering_event),
            "backfill_client": self.listener.waveform_client,
            "cores": self.available_cores
        }
        real_time_tribe_kwargs.update(self.real_time_tribe_kwargs)
        self.running_tribes.update({
            triggering_event.resource_id.id: {
                "tribe": real_time_tribe,
                "region": region
            }
        })
        self.running_template_ids.update({t.name for t in real_time_tribe})
        if run:
            return real_time_tribe.run(**real_time_tribe_kwargs)
        else:
            return real_time_tribe, real_time_tribe_kwargs
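A hedged sketch of calling spin_up directly; reactor stands for an instance of the (unnamed) class defining it, and the QuakeML file name is a placeholder:

from obspy import read_events

triggering_event = read_events("triggering_event.xml")[0]

# With run=False the configured tribe and its run kwargs are returned
# rather than blocking on a real-time run:
rt_tribe, rt_kwargs = reactor.spin_up(triggering_event, run=False)
if rt_tribe is not None:
    party = rt_tribe.run(**rt_kwargs)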
Example #9
    def run(
        self,
        make_templates: bool = True,
        template_kwargs: Optional[dict] = None,
        min_stations: int = 0,
        auto_event: bool = True,
        auto_picks: bool = True,
        event_type: Optional[Union[list, str]] = None,
        filter_func: Optional[Callable] = None,
        **filter_kwargs,
    ) -> None:
        """
        Run the listener. New events will be added to the template_bank.

        Parameters
        ----------
        make_templates
            Whether to add new templates to the database (True) or not.
        template_kwargs
            Dictionary of keyword arguments for making templates, requires
            at-least:
              - lowcut
              - highcut
              - samp_rate
              - filt_order
              - prepick
              - length
              - swin
        min_stations
            Minimum number of stations for an event to be added to the
            TemplateBank.
        auto_event
            If True, both automatic and manually reviewed events will be
            included. If False, only manually reviewed events will be
            included.
        auto_picks
            If True, both automatic and manual picks will be included. If
            False, only manually reviewed picks will be included. Note that
            this is done **before** counting the number of stations.
        event_type
            List of event types to keep.
        filter_func
            Function used for filtering. If left as None, this will use the
            `catalog_listener.filter_events` function.
        filter_kwargs
            If the `filter_func` has changed then this should be the
            additional kwargs for the user-defined filter_func.
        """
        self.busy = True
        self.previous_time -= self._test_start_step
        template_kwargs = template_kwargs or dict()
        while self.busy:
            now = UTCDateTime.now() - self._test_start_step
            # Remove old events from dict
            self._remove_old_events(now)
            Logger.debug("Checking for new events between {0} and {1}".format(
                self.previous_time, now))
            try:
                new_events = self.client.get_events(
                    starttime=self.previous_time,
                    endtime=now,
                    **self.catalog_lookup_kwargs)
            except Exception as e:
                # Guard against exceptions with empty or non-string args
                msg = str(e.args[0]) if e.args else ""
                if "No data available for request" in msg:
                    Logger.debug("No new data")
                else:  # pragma: no cover
                    Logger.error(
                        "Could not download data between {0} and {1}".format(
                            self.previous_time, now))
                    Logger.error(e)
                time.sleep(self.interval)
                continue
            if new_events is not None:
                Logger.info("{0} new events between {1} and {2}".format(
                    len(new_events), self.previous_time, now))
                if filter_func is not None:
                    # Keep the filtered catalog - discarding the return
                    # value would leave new_events unfiltered.
                    new_events = filter_func(
                        new_events,
                        min_stations=min_stations,
                        auto_picks=auto_picks,
                        auto_event=auto_event,
                        event_type=event_type,
                        **filter_kwargs)
                old_event_ids = [tup[0] for tup in self.old_events]
                new_events = Catalog([
                    ev for ev in new_events
                    if ev.resource_id.id not in old_event_ids
                ])
                if len(new_events) > 0:
                    Logger.info("Adding {0} new events to the database".format(
                        len(new_events)))
                    for event in new_events:
                        try:
                            origin = (event.preferred_origin()
                                      or event.origins[0])
                        except IndexError:
                            continue
                        try:
                            magnitude = (event.preferred_magnitude()
                                         or event.magnitudes[0])
                        except IndexError:
                            continue
                        Logger.info("Event {0}: M {1:.1f}, lat: {2:.2f}, "
                                    "long: {3:.2f}, depth: {4:.2f}km".format(
                                        event.resource_id.id, magnitude.mag,
                                        origin.latitude, origin.longitude,
                                        origin.depth / 1000.))
                    event_info = [(ev.resource_id.id, event_time(ev))
                                  for ev in new_events]
                    if make_templates:
                        self.template_bank.make_templates(
                            new_events,
                            client=self.waveform_client,
                            **template_kwargs)
                    else:
                        self.template_bank.put_events(new_events)
                    # Putting the events in the bank clears the catalog.
                    self.old_events.extend(event_info)
                    Logger.debug("Old events current state: {0}".format(
                        self.old_events))
            self.previous_time = now
            time.sleep(self.interval)
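The filter_func hook accepts any callable with the signature used in the loop above; extra filter_kwargs are passed straight through. A hedged sketch of a user-defined filter (the min_magnitude parameter is an invention for illustration):

from obspy.core.event import Catalog


def magnitude_filter(catalog, min_stations=0, auto_picks=True,
                     auto_event=True, event_type=None, min_magnitude=2.0):
    """Keep only events with a magnitude of at least min_magnitude."""
    kept = Catalog()
    for event in catalog:
        magnitude = event.preferred_magnitude() or (
            event.magnitudes[0] if event.magnitudes else None)
        if magnitude is not None and magnitude.mag >= min_magnitude:
            kept.append(event)
    return kept

# listener.run(filter_func=magnitude_filter, min_magnitude=3.0)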