Example #1
    def test_round_trip(self):
        config = Config()
        tmp_file = "tmp_config.yml"
        config.write(tmp_file)
        self.files_to_remove.append(tmp_file)

        config_back = read_config(tmp_file)
        self.assertEqual(config, config_back)
Example #2
    def test_equality(self):
        config = Config()
        self.assertEqual(config, config)
        self.assertNotEqual(config, "walrous")
        altered_config = Config(plot={"plot_length": 500.})
        self.assertNotEqual(config, altered_config)

        plot = PlotConfig()
        plot_extras = PlotConfig(animal="albatross")
        self.assertNotEqual(plot, plot_extras)
Example #3
    def test_default_config(self):
        config = Config()
        # Check that we can get attributes
        self.assertIsInstance(config.rt_match_filter.n_stations, int)
        self.assertIsInstance(config.rt_match_filter.plot, bool)
        self.assertIsInstance(config.rt_match_filter.seedlink_server_url, str)
        client = config.rt_match_filter.get_client()
        self.assertTrue(hasattr(client, "get_events"))

        self.assertIsInstance(config.reactor.magnitude_threshold, float)

        self.assertIsInstance(config.plot.lowcut, float)

        self.assertIsInstance(config.database_manager.event_path, str)
Example #4
    def test_init_with_object(self):
        plot = PlotConfig(walrous=True, plot_length=500.)
        config = Config(plot=plot)
        self.assertEqual(config.plot.walrous, True)
        self.assertEqual(config.plot.plot_length, 500.)
Example #5
    def test_init_with_dict(self):
        config = Config(plot={"walrous": True})
        self.assertEqual(config.plot.walrous, True)
        self.assertEqual(config.plot.plot_length, 600.)
Example #6
    def test_bad_init(self):
        with self.assertRaises(NotImplementedError):
            Config(wilf="bob")
Example #7
    def test_logging_setup(self):
        """ Just check that no error is raised """
        config = Config()
        config.setup_logging()
Example #8
    def test_get_streaming_client(self):
        config = Config()
        rt_client = config.streaming.get_streaming_client()
        self.assertEqual(rt_client.server_url, config.streaming.rt_client_url)
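Taken together, the examples above exercise the main `Config` surface: building it from nested dicts or section objects, writing it out and reading it back, and pulling service clients from its sections. The condensed walk-through below is a hypothetical sketch of that usage; the file name and the overridden plot value are placeholders, and `Config` and `read_config` are assumed to come from the same module the tests import them from.

# Condensed, hypothetical walk-through of the Config behaviour shown in the
# tests above; file name and overridden plot value are placeholders.
config = Config(plot={"plot_length": 500.})   # override a nested section via a dict
config.setup_logging()                        # configure logging from the config

config.write("my_config.yml")                 # serialise to disk...
config_back = read_config("my_config.yml")    # ...and read it back
assert config == config_back                  # round-trips compare equal

client = config.rt_match_filter.get_client()          # client exposing get_events etc.
rt_client = config.streaming.get_streaming_client()   # real-time streaming client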
Example #9
def synthesise_real_time(
    triggering_event: Event,
    database_duration: float,
    config: Config,
    detection_runtime: float = 3600.0,
    make_templates: bool = True,
    speed_up: float = 1,
    debug: bool = False,
    query_interval: float = 60,
):
    """
    Synthesise a real-time matched-filter process for old data.

    Parameters
    ----------
    triggering_event:
        The Event that should trigger the system (must have happened in the
        past)
    database_duration:
        Duration of the template database, in days prior to the triggering
        event
    config:
        Configuration for this synthesis
    detection_runtime:
        Maximum run-time for the detector in seconds
    make_templates:
        Whether templates need to be made or not.
    speed_up:
        Speed-up factor for detector - stream data faster than "real-time".
    debug:
        Whether to run logging at debug level or not
    query_interval:
        How often to query the waveform server, in seconds. Smaller values
        query more often, but throughput is limited by disk read speed - do
        not go so small that the system stalls!
    """
    if debug:
        config.log_level = "DEBUG"
        print("Using the following configuration:\n{0}".format(config))
    config.setup_logging()

    client = config.rt_match_filter.get_client()

    trigger_origin = (triggering_event.preferred_origin()
                      or triggering_event.origins[0])
    region = estimate_region(triggering_event)
    database_starttime = trigger_origin.time - (database_duration * 86400)
    database_endtime = trigger_origin.time

    trigger_func = partial(
        magnitude_rate_trigger_func,
        magnitude_threshold=config.reactor.magnitude_threshold,
        rate_threshold=config.reactor.rate_threshold,
        rate_bin=config.reactor.rate_radius,
        minimum_events_in_bin=config.reactor.minimum_events_in_bin)

    template_bank = TemplateBank(
        config.database_manager.event_path,
        name_structure=config.database_manager.name_structure,
        event_format=config.database_manager.event_format,
        path_structure=config.database_manager.path_structure,
        event_ext=config.database_manager.event_ext,
        executor=None)

    if make_templates:
        Logger.info("Downloading template events")
        catalog = client.get_events(starttime=database_starttime,
                                    endtime=database_endtime,
                                    **region)
        Logger.info(f"Downloaded {len(catalog)} events")
        Logger.info("Building template database")
        template_bank.make_templates(catalog=catalog,
                                     client=client,
                                     **config.template)
    else:
        template_bank.update_index()
    tribe = template_bank.get_templates(starttime=database_starttime,
                                        endtime=database_endtime,
                                        **region)
    inventory = get_inventory(client,
                              tribe,
                              triggering_event=triggering_event,
                              max_distance=config.rt_match_filter.max_distance,
                              n_stations=config.rt_match_filter.n_stations)

    config.plot.update({"offline": True})  # Use data time-stamps rather than wall-clock time

    Logger.info("Downloading data")
    wavebank = WaveBank("simulation_wavebank")
    for network in inventory:
        for station in network:
            for channel in station:
                try:
                    st = client.get_waveforms(
                        network=network.code,
                        station=station.code,
                        channel=channel.code,
                        location=channel.location_code,
                        starttime=trigger_origin.time - 60.,
                        endtime=trigger_origin.time + detection_runtime)
                except Exception as e:
                    Logger.error("Could not download data for "
                                 f"{network.code}.{station.code}."
                                 f"{channel.location_code}.{channel.code}")
                    Logger.error(e)
                    continue
                wavebank.put_waveforms(st)

    # Set up config to use the wavebank rather than FDSN.
    config.streaming.update({
        "rt_client_url": str(wavebank.bank_path),
        "rt_client_type": "obsplus",
        "starttime": trigger_origin.time - 60,
        "speed_up": speed_up,
        "query_interval": 1.0
    })

    listener = CatalogListener(client=client,
                               catalog_lookup_kwargs=region,
                               template_bank=template_bank,
                               interval=query_interval,
                               keep=86400,
                               catalog=None,
                               waveform_client=client)
    listener._speed_up = speed_up
    listener._test_start_step = UTCDateTime.now() - trigger_origin.time
    listener._test_start_step += 60  # Start up 1 minute before the event

    reactor = Reactor(client=client,
                      listener=listener,
                      trigger_func=trigger_func,
                      template_database=template_bank,
                      config=config)
    Logger.info("Starting reactor")
    reactor.run(max_run_length=config.reactor.max_run_length)
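
For context, a minimal driver for `synthesise_real_time` might look like the sketch below. This is an illustration only: the import path for `Config` is an assumption, the event ID is a placeholder for a real past event, and the client returned by the configuration is assumed to accept an `eventid` query.

# Hypothetical driver for synthesise_real_time; the import path and the
# event ID are assumptions / placeholders, not documented API.
from rt_eqcorrscan.config import Config  # assumed module path

config = Config()
client = config.rt_match_filter.get_client()

# The triggering event must have happened in the past (placeholder event ID;
# assumes the client supports eventid-based queries).
triggering_event = client.get_events(eventid="<event-id>")[0]

synthesise_real_time(
    triggering_event=triggering_event,
    database_duration=30.,     # build the template database from 30 days of prior events
    config=config,
    detection_runtime=3600.,   # stream one hour of archived data
    make_templates=True,
    speed_up=10.,              # replay data ten times faster than real time
    debug=False,
    query_interval=60.,
)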