def test_sql_broker_from_config():
    """The SQL endpoint config should produce a `SQLEventBroker` backed by sqlite."""
    broker_config = read_endpoint_config(
        "data/test_endpoints/event_brokers/sql_endpoint.yml", "event_broker"
    )

    broker = EventBroker.create(broker_config)

    assert isinstance(broker, SQLEventBroker)
    # The endpoint file configures a sqlite database.
    assert broker.engine.name == "sqlite"
def test_file_broker_from_config():
    """The file endpoint config should produce a `FileEventBroker` with its path."""
    broker_config = read_endpoint_config(
        "data/test_endpoints/event_brokers/file_endpoint.yml", "event_broker"
    )

    broker = EventBroker.create(broker_config)

    assert isinstance(broker, FileEventBroker)
    assert broker.path == "rasa_event.log"
async def test_events_schema(
    monkeypatch: MonkeyPatch, default_agent: Agent, config_path: Text
):
    """Fire every known backend telemetry event and validate each reported
    payload against the schemas declared in the telemetry events JSON file."""
    # Debug mode prints events instead of sending them; patching the printer
    # lets us collect everything that would have been reported.
    monkeypatch.setenv("RASA_TELEMETRY_DEBUG", "true")
    monkeypatch.setenv("RASA_TELEMETRY_ENABLED", "true")
    print_event_mock = Mock()
    monkeypatch.setattr(telemetry, "print_telemetry_event", print_event_mock)

    with open(TELEMETRY_EVENTS_JSON) as schema_file:
        schemas = json.load(schema_file)["events"]

    tasks_before = asyncio.all_tasks()

    # Generate all known backend telemetry events, and then use events.json to
    # validate their schema.
    training_data = TrainingDataImporter.load_from_config(config_path)
    with telemetry.track_model_training(training_data, "rasa"):
        await asyncio.sleep(1)

    telemetry.track_telemetry_disabled()
    telemetry.track_data_split(0.5, "nlu")
    telemetry.track_validate_files(True)
    telemetry.track_data_convert("yaml", "nlu")
    telemetry.track_tracker_export(5, TrackerStore(domain=None), EventBroker())
    telemetry.track_interactive_learning_start(True, False)
    telemetry.track_server_start([CmdlineInput()], None, None, 42, True)
    telemetry.track_project_init("tests/")
    telemetry.track_shell_started("nlu")
    telemetry.track_rasa_x_local()
    telemetry.track_visualization()
    telemetry.track_core_model_test(5, True, default_agent)
    telemetry.track_nlu_model_test(TrainingData())

    # Telemetry is reported asynchronously — wait for every task spawned above.
    spawned_tasks = asyncio.all_tasks() - tasks_before
    await asyncio.gather(*spawned_tasks)

    assert print_event_mock.call_count == 15

    for call_args, _ in print_event_mock.call_args_list:
        event = call_args[0]
        # `metrics_id` automatically gets added to all events but is not part
        # of the schema, so it has to be removed before validation.
        del event["properties"]["metrics_id"]
        jsonschema.validate(
            instance=event["properties"], schema=schemas[event["event"]]
        )
async def load_agent_on_start(
    model_path: Text,
    endpoints: AvailableEndpoints,
    remote_storage: Optional[Text],
    app: Sanic,
    loop: AbstractEventLoop,
):
    """Load an agent.

    Used to be scheduled on server start (hence the `app` and `loop` arguments).
    """
    # noinspection PyBroadException
    try:
        # Prefer an interpreter from the configured NLU endpoint, falling back
        # to the NLU model packaged inside the stored model archive.
        with model.get_model(model_path) as unpacked_model:
            _, nlu_model = model.get_model_subdirectories(unpacked_model)
            interpreter = NaturalLanguageInterpreter.create(endpoints.nlu or nlu_model)
    except Exception:
        logger.debug(f"Could not load interpreter from '{model_path}'.")
        interpreter = None

    broker = EventBroker.create(endpoints.event_broker)
    tracker_store = TrackerStore.create(endpoints.tracker_store, event_broker=broker)
    lock_store = LockStore.create(endpoints.lock_store)

    model_server = None
    if endpoints and endpoints.model:
        model_server = endpoints.model

    try:
        app.agent = await agent.load_agent(
            model_path,
            model_server=model_server,
            remote_storage=remote_storage,
            interpreter=interpreter,
            generator=endpoints.nlg,
            tracker_store=tracker_store,
            lock_store=lock_store,
            action_endpoint=endpoints.action,
        )
    except Exception as e:
        rasa.shared.utils.io.raise_warning(
            f"The model at '{model_path}' could not be loaded. " f"Error: {e}"
        )
        app.agent = None

    if not app.agent:
        rasa.shared.utils.io.raise_warning(
            "Agent could not be loaded with the provided configuration. "
            "Load default agent without any model."
        )
        # Fall back to a model-less agent so the server still comes up.
        app.agent = Agent(
            interpreter=interpreter,
            generator=endpoints.nlg,
            tracker_store=tracker_store,
            action_endpoint=endpoints.action,
            model_server=model_server,
            remote_storage=remote_storage,
        )

    logger.info("Rasa server is up and running.")
    return app.agent
def test_load_custom_broker_name(tmp_path: Path):
    """A fully-qualified class path as `type` should resolve to that broker."""
    broker_config = EndpointConfig(
        type="rasa.core.brokers.file.FileEventBroker",
        path=str(tmp_path / "rasa_event.log"),
    )
    assert EventBroker.create(broker_config)
def create_agent(model: Text, endpoints: Text = None) -> "Agent":
    """Create an agent instance based on a stored model.

    Args:
        model: file path to the stored model
        endpoints: file path to the used endpoint configuration
    """
    # Imports are function-local to keep the module import cheap.
    from rasa.core.tracker_store import TrackerStore
    from rasa.core.utils import AvailableEndpoints
    from rasa.core.brokers.broker import EventBroker
    import rasa.utils.common

    available_endpoints = AvailableEndpoints.read_endpoints(endpoints)
    # `EventBroker.create` returns an awaitable in this version, so it has to
    # be driven to completion on an event loop.
    broker = rasa.utils.common.run_in_loop(
        EventBroker.create(available_endpoints.event_broker)
    )
    tracker_store = TrackerStore.create(
        available_endpoints.tracker_store, event_broker=broker
    )
    lock_store = LockStore.create(available_endpoints.lock_store)

    return Agent.load(
        model,
        generator=available_endpoints.nlg,
        tracker_store=tracker_store,
        lock_store=lock_store,
        action_endpoint=available_endpoints.action,
    )
async def load_agent_on_start( model_path: Text, endpoints: AvailableEndpoints, remote_storage: Optional[Text], app: Sanic, loop: Text, ): """Load an agent. Used to be scheduled on server start (hence the `app` and `loop` arguments).""" # noinspection PyBroadException # bf mod try: with model.get_model(model_path) as unpacked_model: _, nlu_models = model.get_model_subdirectories(unpacked_model) _interpreter = {} for lang, nlu_model in nlu_models.items(): _interpreter[lang] = NaturalLanguageInterpreter.create( endpoints.nlu or nlu_model) except Exception: logger.debug(f"Could not load interpreter from '{model_path}'.") _interpreter = {} # /bf mod _broker = EventBroker.create(endpoints.event_broker) _tracker_store = TrackerStore.create(endpoints.tracker_store, event_broker=_broker) _lock_store = LockStore.create(endpoints.lock_store) model_server = endpoints.model if endpoints and endpoints.model else None app.agent = await agent.load_agent( model_path, model_server=model_server, remote_storage=remote_storage, interpreter=_interpreter, generator=endpoints.nlg, tracker_store=_tracker_store, lock_store=_lock_store, action_endpoint=endpoints.action, ) if not app.agent: raise_warning( "Agent could not be loaded with the provided configuration. " "Load default agent without any model.") app.agent = Agent( interpreter=_interpreter, generator=endpoints.nlg, tracker_store=_tracker_store, action_endpoint=endpoints.action, model_server=model_server, remote_storage=remote_storage, ) return app.agent
def _load_endpoints(self, endpoints: Optional[Text] = None):
    """Load the endpoints file and initialize the stores it configures."""
    # (docstring translated from Chinese: "load the endpoints file")
    # Re-binds the parameter: path in, parsed `AvailableEndpoints` out.
    endpoints = AvailableEndpoints.read_endpoints(endpoints)
    broker = EventBroker.create(endpoints.event_broker)
    self.tracker_store = TrackerStore.create(endpoints.tracker_store, event_broker=broker)
    self.generator = endpoints.nlg
    self.action_endpoint = endpoints.action
    self.lock_store = LockStore.create(endpoints.lock_store)
def test_pika_broker_from_config():
    """The pika endpoint config should produce a configured `PikaEventBroker`."""
    broker_config = read_endpoint_config(
        "data/test_endpoints/event_brokers/pika_endpoint.yml", "event_broker"
    )

    broker = EventBroker.create(broker_config)

    assert isinstance(broker, PikaEventBroker)
    assert broker.host == "localhost"
    assert broker.username == "username"
    assert broker.queues == ["queue-1"]
def test_dashbot_config():
    """Reads the `event_broker` section from the endpoints file next to this test
    and checks the resulting broker's proxy and API-key configuration."""
    cfg = read_endpoint_config(
        os.path.join(os.path.dirname(__file__), "data/rasa_endpoints.yml"),
        "event_broker")
    actual = EventBroker.create(cfg)
    # NOTE(review): `rasa` here appears to be a module, and `isinstance(...,
    # <module>)` raises TypeError — this probably should name the concrete
    # broker class (e.g. a Dashbot broker). Confirm intent before changing.
    assert isinstance(actual, rasa)
    assert actual.proxies['http'] == 'http://10.10.1.10:3128'
    assert actual.proxies['https'] == 'http://10.10.1.10:1080'
    assert actual.apiKey == 'here'
def test_pika_broker_from_config(monkeypatch: MonkeyPatch):
    """Creating a broker from the pika endpoint config sets host/user/queues."""
    # Patch `PikaEventBroker` so it doesn't try to connect to RabbitMQ on init.
    monkeypatch.setattr(PikaEventBroker, "_connect", lambda _: None)

    broker_config = read_endpoint_config(
        "data/test_endpoints/event_brokers/pika_endpoint.yml", "event_broker"
    )
    broker = EventBroker.create(broker_config)

    assert isinstance(broker, PikaEventBroker)
    assert broker.host == "localhost"
    assert broker.username == "username"
    assert broker.queues == ["queue-1"]
def test_file_broker_logs_to_file(tmpdir):
    """Published events should round-trip through the file broker's log file."""
    events_file = tmpdir.join("events.log").strpath
    broker = EventBroker.create(EndpointConfig(type="file", path=events_file))

    for event in TEST_EVENTS:
        broker.publish(event.as_dict())

    # The broker writes one JSON-serialized event per line.
    with open(events_file, "r") as log:
        recovered = [Event.from_parameters(json.loads(line)) for line in log]

    assert recovered == TEST_EVENTS
def test_file_broker_from_config(tmp_path: Path):
    """An endpoints file with a `file` broker section yields a `FileEventBroker`."""
    # Backslashes need to be encoded (windows...) otherwise we run into unicode issues.
    log_path = str(tmp_path / "rasa_test_event.log").replace("\\", "\\\\")
    endpoint_config = textwrap.dedent(
        f"""
        event_broker:
          path: "{log_path}"
          type: "file"
        """
    )
    rasa.utils.io.write_text_file(endpoint_config, tmp_path / "endpoint.yml")

    broker_config = read_endpoint_config(str(tmp_path / "endpoint.yml"), "event_broker")
    broker = EventBroker.create(broker_config)

    assert isinstance(broker, FileEventBroker)
    assert broker.path.endswith("rasa_test_event.log")
def test_file_broker_properly_logs_newlines(tmpdir):
    """Events containing newlines must still be stored as one line each."""
    events_file = tmpdir.join("events.log").strpath
    broker = EventBroker.create(EndpointConfig(type="file", path=events_file))

    event_with_newline = UserUttered("hello \n there")
    broker.publish(event_with_newline.as_dict())

    # The broker writes one JSON-serialized event per line.
    with open(events_file, "r") as log:
        recovered = [Event.from_parameters(json.loads(line)) for line in log]

    assert recovered == [event_with_newline]
async def _load_agent(
    model_path: Optional[Text] = None,
    model_server: Optional[EndpointConfig] = None,
    remote_storage: Optional[Text] = None,
    endpoints: Optional[AvailableEndpoints] = None,
    lock_store: Optional[LockStore] = None,
) -> Agent:
    """Load an agent from a model, wiring up any configured endpoints.

    Raises:
        ErrorResponse: 500 if loading fails, 400 if no agent could be loaded.
    """
    try:
        tracker_store = None
        generator = None
        action_endpoint = None

        if endpoints:
            event_broker = EventBroker.create(endpoints.event_broker)
            tracker_store = TrackerStore.create(
                endpoints.tracker_store, event_broker=event_broker
            )
            generator = endpoints.nlg
            action_endpoint = endpoints.action
            # Only fall back to the endpoint-configured lock store when the
            # caller didn't supply one explicitly.
            if not lock_store:
                lock_store = LockStore.create(endpoints.lock_store)

        loaded_agent = await agent.load_agent(
            model_path,
            model_server,
            remote_storage,
            generator=generator,
            tracker_store=tracker_store,
            lock_store=lock_store,
            action_endpoint=action_endpoint,
        )
    except Exception as e:
        logger.debug(traceback.format_exc())
        raise ErrorResponse(
            500, "LoadingError", f"An unexpected error occurred. Error: {e}"
        )

    if not loaded_agent:
        raise ErrorResponse(
            400,
            "BadRequest",
            f"Agent with name '{model_path}' could not be loaded.",
            {"parameter": "model", "in": "query"},
        )

    return loaded_agent
def test_sql_broker_logs_to_sql_db():
    """Events published through the SQL broker end up as rows in the database."""
    broker_config = read_endpoint_config(
        "data/test_endpoints/event_brokers/sql_endpoint.yml", "event_broker"
    )
    broker = EventBroker.create(broker_config)
    assert isinstance(broker, SQLEventBroker)

    for event in TEST_EVENTS:
        broker.publish(event.as_dict())

    with broker.session_scope() as session:
        # Each row stores the serialized event JSON in its `data` column.
        stored_event_types = [
            json.loads(row.data)["event"]
            for row in session.query(broker.SQLBrokerEvent).all()
        ]
        assert stored_event_types == ["user", "slot", "restart"]
def test_file_broker_logs_to_file(tmp_path: Path):
    """Published events should round-trip through the broker's log file."""
    log_file_path = str(tmp_path / "events.log")
    broker = EventBroker.create(EndpointConfig(type="file", path=log_file_path))

    for event in TEST_EVENTS:
        broker.publish(event.as_dict())

    # The broker writes one JSON-serialized event per line.
    with open(log_file_path, "r") as log_file:
        recovered = [Event.from_parameters(json.loads(line)) for line in log_file]

    assert recovered == TEST_EVENTS
def create_agent(model: Text, endpoints: Text = None) -> "Agent":
    """Create an agent instance based on a stored model.

    Args:
        model: file path to the stored model
        endpoints: file path to the used endpoint configuration
    """
    # Imports are function-local to keep the module import cheap.
    from rasa.core.tracker_store import TrackerStore
    from rasa.core.utils import AvailableEndpoints
    from rasa.core.agent import Agent
    from rasa.core.brokers.broker import EventBroker

    available_endpoints = AvailableEndpoints.read_endpoints(endpoints)
    broker = EventBroker.create(available_endpoints.event_broker)
    tracker_store = TrackerStore.create(
        available_endpoints.tracker_store, event_broker=broker
    )
    lock_store = LockStore.create(available_endpoints.lock_store)

    return Agent.load(
        model,
        generator=available_endpoints.nlg,
        tracker_store=tracker_store,
        lock_store=lock_store,
        action_endpoint=available_endpoints.action,
    )
def test_file_broker_properly_logs_newlines(tmp_path):
    """Events containing newlines must still be stored as one line each."""
    log_file_path = str(tmp_path / "events.log")
    broker = EventBroker.create(EndpointConfig(type="file", path=log_file_path))

    event_with_newline = UserUttered("hello \n there")
    broker.publish(event_with_newline.as_dict())

    # The broker writes one JSON-serialized event per line.
    with open(log_file_path, "r") as log_file:
        recovered = [Event.from_parameters(json.loads(line)) for line in log_file]

    assert recovered == [event_with_newline]
def _get_event_broker(endpoints: "AvailableEndpoints") -> Optional["EventBroker"]:
    """Get `EventBroker` from `endpoints`.

    Prints an error and exits if no event broker could be loaded.

    Args:
        endpoints: `AvailableEndpoints` to initialize the event broker from.

    Returns:
        Initialized event broker.
    """
    if not endpoints.event_broker:
        # `print_error_and_exit` terminates the process, so execution never
        # continues past this branch without a broker section present.
        cli_utils.print_error_and_exit(
            f"Could not find an `event_broker` section in the supplied "
            f"endpoints file. Instructions on how to configure an event broker "
            f"can be found here: {DOCS_URL_EVENT_BROKERS}. Exiting."
        )

    # Imported lazily, only once a broker section is known to exist.
    from rasa.core.brokers.broker import EventBroker

    return EventBroker.create(endpoints.event_broker)
def test_no_broker_in_config():
    """Endpoint files without an `event_broker` section should yield no broker."""
    broker_config = read_endpoint_config(DEFAULT_ENDPOINTS_FILE, "event_broker")
    assert EventBroker.create(broker_config) is None
def test_load_non_existent_custom_broker_name():
    """An unresolvable custom broker class path should result in no broker."""
    broker_config = EndpointConfig(type="rasa.core.brokers.my.MyProducer")
    assert EventBroker.create(broker_config) is None
def test_load_custom_broker_name():
    """A fully-qualified broker class path in `type` should load that broker."""
    broker_config = EndpointConfig(type="rasa.core.brokers.file.FileEventBroker")
    assert EventBroker.create(broker_config)