Example #1
# `should_add_diagnostic_data` is exercised with both values via parametrization.
@pytest.mark.parametrize("should_add_diagnostic_data", [True, False])
async def test_process_gives_diagnostic_data(
    create_train_load_and_process_diet: Callable[..., Message],
    default_execution_context: ExecutionContext,
    should_add_diagnostic_data: bool,
):
    """Tests if processing a message returns attention weights as numpy array."""
    default_execution_context.should_add_diagnostic_data = should_add_diagnostic_data
    default_execution_context.node_name = "DIETClassifier_node_name"
    processed_message = create_train_load_and_process_diet({EPOCHS: 1})

    if should_add_diagnostic_data:
        diagnostic_data = processed_message.get(DIAGNOSTIC_DATA)

        # DIETClassifier should add attention weights and the transformed text
        # under its node name.
        name = "DIETClassifier_node_name"
        assert isinstance(diagnostic_data, dict)
        assert name in diagnostic_data
        assert "attention_weights" in diagnostic_data[name]
        assert isinstance(diagnostic_data[name].get("attention_weights"),
                          np.ndarray)
        assert "text_transformed" in diagnostic_data[name]
        assert isinstance(diagnostic_data[name].get("text_transformed"),
                          np.ndarray)
    else:
        assert DIAGNOSTIC_DATA not in processed_message.data
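
Neither example includes its import block; both lean on fixtures defined in Rasa's own test suite (e.g. default_execution_context, create_train_load_and_process_diet, create_response_selector, train_and_preprocess, process_message). As a rough sketch, the imports below are what the snippets appear to need, assuming a Rasa 3.x module layout; the exact paths (in particular the one for DIAGNOSTIC_DATA) may differ between versions.

# Sketch of the imports the examples appear to rely on (assumed Rasa 3.x paths,
# not copied from the original source files).
from typing import Any, Callable, Dict, List, Text, Tuple

import numpy as np
import pytest

from rasa.engine.graph import ExecutionContext, GraphComponent
from rasa.nlu.featurizers.sparse_featurizer.count_vectors_featurizer import (
    CountVectorsFeaturizer,
)
from rasa.nlu.selectors.response_selector import ResponseSelector
from rasa.nlu.tokenizers.whitespace_tokenizer import WhitespaceTokenizer
from rasa.shared.constants import DIAGNOSTIC_DATA  # path is an assumption
from rasa.shared.importers.rasa import RasaFileImporter
from rasa.shared.nlu.constants import TEXT
from rasa.shared.nlu.training_data.message import Message
from rasa.shared.nlu.training_data.training_data import TrainingData
from rasa.utils.tensorflow.constants import EPOCHS
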
Example #2
async def test_process_gives_diagnostic_data(
    default_execution_context: ExecutionContext,
    create_response_selector: Callable[[Dict[Text, Any]], ResponseSelector],
    train_and_preprocess: Callable[..., Tuple[TrainingData,
                                              List[GraphComponent]]],
    process_message: Callable[..., Message],
):
    """Tests if processing a message returns attention weights as numpy array."""
    pipeline = [
        {"component": WhitespaceTokenizer},
        {"component": CountVectorsFeaturizer},
    ]
    config_params = {EPOCHS: 1}

    importer = RasaFileImporter(
        config_file="data/test_response_selector_bot/config.yml",
        domain_path="data/test_response_selector_bot/domain.yml",
        training_data_paths=[
            "data/test_response_selector_bot/data/rules.yml",
            "data/test_response_selector_bot/data/stories.yml",
            "data/test_response_selector_bot/data/nlu.yml",
        ],
    )
    training_data = importer.get_nlu_data()

    training_data, loaded_pipeline = train_and_preprocess(
        pipeline, training_data)

    default_execution_context.should_add_diagnostic_data = True

    response_selector = create_response_selector(config_params)
    response_selector.train(training_data=training_data)

    message = Message(data={TEXT: "hello"})
    message = process_message(loaded_pipeline, message)

    classified_message = response_selector.process([message])[0]
    diagnostic_data = classified_message.get(DIAGNOSTIC_DATA)

    assert isinstance(diagnostic_data, dict)
    for values in diagnostic_data.values():
        assert "text_transformed" in values
        assert isinstance(values.get("text_transformed"), np.ndarray)
        # The `attention_weights` key should exist, regardless of there
        # being a transformer
        assert "attention_weights" in values
        # By default, ResponseSelector has `number_of_transformer_layers = 0`
        # in which case the attention weights should be None.
        assert values.get("attention_weights") is None
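
As a follow-up sketch (not part of the original test): if the ResponseSelector were configured with a non-zero "number_of_transformer_layers" (the config key named in the comment above), the attention weights would be expected as numpy arrays instead of None. The snippet reuses create_response_selector, training_data, and message from Example #2.

# Hypothetical variant: enable the transformer so attention weights are populated.
config_with_transformer = {EPOCHS: 1, "number_of_transformer_layers": 1}
selector_with_transformer = create_response_selector(config_with_transformer)
selector_with_transformer.train(training_data=training_data)

classified = selector_with_transformer.process([message])[0]
for values in classified.get(DIAGNOSTIC_DATA).values():
    assert "attention_weights" in values
    assert isinstance(values.get("attention_weights"), np.ndarray)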