Example #1
def test_pipeline(document_store, retriever):
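    # Write four documents with random embeddings, run a single-node retrieval
    # pipeline, and check that top_k_retriever documents come back.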
    documents = [
        {
            "name": "name_1",
            "text": "text_1",
            "embedding": np.random.rand(768).astype(np.float32)
        },
        {
            "name": "name_2",
            "text": "text_2",
            "embedding": np.random.rand(768).astype(np.float32)
        },
        {
            "name": "name_3",
            "text": "text_3",
            "embedding": np.random.rand(768).astype(np.float64)
        },
        {
            "name": "name_4",
            "text": "text_4",
            "embedding": np.random.rand(768).astype(np.float32)
        },
    ]
    document_store.write_documents(documents)
    pipeline = Pipeline()
    pipeline.add_node(component=retriever, name="FAISS", inputs=["Query"])
    output = pipeline.run(query="How to test this?", top_k_retriever=3)
    assert len(output["documents"]) == 3
Example #2
def test_load_yaml(document_store_with_docs):

    # test correct load from yaml
    pipeline = Pipeline.load_from_yaml(Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="my_query")
    prediction = pipeline.run(query="Who lives in Berlin?", top_k_retriever=10, top_k_reader=3)
    assert prediction["query"] == "Who lives in Berlin?"
    assert prediction["answers"][0]["answer"] == "Carla"

    # test invalid pipeline name
    with pytest.raises(Exception):
        Pipeline.load_from_yaml(path=Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="invalid")
Example #3
def test_graph_creation(reader, retriever_with_docs, document_store_with_docs):
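    # Adding a node whose inputs reference a non-existent output edge or an
    # unknown node should fail at graph-construction time.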
    pipeline = Pipeline()
    pipeline.add_node(name="ES", component=retriever_with_docs, inputs=["Query"])

    with pytest.raises(AssertionError):
        pipeline.add_node(name="Reader", component=retriever_with_docs, inputs=["ES.output_2"])

    with pytest.raises(AssertionError):
        pipeline.add_node(name="Reader", component=retriever_with_docs, inputs=["ES.wrong_edge_label"])

    with pytest.raises(Exception):
        pipeline.add_node(name="Reader", component=retriever_with_docs, inputs=["InvalidNode"])
Example #4
def test_load_yaml(document_store_with_docs):
    # test correct load of indexing pipeline from yaml
    pipeline = Pipeline.load_from_yaml(Path("samples/pipeline/test_pipeline.yaml"),
                                       pipeline_name="test_indexing_pipeline")
    pipeline.run(file_path=Path("samples/pdf/sample_pdf_1.pdf"), top_k_retriever=10, top_k_reader=3)

    # test correct load of query pipeline from yaml
    pipeline = Pipeline.load_from_yaml(Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="test_query_pipeline")
    prediction = pipeline.run(query="Who made the PDF specification?", top_k_retriever=10, top_k_reader=3)
    assert prediction["query"] == "Who made the PDF specification?"
    assert prediction["answers"][0]["answer"] == "Adobe Systems"

    # test invalid pipeline name
    with pytest.raises(Exception):
        Pipeline.load_from_yaml(path=Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="invalid")
Example #5
def test_load_and_save_yaml(document_store_with_docs, tmp_path):
    # test correct load of indexing pipeline from yaml
    pipeline = Pipeline.load_from_yaml(Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="indexing_pipeline")
    pipeline.run(file_path=Path("samples/pdf/sample_pdf_1.pdf"), top_k_retriever=10, top_k_reader=3)

    # test correct load of query pipeline from yaml
    pipeline = Pipeline.load_from_yaml(Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="query_pipeline")
    prediction = pipeline.run(query="Who made the PDF specification?", top_k_retriever=10, top_k_reader=3)
    assert prediction["query"] == "Who made the PDF specification?"
    assert prediction["answers"][0]["answer"] == "Adobe Systems"

    # test invalid pipeline name
    with pytest.raises(Exception):
        Pipeline.load_from_yaml(path=Path("samples/pipeline/test_pipeline.yaml"), pipeline_name="invalid")

    # test config export
    pipeline.save_to_yaml(tmp_path / "test.yaml")
    with open(tmp_path/"test.yaml", "r", encoding='utf-8') as stream:
        saved_yaml = stream.read()
    expected_yaml = '''
        components:
        - name: ESRetriever
          params:
            document_store: ElasticsearchDocumentStore
          type: ElasticsearchRetriever
        - name: ElasticsearchDocumentStore
          params:
            index: haystack_test_document
            label_index: haystack_test_label
          type: ElasticsearchDocumentStore
        - name: Reader
          params:
            model_name_or_path: deepset/roberta-base-squad2
            no_ans_boost: -10
          type: FARMReader
        pipelines:
        - name: query
          nodes:
          - inputs:
            - Query
            name: ESRetriever
          - inputs:
            - ESRetriever
            name: Reader
          type: Query
        version: '0.8'
    '''
    assert saved_yaml.replace(" ", "").replace("\n", "") == expected_yaml.replace(" ", "").replace("\n", "")
Example #6
def test_join_document_pipeline(document_store_with_docs, reader):
    es = ElasticsearchRetriever(document_store=document_store_with_docs)
    dpr = DensePassageRetriever(
        document_store=document_store_with_docs,
        query_embedding_model="facebook/dpr-question_encoder-single-nq-base",
        passage_embedding_model="facebook/dpr-ctx_encoder-single-nq-base",
        use_gpu=False,
    )
    document_store_with_docs.update_embeddings(dpr)

    query = "Where does Carla lives?"

    # test merge without weights
    join_node = JoinDocuments(join_mode="merge")
    p = Pipeline()
    p.add_node(component=es, name="R1", inputs=["Query"])
    p.add_node(component=dpr, name="R2", inputs=["Query"])
    p.add_node(component=join_node, name="Join", inputs=["R1", "R2"])
    results = p.run(query=query)
    assert len(results["documents"]) == 3

    # test merge with weights
    join_node = JoinDocuments(join_mode="merge",
                              weights=[1000, 1],
                              top_k_join=2)
    p = Pipeline()
    p.add_node(component=es, name="R1", inputs=["Query"])
    p.add_node(component=dpr, name="R2", inputs=["Query"])
    p.add_node(component=join_node, name="Join", inputs=["R1", "R2"])
    results = p.run(query=query)
    assert results["documents"][0].score > 1000
    assert len(results["documents"]) == 2

    # test concatenate
    join_node = JoinDocuments(join_mode="concatenate")
    p = Pipeline()
    p.add_node(component=es, name="R1", inputs=["Query"])
    p.add_node(component=dpr, name="R2", inputs=["Query"])
    p.add_node(component=join_node, name="Join", inputs=["R1", "R2"])
    results = p.run(query=query)
    assert len(results["documents"]) == 3

    # test join_node with reader
    join_node = JoinDocuments()
    p = Pipeline()
    p.add_node(component=es, name="R1", inputs=["Query"])
    p.add_node(component=dpr, name="R2", inputs=["Query"])
    p.add_node(component=join_node, name="Join", inputs=["R1", "R2"])
    p.add_node(component=reader, name="Reader", inputs=["Join"])
    results = p.run(query=query)
    assert results["answers"][0]["answer"] == "Berlin"
Example #7
def test_query_keyword_statement_classifier():
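    # The query classifiers below route natural-language questions down output_1
    # and keyword-style queries down output_2; the two stub nodes record which
    # branch was taken so the assertions can check the routing.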
    class KeywordOutput(RootNode):
        outgoing_edges = 2

        def run(self, **kwargs):
            kwargs["output"] = "keyword"
            return kwargs, "output_1"

    class QuestionOutput(RootNode):
        outgoing_edges = 2

        def run(self, **kwargs):
            kwargs["output"] = "question"
            return kwargs, "output_2"

    pipeline = Pipeline()
    pipeline.add_node(
        name="SkQueryKeywordQuestionClassifier",
        component=SklearnQueryClassifier(),
        inputs=["Query"],
    )
    pipeline.add_node(
        name="KeywordNode",
        component=KeywordOutput(),
        inputs=["SkQueryKeywordQuestionClassifier.output_2"],
    )
    pipeline.add_node(
        name="QuestionNode",
        component=QuestionOutput(),
        inputs=["SkQueryKeywordQuestionClassifier.output_1"],
    )
    output = pipeline.run(query="morse code")
    assert output["output"] == "keyword"

    output = pipeline.run(query="How old is John?")
    assert output["output"] == "question"

    pipeline = Pipeline()
    pipeline.add_node(
        name="TfQueryKeywordQuestionClassifier",
        component=TransformersQueryClassifier(),
        inputs=["Query"],
    )
    pipeline.add_node(
        name="KeywordNode",
        component=KeywordOutput(),
        inputs=["TfQueryKeywordQuestionClassifier.output_2"],
    )
    pipeline.add_node(
        name="QuestionNode",
        component=QuestionOutput(),
        inputs=["TfQueryKeywordQuestionClassifier.output_1"],
    )
    output = pipeline.run(query="morse code")
    assert output["output"] == "keyword"

    output = pipeline.run(query="How old is John?")
    assert output["output"] == "question"
Example #8
def test_parallel_paths_in_pipeline_graph_with_branching():
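    # Each stub node appends a letter to the running "output" string, so the
    # assertions can verify which branches executed and in what order.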
    class AWithOutput1(RootNode):
        outgoing_edges = 2

        def run(self, **kwargs):
            kwargs["output"] = "A"
            return kwargs, "output_1"

    class AWithOutput2(RootNode):
        outgoing_edges = 2

        def run(self, **kwargs):
            kwargs["output"] = "A"
            return kwargs, "output_2"

    class AWithOutputAll(RootNode):
        outgoing_edges = 2

        def run(self, **kwargs):
            kwargs["output"] = "A"
            return kwargs, "output_all"

    class B(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "B"
            return kwargs, "output_1"

    class C(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "C"
            return kwargs, "output_1"

    class D(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "D"
            return kwargs, "output_1"

    class E(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "E"
            return kwargs, "output_1"

    class JoinNode(RootNode):
        def run(self, **kwargs):
            if kwargs.get("inputs"):
                kwargs["output"] = ""
                for input_dict in kwargs["inputs"]:
                    kwargs["output"] += input_dict["output"]
            return kwargs, "output_1"

    pipeline = Pipeline()
    pipeline.add_node(name="A", component=AWithOutput1(), inputs=["Query"])
    pipeline.add_node(name="B", component=B(), inputs=["A.output_1"])
    pipeline.add_node(name="C", component=C(), inputs=["A.output_2"])
    pipeline.add_node(name="D", component=E(), inputs=["B"])
    pipeline.add_node(name="E", component=D(), inputs=["B"])
    pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E", "C"])
    output = pipeline.run(query="test")
    assert output["output"] == "ABEABD"

    pipeline = Pipeline()
    pipeline.add_node(name="A", component=AWithOutput2(), inputs=["Query"])
    pipeline.add_node(name="B", component=B(), inputs=["A.output_1"])
    pipeline.add_node(name="C", component=C(), inputs=["A.output_2"])
    pipeline.add_node(name="D", component=E(), inputs=["B"])
    pipeline.add_node(name="E", component=D(), inputs=["B"])
    pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E", "C"])
    output = pipeline.run(query="test")
    assert output["output"] == "AC"

    pipeline = Pipeline()
    pipeline.add_node(name="A", component=AWithOutputAll(), inputs=["Query"])
    pipeline.add_node(name="B", component=B(), inputs=["A.output_1"])
    pipeline.add_node(name="C", component=C(), inputs=["A.output_2"])
    pipeline.add_node(name="D", component=E(), inputs=["B"])
    pipeline.add_node(name="E", component=D(), inputs=["B"])
    pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E", "C"])
    output = pipeline.run(query="test")
    assert output["output"] == "ACABEABD"
Example #9
def test_parallel_paths_in_pipeline_graph():
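    # Two parallel branches fork after node B; JoinNode concatenates their
    # outputs, so the final string records both paths.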
    class A(RootNode):
        def run(self, **kwargs):
            kwargs["output"] = "A"
            return kwargs, "output_1"

    class B(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "B"
            return kwargs, "output_1"

    class C(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "C"
            return kwargs, "output_1"

    class D(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "D"
            return kwargs, "output_1"

    class E(RootNode):
        def run(self, **kwargs):
            kwargs["output"] += "E"
            return kwargs, "output_1"

    class JoinNode(RootNode):
        def run(self, **kwargs):
            kwargs["output"] = (kwargs["inputs"][0]["output"] +
                                kwargs["inputs"][1]["output"])
            return kwargs, "output_1"

    pipeline = Pipeline()
    pipeline.add_node(name="A", component=A(), inputs=["Query"])
    pipeline.add_node(name="B", component=B(), inputs=["A"])
    pipeline.add_node(name="C", component=C(), inputs=["B"])
    pipeline.add_node(name="E", component=E(), inputs=["C"])
    pipeline.add_node(name="D", component=D(), inputs=["B"])
    pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E"])
    output = pipeline.run(query="test")
    assert output["output"] == "ABDABCE"

    pipeline = Pipeline()
    pipeline.add_node(name="A", component=A(), inputs=["Query"])
    pipeline.add_node(name="B", component=B(), inputs=["A"])
    pipeline.add_node(name="C", component=C(), inputs=["B"])
    pipeline.add_node(name="D", component=D(), inputs=["B"])
    pipeline.add_node(name="E", component=JoinNode(), inputs=["C", "D"])
    output = pipeline.run(query="test")
    assert output["output"] == "ABCABD"
Example #10
import logging
import os
import shutil
import uuid
from pathlib import Path
from typing import Optional, List

from fastapi import APIRouter, UploadFile, File, Form, HTTPException

from haystack.pipeline import Pipeline
from rest_api.config import PIPELINE_YAML_PATH, FILE_UPLOAD_PATH, INDEXING_PIPELINE_NAME

logger = logging.getLogger(__name__)
router = APIRouter()

try:
    INDEXING_PIPELINE = Pipeline.load_from_yaml(
        Path(PIPELINE_YAML_PATH), pipeline_name=INDEXING_PIPELINE_NAME)
except KeyError:
    INDEXING_PIPELINE = None
    logger.info(
        "Indexing Pipeline not found in the YAML configuration. File Upload API will not be available."
    )

os.makedirs(FILE_UPLOAD_PATH,
            exist_ok=True)  # create directory for uploading files


@router.post("/file-upload")
def file_upload(
        files: List[UploadFile] = File(...),
        meta: Optional[str] = Form("null"),  # JSON serialized string
        remove_numeric_tables: Optional[bool] = Form(None),