def test_when_analyze_then_apptracer_has_value(loaded_registry, unit_test_guid,
                                               nlp_engine):
    text = "My name is Bart Simpson, and Credit card: 4095-2609-9393-4932,  my phone is 425 8829090"  # noqa E501
    language = "en"
    entities = ["CREDIT_CARD", "PHONE_NUMBER", "PERSON"]
    app_tracer_mock = AppTracerMock(enable_interpretability=True)
    analyzer_engine_with_spacy = AnalyzerEngine(
        loaded_registry,
        app_tracer=app_tracer_mock,
        enable_trace_pii=True,
        nlp_engine=nlp_engine,
    )
    results = analyzer_engine_with_spacy.analyze(
        correlation_id=unit_test_guid,
        text=text,
        entities=entities,
        language=language,
        all_fields=False,
        trace=True,
    )
    assert len(results) == 3
    for result in results:
        assert result.analysis_explanation is not None
    assert app_tracer_mock.get_msg_counter() == 2
    assert app_tracer_mock.get_last_trace() is not None
Example #2
def test_given_text_with_pii_using_package_then_analyze_and_anonymize_complete_successfully():
    text_to_test = "John Smith drivers license is AC432223"

    expected_response = [RecognizerResult("PERSON", 0, 10, 0.85),
                         RecognizerResult("US_DRIVER_LICENSE", 30, 38, 0.6499999999999999)
                         ]
    # Create configuration containing engine name and models
    configuration = {
        "nlp_engine_name": "spacy",
        "models": [{"lang_code": "en", "model_name": "en_core_web_sm"}],
    }

    # Create NLP engine based on configuration
    provider = NlpEngineProvider(nlp_configuration=configuration)
    nlp_engine = provider.create_engine()

    # Pass the created NLP engine and supported_languages to the AnalyzerEngine
    analyzer = AnalyzerEngine(
        nlp_engine=nlp_engine,
        supported_languages=["en"]
    )
    analyzer_results = analyzer.analyze(text_to_test, "en")
    for i in range(len(analyzer_results)):
        assert analyzer_results[i] == expected_response[i]

    expected_response = AnonymizerResult(text="<PERSON> drivers license is <US_DRIVER_LICENSE>")
    expected_response.add_item(AnonymizedEntity("replace", "US_DRIVER_LICENSE", 28, 47, "<US_DRIVER_LICENSE>"))
    expected_response.add_item(AnonymizedEntity("replace", "PERSON", 0, 8, "<PERSON>"))

    anonymizer = AnonymizerEngine()
    anonymizer_results = anonymizer.anonymize(text_to_test, analyzer_results)
    assert anonymizer_results == expected_response
Example #3
 def test_get_recognizers_returns_predefined(self):
     analyze_engine = AnalyzerEngine(registry=RecognizerRegistry(),
                                     nlp_engine=loaded_spacy_nlp_engine)
     request = RecognizersAllRequest(language="en")
     response = analyze_engine.GetAllRecognizers(request, None)
     # there are 15 predefined recognizers that detect the 17 entities
     assert len(response) == 15
Example #4
    def __init__(
        self,
        analyzer_engine: Optional[AnalyzerEngine] = None,
        entities_to_keep: List[str] = None,
        verbose: bool = False,
        labeling_scheme: str = "BIO",
        score_threshold: float = 0.4,
        language: str = "en",
        entity_mapping: Optional[Dict[str, str]] = None,
    ):
        """
        Evaluation wrapper for the Presidio Analyzer
        :param analyzer_engine: object of type AnalyzerEngine (from presidio-analyzer)
        """
        super().__init__(
            entities_to_keep=entities_to_keep,
            verbose=verbose,
            labeling_scheme=labeling_scheme,
            entity_mapping=entity_mapping,
        )
        self.score_threshold = score_threshold
        self.language = language

        if not analyzer_engine:
            analyzer_engine = AnalyzerEngine()
            self._update_recognizers_based_on_entities_to_keep(analyzer_engine)
        self.analyzer_engine = analyzer_engine
Example #5
def test_when_read_test_spacy_nlp_conf_file_then_returns_spacy_nlp_engine(
    mock_registry,
):
    engine = AnalyzerEngine(registry=mock_registry)

    assert isinstance(engine.nlp_engine, SpacyNlpEngine)
    assert engine.nlp_engine.nlp is not None
Example #6
    def test_added_pattern_recognizer_works(self):
        pattern = Pattern("rocket pattern", r'\W*(rocket)\W*', 0.8)
        pattern_recognizer = PatternRecognizer("ROCKET",
                                               name="Rocket recognizer",
                                               patterns=[pattern])

        # Make sure the analyzer doesn't get this entity
        recognizers_store_api_mock = RecognizerStoreApiMock()
        analyze_engine = AnalyzerEngine(
            registry=MockRecognizerRegistry(recognizers_store_api_mock),
            nlp_engine=MockNlpEngine())
        text = "rocket is my favorite transportation"
        entities = ["CREDIT_CARD", "ROCKET"]

        results = analyze_engine.analyze(self.unit_test_guid,
                                         text=text,
                                         entities=entities,
                                         language='en',
                                         all_fields=False)

        assert len(results) == 0

        # Add a new recognizer for the word "rocket" (case insensitive)
        recognizers_store_api_mock.add_custom_pattern_recognizer(
            pattern_recognizer)

        # Check that the entity is recognized:
        results = analyze_engine.analyze(self.unit_test_guid,
                                         text=text,
                                         entities=entities,
                                         language='en',
                                         all_fields=False)

        assert len(results) == 1
        assert_result(results[0], "ROCKET", 0, 7, 0.8)
Example #7
    def test_demo_text(self):
        text = "Here are a few examples sentences we currently support:\n\n" \
               "Hello, my name is David Johnson and I live in Maine.\n" \
               "My credit card number is 4095-2609-9393-4932 and my " \
               "Crypto wallet id is 16Yeky6GMjeNkAiNcBY7ZhrLoMSgg1BoyZ.\n\n" \
               "On September 18 I visited microsoft.com and sent an " \
               "email to [email protected],  from the IP 192.168.0.1.\n\n" \
               "My passport: 991280345 and my phone number: (212) 555-1234.\n\n" \
               "Please transfer using this IBAN IL150120690000003111111.\n\n" \
               "Can you please check the status on bank account 954567876544 " \
               "in PresidiBank?\n\n" \
               "" \
               "Kate's social security number is 078-05-1120.  " \
               "Her driver license? it is 9234567B.\n\n" \
               "" \
               "This project welcomes contributions and suggestions.\n" \
               "Most contributions require you to agree to a " \
               "Contributor License Agreement (CLA) declaring " \
               "that you have the right to, and actually do, " \
               "grant us the rights to use your contribution. " \
               "For details, visit https://cla.microsoft.com " \
               "When you submit a pull request, " \
               "a CLA-bot will automatically determine whether " \
               "you need to provide a CLA and decorate the PR " \
               "appropriately (e.g., label, comment).\n" \
               "Simply follow the instructions provided by the bot. " \
               "You will only need to do this once across all repos using our CLA.\n" \
               "This project has adopted the Microsoft Open Source Code of Conduct.\n" \
               "For more information see the Code of Conduct FAQ or " \
               "contact [email protected] with any additional questions or comments."

        language = "en"

        analyzer_engine = AnalyzerEngine(default_score_threshold=0.35, nlp_engine=loaded_spacy_nlp_engine)
        results = analyzer_engine.analyze(correlation_id=self.unit_test_guid, text=text, entities=None,
                                          language=language, all_fields=True)
        for result in results:
            logger.info("Entity = {}, Text = {}, Score={}, Start={}, End={}".format(result.entity_type,
                                                                                    text[result.start:result.end],
                                                                                    result.score,
                                                                                    result.start, result.end))
        detected_entities = [result.entity_type for result in results]

        assert len([entity for entity in detected_entities if entity == "CREDIT_CARD"]) == 1
        assert len([entity for entity in detected_entities if entity == "CRYPTO"]) == 1
        assert len([entity for entity in detected_entities if entity == "DATE_TIME"]) == 1
        assert len([entity for entity in detected_entities if entity == "DOMAIN_NAME"]) == 4
        assert len([entity for entity in detected_entities if entity == "EMAIL_ADDRESS"]) == 2
        assert len([entity for entity in detected_entities if entity == "IBAN_CODE"]) == 1
        assert len([entity for entity in detected_entities if entity == "IP_ADDRESS"]) == 1
        assert len([entity for entity in detected_entities if entity == "LOCATION"]) == 1
        assert len([entity for entity in detected_entities if entity == "PERSON"]) == 2
        assert len([entity for entity in detected_entities if entity == "PHONE_NUMBER"]) == 1
        assert len([entity for entity in detected_entities if entity == "US_BANK_NUMBER"]) == 1
        assert len([entity for entity in detected_entities if entity == "US_DRIVER_LICENSE"]) == 1
        assert len([entity for entity in detected_entities if entity == "US_PASSPORT"]) == 1
        assert len([entity for entity in detected_entities if entity == "US_SSN"]) == 1

        assert len(results) == 19
Example #8
    def initialize(self):
        SpacyRecognizer.ENTITIES = ["PERSON"]
        Replace.NEW_VALUE = 'replace_text'
        nlp_engine = SpacyNlpEngine()
        nlp_engine.nlp['en'] = spacy.load('en_core_web_lg', disable=["parser", "tagger", "lemmatizer"])

        self.analyzer_engine = AnalyzerEngine(nlp_engine=nlp_engine)
        self.anonymizer_engine = AnonymizerEngine()
Example #9
    def __init__(self, analyzer_engine: AnalyzerEngine = None, ocr: OCR = None):
        if not analyzer_engine:
            analyzer_engine = AnalyzerEngine()
        self.analyzer_engine = analyzer_engine

        if not ocr:
            ocr = TesseractOCR()
        self.ocr = ocr
Example #10
def test_when_analyze_two_entities_embedded_then_return_results(nlp_engine):
    analyzer = AnalyzerEngine(nlp_engine=nlp_engine)

    # Name with driver license in it
    text = "My name is John 1234567 Doe"
    results = analyzer.analyze(text=text, language="en", score_threshold=0)

    # currently we only remove duplicates when the two have the same entity type
    assert len(results) == 2
Example #11
def loaded_analyzer_engine(loaded_registry, app_tracer):
    mock_nlp_artifacts = NlpArtifacts([], [], [], [], None, "en")
    analyzer_engine = AnalyzerEngine(
        loaded_registry,
        NlpEngineMock(stopwords=[], punct_words=[], nlp_artifacts=mock_nlp_artifacts),
        app_tracer=app_tracer,
        log_decision_process=True,
    )
    return analyzer_engine
Example #12
def test_when_get_supported_fields_then_return_all_languages(
        mock_registry, unit_test_guid, nlp_engine):
    analyzer = AnalyzerEngine(registry=mock_registry, nlp_engine=nlp_engine)
    entities = analyzer.get_supported_entities()

    assert len(entities) == 3
    assert "CREDIT_CARD" in entities
    assert "DOMAIN_NAME" in entities
    assert "PHONE_NUMBER" in entities
Example #13
def test_simple():
    dic = {
        "text": "John Smith drivers license is AC432223",
        "language": "en",
        "score_threshold": 0.7,
        "entities": ["CRYPTO", "NRP", "DATE_TIME", "LOCATION", "PERSON"],
    }

    analyzer = AnalyzerEngine()
    analyzer.analyze(**dic)
Example #14
def test_when_allFields_is_true_full_recognizers_list_return_all_fields(
        nlp_engine):
    analyze_engine = AnalyzerEngine(registry=RecognizerRegistry(),
                                    nlp_engine=nlp_engine)
    request = AnalyzeRequest()
    request.analyzeTemplate.allFields = True
    request.text = "My name is David and I live in Seattle." "Domain: microsoft.com "
    response = analyze_engine.Apply(request, None)
    returned_entities = [field.field.name for field in response.analyzeResults]
    assert response.analyzeResults is not None
    assert "DOMAIN_NAME" in returned_entities
Example #15
def test_when_allFields_is_true_and_entities_not_empty_exception():
    analyze_engine = AnalyzerEngine(registry=RecognizerRegistry(),
                                    nlp_engine=NlpEngineMock())
    request = AnalyzeRequest()
    request.text = "My name is David and I live in Seattle." "Domain: microsoft.com "
    request.analyzeTemplate.allFields = True
    new_field = request.analyzeTemplate.fields.add()
    new_field.name = "CREDIT_CARD"
    new_field.minScore = "0.5"
    with pytest.raises(ValueError):
        analyze_engine.Apply(request, None)
Example #16
 def __init__(self, *args, **kwargs):
     super(TestAnalyzerEngine, self).__init__(*args, **kwargs)
     self.loaded_registry = MockRecognizerRegistry(RecognizerStoreApiMock())
     mock_nlp_artifacts = NlpArtifacts([], [], [], [], None, "en")
     self.app_tracer = AppTracerMock(enable_interpretability=True)
     self.loaded_analyzer_engine = AnalyzerEngine(self.loaded_registry,
                                                  MockNlpEngine(stopwords=[],
                                                                punct_words=[],
                                                                nlp_artifacts=mock_nlp_artifacts),
                                                  app_tracer=self.app_tracer,
                                                  enable_trace_pii=True)
     self.unit_test_guid = "00000000-0000-0000-0000-000000000000"
Example #17
def anonymize_text(text: str) -> str:
    analyzer = AnalyzerEngine()
    anonymizer = AnonymizerEngine()
    analyzer_results = analyzer.analyze(text=text, language="en")
    anonymized_results = anonymizer.anonymize(
        text=text,
        analyzer_results=analyzer_results,
        anonymizers_config={
            "DEFAULT": AnonymizerConfig("replace",
                                        {"new_value": "<ANONYMIZED>"})
        },
    )
    return anonymized_results
Example #18
    def __init__(self, **data: Any):
        super().__init__(**data)

        if not self.engine_config:
            self.engine_config = PresidioEngineConfig()

        if not self.engine_config.models or len(
                self.engine_config.models) == 0:
            self.engine_config.models = [PresidioModelConfig()]

        # If the NLP engine is spaCy, load the spaCy models and collect the supported languages
        languages = []
        for model_config in self.engine_config.models:
            languages.append(model_config.lang_code)

            # Check SpacyNlpEngine.engine_name
            if (self.engine_config.nlp_engine_name == "spacy"
                    and model_config.model_name is not None):
                try:
                    spacy_model = __import__(model_config.model_name)
                    spacy_model.load()
                    logger.info(
                        f"Spacy model {model_config.model_name} is already downloaded"
                    )
                except:
                    logger.warning(
                        f"Spacy model {model_config.model_name} is not downloaded"
                    )
                    logger.warning(
                        f"Downloading spacy model {model_config.model_name}, it might take some time"
                    )
                    from spacy.cli import download

                    download(model_config.model_name)

        # Create NLP engine based on configuration
        provider = NlpEngineProvider(
            nlp_configuration=self.engine_config.dict())
        nlp_engine = provider.create_engine()

        # Pass the created NLP engine and supported_languages to the AnalyzerEngine
        self._analyzer = AnalyzerEngine(nlp_engine=nlp_engine,
                                        supported_languages=languages)

        # self._analyzer.registry.load_predefined_recognizers()
        if self.entity_recognizers:
            for entity_recognizer in self.entity_recognizers:
                self._analyzer.registry.add_recognizer(entity_recognizer)

        # Initialize the anonymizer engine
        self._anonymizer = AnonymizerEngine()
Example #19
def test_removed_pattern_recognizer_doesnt_work(unit_test_guid):
    pattern = Pattern("spaceship pattern", r"\W*(spaceship)\W*", 0.8)
    pattern_recognizer = PatternRecognizer("SPACESHIP",
                                           name="Spaceship recognizer",
                                           patterns=[pattern])

    # Make sure the analyzer doesn't get this entity
    recognizers_store_api_mock = RecognizerStoreApiMock()
    analyze_engine = AnalyzerEngine(
        registry=MockRecognizerRegistry(recognizers_store_api_mock),
        nlp_engine=NlpEngineMock(),
    )
    text = "spaceship is my favorite transportation"
    entities = ["CREDIT_CARD", "SPACESHIP"]

    results = analyze_engine.analyze(
        correlation_id=unit_test_guid,
        text=text,
        entities=entities,
        language="en",
        all_fields=False,
    )

    assert len(results) == 0

    # Add a new recognizer for the word "spaceship" (case insensitive)
    recognizers_store_api_mock.add_custom_pattern_recognizer(
        pattern_recognizer)
    # Check that the entity is recognized:
    results = analyze_engine.analyze(
        correlation_id=unit_test_guid,
        text=text,
        entities=entities,
        language="en",
        all_fields=False,
    )
    assert len(results) == 1
    assert_result(results[0], "SPACESHIP", 0, 10, 0.8)

    # Remove recognizer
    recognizers_store_api_mock.remove_recognizer("Spaceship recognizer")
    # Test again to see we didn't get any results
    results = analyze_engine.analyze(
        correlation_id=unit_test_guid,
        text=text,
        entities=entities,
        language="en",
        all_fields=False,
    )

    assert len(results) == 0
Example #20
    def analyze(self, image: object, **kwargs) -> List[ImageRecognizerResult]:
        """Analyse method to analyse the given image.

        :param image: PIL Image/numpy array or file path(str) to be processed

        :return: list of the extracted entities with image bounding boxes
        """
        ocr_result = OCR().perform_ocr(image)
        text = OCR().get_text_from_ocr_dict(ocr_result)

        analyzer = AnalyzerEngine()
        analyzer_result = analyzer.analyze(text=text, language="en", **kwargs)
        bboxes = self.map_analyzer_results_to_bounding_boxes(
            analyzer_result, ocr_result, text)
        return bboxes
Example #21
    def test_when_default_threshold_is_zero_all_results_pass(self):
        text = " Credit card: 4095-2609-9393-4932,  my phone is 425 8829090"
        language = "en"
        entities = ["CREDIT_CARD", "PHONE_NUMBER"]

        # This analyzer engine is different from the global one, as this one
        # also loads SpaCy so it can detect the phone number entity

        analyzer_engine = AnalyzerEngine(
            registry=self.loaded_registry, nlp_engine=MockNlpEngine())
        results = analyzer_engine.analyze(self.unit_test_guid, text,
                                          entities, language,
                                          all_fields=False)

        assert len(results) == 2
Example #22
def test_when_entities_is_none_then_return_all_fields(loaded_registry):
    analyze_engine = AnalyzerEngine(registry=loaded_registry,
                                    nlp_engine=NlpEngineMock())
    threshold = 0
    text = (" Credit card: 4095-2609-9393-4932,  my phone is 425 8829090 "
            "Domain: microsoft.com")
    response = analyze_engine.analyze(text=text,
                                      score_threshold=threshold,
                                      language="en")
    returned_entities = [response.entity_type for response in response]

    assert response is not None
    assert "CREDIT_CARD" in returned_entities
    assert "PHONE_NUMBER" in returned_entities
    assert "DOMAIN_NAME" in returned_entities
Example #23
def test_when_entities_is_none_all_recognizers_loaded_then_return_all_fields(
    nlp_engine, ):
    analyze_engine = AnalyzerEngine(registry=RecognizerRegistry(),
                                    nlp_engine=nlp_engine)
    threshold = 0
    text = "My name is Sharon and I live in Seattle." "Domain: microsoft.com "
    response = analyze_engine.analyze(text=text,
                                      score_threshold=threshold,
                                      language="en")
    returned_entities = [response.entity_type for response in response]

    assert response is not None
    assert "PERSON" in returned_entities
    assert "LOCATION" in returned_entities
    assert "DOMAIN_NAME" in returned_entities
Example #24
    def test_when_allFields_is_true_return_all_fields(self):
        analyze_engine = AnalyzerEngine(registry=MockRecognizerRegistry(),
                                        nlp_engine=MockNlpEngine())
        request = AnalyzeRequest()
        request.analyzeTemplate.allFields = True
        request.analyzeTemplate.resultsScoreThreshold = 0
        request.text = " Credit card: 4095-2609-9393-4932,  my phone is 425 8829090 " \
                       "Domain: microsoft.com"
        response = analyze_engine.Apply(request, None)
        returned_entities = [
            field.field.name for field in response.analyzeResults]

        assert response.analyzeResults is not None
        assert "CREDIT_CARD" in returned_entities
        assert "PHONE_NUMBER" in returned_entities
        assert "DOMAIN_NAME" in returned_entities
Example #25
def anonymize_text(text: str) -> str:
    try:
        analyzer = AnalyzerEngine()
        anonymizer = AnonymizerEngine()
        analyzer_results = analyzer.analyze(text=text, language="en")
        anonymized_results = anonymizer.anonymize(
            text=text,
            analyzer_results=analyzer_results,
            operators={
                "DEFAULT":
                AnonymizerConfig("replace", {"new_value": "<ANONYMIZED>"})
            },
        )
        return anonymized_results.text
    except Exception as e:
        print(f"An exception occurred. {e}")
Example #26
def test_when_get_supported_fields_specific_language_then_return_single_result(
        loaded_registry, unit_test_guid, nlp_engine):
    pattern = Pattern("rocket pattern", r"\W*(rocket)\W*", 0.8)
    pattern_recognizer = PatternRecognizer(
        "ROCKET",
        name="Rocket recognizer RU",
        patterns=[pattern],
        supported_language="ru",
    )

    analyzer = AnalyzerEngine(registry=loaded_registry, nlp_engine=nlp_engine)
    analyzer.registry.add_recognizer(pattern_recognizer)
    entities = analyzer.get_supported_entities(language="ru")

    assert len(entities) == 1
    assert "ROCKET" in entities
Example #27
    def test_get_recognizers_returns_supported_language(self):
        pattern = Pattern("rocket pattern", r'\W*(rocket)\W*', 0.8)
        pattern_recognizer = PatternRecognizer("ROCKET",
                                               name="Rocket recognizer RU",
                                               patterns=[pattern],
                                               supported_language="ru")

        recognizers_store_api_mock = RecognizerStoreApiMock()
        recognizers_store_api_mock.add_custom_pattern_recognizer(
            pattern_recognizer)
        analyze_engine = AnalyzerEngine(
            registry=MockRecognizerRegistry(recognizers_store_api_mock),
            nlp_engine=MockNlpEngine())
        request = RecognizersAllRequest(language="ru")
        response = analyze_engine.GetAllRecognizers(request, None)
        # there is only 1 mocked russian recognizer
        assert len(response) == 1
Example #28
def test_when_get_recognizers_then_returns_supported_language():
    pattern = Pattern("rocket pattern", r"\W*(rocket)\W*", 0.8)
    pattern_recognizer = PatternRecognizer(
        "ROCKET",
        name="Rocket recognizer RU",
        patterns=[pattern],
        supported_language="ru",
    )
    mock_recognizer_registry = RecognizerRegistryMock()
    mock_recognizer_registry.add_recognizer(pattern_recognizer)
    analyze_engine = AnalyzerEngine(
        registry=mock_recognizer_registry,
        nlp_engine=NlpEngineMock(),
    )
    response = analyze_engine.get_recognizers(language="ru")
    # there is only 1 mocked russian recognizer
    assert len(response) == 1
Example #29
 def test_when_analyze_then_apptracer_has_value(self):
     text = "My name is Bart Simpson, and Credit card: 4095-2609-9393-4932,  my phone is 425 8829090"
     language = "en"
     entities = ["CREDIT_CARD", "PHONE_NUMBER", "PERSON"]
     analyzer_engine_with_spacy = AnalyzerEngine(self.loaded_registry,
                                                 app_tracer=self.app_tracer,
                                                 enable_trace_pii=True,
                                                 nlp_engine=TESTS_NLP_ENGINE)
     results = analyzer_engine_with_spacy.analyze(correlation_id=self.unit_test_guid,
                                                  text=text,
                                                  entities=entities,
                                                  language=language,
                                                  all_fields=False,
                                                  trace=True)
     assert len(results) == 3
     for result in results:
         assert result.analysis_explanation is not None
     assert self.app_tracer.get_msg_counter() == 2
     assert self.app_tracer.get_last_trace() is not None
Example #30
    def test_get_recognizers_returns_added_custom(self):
        pattern = Pattern("rocket pattern", r'\W*(rocket)\W*', 0.8)
        pattern_recognizer = PatternRecognizer("ROCKET",
                                               name="Rocket recognizer",
                                               patterns=[pattern])

        recognizers_store_api_mock = RecognizerStoreApiMock()

        analyze_engine = AnalyzerEngine(
            registry=MockRecognizerRegistry(recognizers_store_api_mock),
            nlp_engine=MockNlpEngine())
        request = RecognizersAllRequest(language="en")
        response = analyze_engine.GetAllRecognizers(request, None)
        # there are 15 predefined recognizers
        assert len(response) == 15
        recognizers_store_api_mock.add_custom_pattern_recognizer(
            pattern_recognizer)
        response = analyze_engine.GetAllRecognizers(request, None)
        # there are 15 predefined recognizers and one custom
        assert len(response) == 16