Example #1
def test_synonym_http_error():
    """
    Tests one synonym call with an http error
    """
    req_list = MockResultHTPPError(json_mock())
    with mock.patch('requests_futures.sessions.FuturesSession.get',
                    return_value=req_list):
        with pytest.raises(requests.HTTPError):
            synonym_extractor.create_synonym_list_esp(["word"])
Example #2
def test_one_synonym():
    """
    Tests one synonym call with default max synonyms (5)
    """
    expected = ["my_synonym"] * constants.DEFAULT_MAX_SYNONYMS
    req_list = MockResult(json_mock())
    with mock.patch('requests_futures.sessions.FuturesSession.get',
                    return_value=req_list):

        assert expected == synonym_extractor.create_synonym_list_esp(["word"])
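
MockResult is the happy-path counterpart: the extractor presumably resolves the future and reads the JSON body from the response. A sketch under those assumptions, with the fake object doubling as its own response:

class MockResult:
    """Fake future whose resolved response succeeds and returns the mocked JSON."""

    def __init__(self, payload):
        self._payload = payload

    def result(self):
        return self  # the fake acts as both future and response

    def raise_for_status(self):
        pass  # happy path: no HTTP error

    def json(self):
        return self._payload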
Example #3
def test_no_synonyms():
    """
    Tests no synonym calls
    """
    expected = []

    req_list = MockResult(json_mock())
    with mock.patch('requests_futures.sessions.FuturesSession.get',
                    return_value=req_list):

        assert expected == synonym_extractor.create_synonym_list_esp([])
Example #4
def test_increased_max_synonyms():
    """
    Tests one synonym call with increased max synonyms (8)
    """
    expected = ["my_synonym"] * constants.INCREASED_MAX_SYNONYMS

    req_list = MockResult(json_mock())
    with mock.patch('requests_futures.sessions.FuturesSession.get',
                    return_value=req_list):

        assert expected == synonym_extractor.create_synonym_list_esp(
            ["word1"], max_synonyms=constants.INCREASED_MAX_SYNONYMS)
Example #5
    def query(self, query):
        """
        Extracts keywords, from keywords endpoint,
        and synonyms, from local function,
        to call search query and calculate the score for articles
        Args:
            query: query string
        Returns:
            A map of the score of every article
        """
        query_text = {'text': query}

        logging.debug("keywords location: %s", env.get_keyword_endpoint())

        response = requests.post(env.get_keyword_endpoint(), json=query_text)
        response.raise_for_status()
        keyword_response = response.json()
        logging.info("keywords response: %s", keyword_response)

        if 'error' in keyword_response:
            raise Exception(keyword_response['error']['message'])

        lan = keyword_response['lan']

        if lan not in constants.SUPPORTED_LANGUAGES:
            logging.warning("%s not supported", lan)

        keywords = []

        for token in keyword_response['tokens']:
            keywords.append(token['lemma'])

        logging.info("keywords: %s", keywords)

        synonyms = synonym_extractor.create_synonym_list_esp(keywords)

        logging.info("synonyms: %s", synonyms)

        return self.search_query(keywords, synonyms)
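
The method assumes the keyword endpoint answers with JSON that carries a lan field, a tokens list whose items expose a lemma, and an optional error object. A hypothetical payload illustrating the shape query() reads:

# Hypothetical response body; field names follow the accesses made in query() above.
keyword_response_example = {
    "lan": "es",                   # checked against constants.SUPPORTED_LANGUAGES
    "tokens": [
        {"lemma": "partido"},      # one lemma per extracted keyword
        {"lemma": "futbol"},
    ],
    # "error": {"message": "..."}  # present only when the keyword service fails
}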