def test_nest_dicts_api(input_file, expected_output_file, nesting_keys):
    input_data = read_json_fixture(input_file)
    expected_response = read_json_fixture(expected_output_file)
    response = api_client.post(
        "/api/v1/nest_dicts",
        params={"nesting_keys": nesting_keys},
        json=input_data,
        headers=get_basicauth_headers(),
    )
    assert response.status_code == 200
    assert response.json() == expected_response
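# The test above uses two names not defined in this snippet: `api_client`
# (presumably a requests.Session or httpx/TestClient pointed at the service
# under test) and `get_basicauth_headers()`. Below is a minimal sketch of the
# header helper, assuming the credentials come from the environment variables
# NEST_API_USER / NEST_API_PASSWORD; both variable names are assumptions, not
# part of the original code.
import base64
import os


def get_basicauth_headers():
    # Build a standard HTTP Basic Auth header from environment variables.
    user = os.environ.get("NEST_API_USER", "user")
    password = os.environ.get("NEST_API_PASSWORD", "password")
    token = base64.b64encode(f"{user}:{password}".encode()).decode()
    return {"Authorization": f"Basic {token}"}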
def test_mkdocs_produces_same_results():
    query_string = "plugins"
    js_results = run_node_script("mkdocs_query.js", query_string).split("\n")
    data = read_json_fixture("mkdocs_index.json")
    index = lunr(ref="id", fields=("title", "text"), documents=data["docs"])
    results = index.search(query_string)
    assert_results_match(results, js_results)
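# `run_node_script` and `assert_results_match` are helpers assumed to live
# elsewhere in the test suite. A rough sketch under two assumptions that the
# original code does not confirm: the Node.js scripts sit in a `fixtures/`
# directory next to this module, and each line of their output has the form
# "<ref> <score>".
import subprocess
from pathlib import Path

FIXTURE_DIR = Path(__file__).parent / "fixtures"  # assumed layout


def run_node_script(script_name, *args):
    # Run the given Node.js script and return its stdout as text.
    command = ["node", str(FIXTURE_DIR / script_name), *args]
    return subprocess.check_output(command, text=True).strip()


def assert_results_match(results, js_results, tol=None):
    # Compare Python lunr results against the lines printed by JavaScript.
    assert len(results) == len(js_results)
    for result, js_line in zip(results, js_results):
        js_ref, js_score = js_line.split()
        assert result["ref"] == js_ref
        if tol is None:
            assert round(result["score"], 6) == round(float(js_score), 6)
        else:
            # Language stemmers can differ slightly between implementations,
            # hence the optional tolerance used by the multilingual tests.
            assert abs(result["score"] - float(js_score)) < tol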
def test_main(input_file, expected_output_file, nesting_keys):
    expected = read_json_fixture(expected_output_file)
    child = pexpect_spawn_nest_script(input_file, nesting_keys)
    output = json.loads(child.read())
    assert output == expected
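# `pexpect_spawn_nest_script` is another helper assumed to be defined
# elsewhere. A minimal sketch, assuming the CLI entry point is a script named
# `nest_dicts.py` that takes the input file path followed by the nesting keys
# as arguments; the script name, argument order, and the assumption that
# `nesting_keys` is an iterable of strings are all hypothetical.
import sys

import pexpect


def pexpect_spawn_nest_script(input_file, nesting_keys):
    # Spawn the command-line script under pexpect and return the child process.
    args = ["nest_dicts.py", str(input_file), *nesting_keys]
    return pexpect.spawn(sys.executable, args, encoding="utf-8")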
def test_languages_query_results_match_javascript_results():
    query_string = "resistencia"
    js_results = run_node_script("language_query.js", query_string).split("\n")
    data = read_json_fixture("lang_es.json")
    index = lunr(
        ref="id",
        fields=("title", "text"),
        documents=data["docs"],
        languages="es",
    )
    results = index.search(query_string)
    assert_results_match(results, js_results, tol=0.1)
def test_serialized_index_can_be_loaded_in_js_and_produces_same_results():
    data = read_json_fixture("mkdocs_index.json")
    index = lunr(ref="id", fields=("title", "text"), documents=data["docs"])
    query_string = "plugins"
    results = index.search(query_string)
    serialized_index = index.serialize()
    with tempfile.NamedTemporaryFile(delete=False) as fp:
        fp.write(json.dumps(serialized_index).encode())
    js_results = run_node_script(
        "mkdocs_load_serialized_index_and_search.js", fp.name, query_string
    ).split("\n")
    assert_results_match(results, js_results)
def test_validate_item():
    input_data = read_json_fixture("input.json")
    item = input_data[0]

    # asserts that exception is not raised for valid keys
    nesting_keys = {"currency", "city"}
    validate_item(item, nesting_keys)

    # asserts that exception is raised for invalid keys
    invalid_nesting_keys = {"garbage_key"}
    with pytest.raises(ValueError) as excinfo:
        validate_item(item, invalid_nesting_keys)
    assert (
        f"Nesting key(s): {invalid_nesting_keys} doesn't exist inside item: {item}"
        in str(excinfo.value)
    )
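# Based on the assertions above, `validate_item` appears to raise ValueError
# when any requested nesting key is missing from the item. A sketch consistent
# with the error message checked in the test; the real implementation may
# differ.
def validate_item(item, nesting_keys):
    # Ensure every requested nesting key is present in the flat input dict.
    missing = set(nesting_keys) - set(item)
    if missing:
        raise ValueError(
            f"Nesting key(s): {missing} doesn't exist inside item: {item}"
        )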
def test_serialized_multilang_index_can_be_loaded_in_js_and_results_equal():
    data = read_json_fixture("lang_es_en.json")
    index = lunr(
        ref="id",
        fields=("title", "text"),
        documents=data["docs"],
        languages=["es", "en"],
    )
    query_string = "taxation"
    results = index.search(query_string)
    serialized_index = index.serialize()
    with tempfile.NamedTemporaryFile(delete=False) as fp:
        fp.write(json.dumps(serialized_index).encode())
    js_results = run_node_script(
        "language_load_serialized_index_and_search.js",
        fp.name,
        query_string,
        "lang_es_en.json",
    ).split("\n")
    assert_results_match(results, js_results)
def get_mkdocs_index():
    data = read_json_fixture("mkdocs_index.json")
    return lunr(ref="id", fields=("title", "text"), documents=data["docs"])
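# `read_json_fixture` is shared by all of the tests above but is not shown.
# A minimal sketch, assuming the JSON files live in a `fixtures/` directory
# next to the test module; the directory name is an assumption.
import json
from pathlib import Path


def read_json_fixture(filename):
    # Load a JSON fixture file relative to this test module.
    path = Path(__file__).parent / "fixtures" / filename
    with path.open() as fp:
        return json.load(fp)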
def test_nest_dicts(input_file, expected_output_file, nesting_keys):
    input_data = read_json_fixture(input_file)
    expected = read_json_fixture(expected_output_file)
    output = nest_dicts(input_data, nesting_keys)
    assert output == expected