def test_value_factor_with_parameters(self):
    '''Search should handle field value factor with parameters'''
    class FakeValueFactorSearch(FakeSearch):
        boosters = [
            search.ValueFactor('some_int_field', factor=1.2,
                               modifier='sqrt', missing=1)
        ]

    query = search.search_for(FakeValueFactorSearch)
    body = get_body(query)
    # Query should be wrapped in function_score
    assert 'function_score' in body['query']
    assert 'query' in body['query']['function_score']
    assert 'functions' in body['query']['function_score']
    value_factor = body['query']['function_score']['functions'][0]
    # Should be a field_value_factor function carrying every parameter
    assert_json_equal(value_factor, {
        'field_value_factor': {
            'field': 'some_int_field',
            'factor': 1.2,
            'modifier': 'sqrt',
            'missing': 1
        }
    })
def test_query_with_multiple_including_and_excluding_terms(self):
    '''A query should detect negation on each term in query_string'''
    search_query = search.search_for(
        FakeSearch, q='test -negated1 value -negated2')
    analyzer = search.i18n_analyzer._name
    fields = ['title^2', 'description']
    # Positive terms are merged into a single AND-ed multi_match;
    # each negated term gets its own must_not clause.
    expected = {
        'bool': {
            'must': [{
                'multi_match': {
                    'query': 'test value',
                    'analyzer': analyzer,
                    'type': 'cross_fields',
                    'operator': 'and',
                    'fields': fields,
                }
            }],
            'must_not': [{
                'multi_match': {
                    'query': term,
                    'analyzer': analyzer,
                    'type': 'cross_fields',
                    'fields': fields,
                }
            } for term in ('negated1', 'negated2')],
        }
    }
    assert_json_equal(get_query(search_query), expected)
def test_query_with_multiple_including_and_excluding_terms(self):
    '''A query should detect negation on each term in query_string'''
    search_query = search.search_for(FakeSearch,
                                     q='test -negated1 value -negated2')

    def multi_match(query, **extra):
        # Build a multi_match clause with the shared defaults.
        clause = {
            'query': query,
            'analyzer': search.i18n_analyzer._name,
            'type': 'cross_fields',
            'fields': ['title^2', 'description'],
        }
        clause.update(extra)
        return {'multi_match': clause}

    expected = {
        'bool': {
            'must': [multi_match('test value', operator='and')],
            'must_not': [multi_match('negated1'), multi_match('negated2')],
        }
    }
    assert_json_equal(get_query(search_query), expected)
def test_zones_api_no_geom(self):
    '''A zone without geometry serializes as an empty MultiPolygon'''
    zone = GeoZoneFactory(geom=None)
    response = self.get(url_for('api.zones', ids=[zone.id]))
    self.assert200(response)

    features = response.json['features']
    self.assertEqual(len(features), 1)
    feature = features[0]

    self.assertEqual(feature['type'], 'Feature')
    assert_json_equal(feature['geometry'], {
        'type': 'MultiPolygon',
        'coordinates': [],
    })
    self.assertEqual(feature['id'], zone.id)

    properties = feature['properties']
    # Every zone attribute must be exposed in the GeoJSON properties.
    for key, value in (
        ('name', zone.name),
        ('code', zone.code),
        ('level', zone.level),
        ('parents', zone.parents),
        ('population', zone.population),
        ('area', zone.area),
        ('keys', zone.keys),
        ('logo', zone.logo_url(external=True)),
    ):
        self.assertEqual(properties[key], value)
def test_is_json_serializable(self):
    '''Extras with embedded documents should serialize to plain JSON'''
    class Tester(db.Document):
        extras = db.ExtrasField()

    @Tester.extras('embedded')
    class EmbeddedExtra(db.EmbeddedDocument):
        name = db.StringField(required=True)

    # The payload shared by input and expectation; only the embedded
    # document differs between the two.
    common = {
        'test': {'key': 'value'},
        'string': 'a value',
        'integer': 5,
        'float': 5.5,
    }
    tester = Tester(
        extras=dict(common, embedded=EmbeddedExtra(name='An embedded field')))

    expected = dict(common, embedded={'name': 'An embedded field'})
    assert_json_equal(tester.extras, expected)
def test_value_factor_with_parameters(self):
    '''Search should handle field value factor with parameters'''
    class FakeValueFactorSearch(FakeSearch):
        boosters = [
            search.ValueFactor('some_int_field', factor=1.2,
                               modifier='sqrt', missing=1)
        ]

    query = search.search_for(FakeValueFactorSearch)
    body = get_body(query)
    # Query should be wrapped in function_score
    assert 'function_score' in body['query']
    assert 'query' in body['query']['function_score']
    assert 'functions' in body['query']['function_score']
    value_factor = body['query']['function_score']['functions'][0]
    # Should be a field_value_factor function carrying every parameter
    assert_json_equal(
        value_factor, {
            'field_value_factor': {
                'field': 'some_int_field',
                'factor': 1.2,
                'modifier': 'sqrt',
                'missing': 1
            }
        })
def test_simple_query(self):
    '''A simple query should use query_string with specified fields'''
    search_query = search.search_for(FakeSearch, q='test')
    # A single positive term produces a plain multi_match clause.
    assert_json_equal(get_query(search_query), {
        'multi_match': {
            'query': 'test',
            'analyzer': search.i18n_analyzer._name,
            'type': 'cross_fields',
            'fields': ['title^2', 'description'],
        }
    })
def test_with_multiple_terms(self):
    '''A query with multiple terms should use the AND operator'''
    search_query = search.search_for(FakeSearch, q='test value')
    # Multiple positive terms stay in one clause with operator=and.
    assert_json_equal(get_query(search_query), {
        'multi_match': {
            'query': 'test value',
            'analyzer': search.i18n_analyzer._name,
            'type': 'cross_fields',
            'operator': 'and',
            'fields': ['title^2', 'description'],
        }
    })
def test_decay_function_scoring_with_callables(self):
    '''Search should handle field decay with callable parameters'''
    # Callables must be resolved to their values when the query is built.
    get_dot5 = lambda: 0.5  # noqa
    get_5 = lambda: 5  # noqa
    get_10 = lambda: 10  # noqa
    get_20 = lambda: 20  # noqa
    get_30 = lambda: 30  # noqa
    get_40 = lambda: 40  # noqa

    class FakeBoostedSearch(FakeSearch):
        boosters = [
            search.GaussDecay('a_num_field', get_10, get_20,
                              offset=get_5, decay=get_dot5),
            search.ExpDecay('another_field', get_20, scale=get_30,
                            offset=get_5, decay=get_dot5),
            search.LinearDecay('last_field', get_30, get_40,
                               offset=get_5, decay=get_dot5),
        ]

    query = search.search_for(FakeBoostedSearch)
    body = get_body(query)
    functions = body['query']['function_score']['functions']
    # Query should be wrapped in a gauss decay function.
    # Use assert_json_equal consistently for all three functions
    # (the original mixed it with bare `==`, giving poorer failure output
    # and inconsistent comparison semantics).
    assert_json_equal(functions[0], {
        'gauss': {
            'a_num_field': {
                'origin': 10,
                'scale': 20,
                'offset': 5,
                'decay': 0.5,
            }
        },
    })
    assert_json_equal(functions[1], {
        'exp': {
            'another_field': {
                'origin': 20,
                'scale': 30,
                'offset': 5,
                'decay': 0.5,
            }
        },
    })
    assert_json_equal(functions[2], {
        'linear': {
            'last_field': {
                'origin': 30,
                'scale': 40,
                'offset': 5,
                'decay': 0.5
            }
        },
    })
def test_default_type(self):
    '''Default match type is overridable'''
    class FakeAnalyzerSearch(FakeSearch):
        match_type = 'most_fields'

    search_query = search.search_for(FakeAnalyzerSearch, q='test')
    # The overridden match type must show up in the multi_match clause.
    expected = {'multi_match': {
        'query': 'test',
        'analyzer': search.i18n_analyzer._name,
        'type': 'most_fields',
        'fields': ['title^2', 'description']
    }}
    assert_json_equal(get_query(search_query), expected)
def test_with_valid_geom(self):
    '''A valid GeoJSON geometry posted as form data should validate'''
    Fake, FakeForm = self.factory()
    geom = faker.multipolygon()
    fake = Fake()

    payload = MultiDict({'spatial-geom': json.dumps(geom)})
    form = FakeForm(payload)
    form.validate()
    self.assertEqual(form.errors, {})

    form.populate_obj(fake)
    assert_json_equal(fake.spatial.geom, geom)
def test_with_valid_geom_from_json(self):
    '''A valid GeoJSON geometry posted as JSON should validate'''
    Fake, FakeForm = self.factory()
    geom = faker.multipolygon()
    fake = Fake()

    form = FakeForm.from_json({'spatial': {'geom': geom}})
    form.validate()
    self.assertEqual(form.errors, {})

    form.populate_obj(fake)
    assert_json_equal(fake.spatial.geom, geom)
def test_default_analyzer(self):
    '''Default analyzer is overridable'''
    class FakeAnalyzerSearch(FakeSearch):
        analyzer = 'simple'

    search_query = search.search_for(FakeAnalyzerSearch, q='test')
    # The overridden analyzer must show up in the multi_match clause.
    assert_json_equal(get_query(search_query), {
        'multi_match': {
            'query': 'test',
            'analyzer': 'simple',
            'type': 'cross_fields',
            'fields': ['title^2', 'description'],
        }
    })
def test_map_metrics(self):
    '''Metrics should map to an object with one typed property each'''
    expected = {
        'type': 'object',
        'properties': {
            'fake-metric-int': {'type': 'integer'},
            'fake-metric-float': {'type': 'float'},
        }
    }
    assert_json_equal(search.metrics_mapping_for(Fake), expected)
def test_custom_function_scoring(self):
    '''Search should handle field boosting by function'''
    script = 'doc["field"].value * 2'

    class FakeBoostedSearch(FakeSearch):
        boosters = [search.FunctionBooster(script)]

    query = search.search_for(FakeBoostedSearch)
    body = get_body(query)
    # The booster should appear as a script_score scoring function
    functions = body['query']['function_score']['functions']
    assert_json_equal(functions[0], {
        'script_score': {'script': script},
    })
def test_map_metrics(self):
    '''Metrics should map to an object with one typed property each'''
    mapping = search.metrics_mapping_for(FakeSearchable)
    assert_json_equal(mapping, {
        'type': 'object',
        'properties': {
            'fake-metric-int': {'type': 'integer'},
            'fake-metric-float': {'type': 'float'},
        }
    })
def test_custom_function_scoring(self):
    '''Search should handle field boosting by function'''
    class FakeBoostedSearch(FakeSearch):
        boosters = [
            search.FunctionBooster('doc["field"].value * 2')
        ]

    query = search.search_for(FakeBoostedSearch)
    functions = get_body(query)['query']['function_score']['functions']
    # The booster is serialized as a script_score scoring function
    expected = {'script_score': {'script': 'doc["field"].value * 2'}}
    assert_json_equal(functions[0], expected)
def test_with_initial_geom(self):
    '''A form bound to an object with an existing geometry should validate'''
    Fake, FakeForm = self.factory()
    geom = faker.multipolygon()
    fake = Fake(spatial=SpatialCoverage(geom=geom))

    form = FakeForm.from_json({'spatial': {'geom': geom}}, fake)
    form.validate()
    self.assertEqual(form.errors, {})

    form.populate_obj(fake)
    assert_json_equal(fake.spatial.geom, geom)
def test_simple_excluding_query(self):
    '''A simple query should negate a simple term in query_string'''
    search_query = search.search_for(FakeSearch, q='-test')
    # The leading '-' moves the term into a must_not clause.
    negated_clause = {'multi_match': {
        'query': 'test',
        'analyzer': search.i18n_analyzer._name,
        'type': 'cross_fields',
        'fields': ['title^2', 'description'],
    }}
    assert_json_equal(get_query(search_query),
                      {'bool': {'must_not': [negated_clause]}})
def test_simple_excluding_query(self):
    '''A simple query should negate a simple term in query_string'''
    search_query = search.search_for(FakeSearch, q='-test')
    # The leading '-' moves the term into a must_not clause.
    must_not = [{'multi_match': dict(
        query='test',
        analyzer=search.i18n_analyzer._name,
        type='cross_fields',
        fields=['title^2', 'description'],
    )}]
    assert_json_equal(get_query(search_query), {'bool': {'must_not': must_not}})
def test_to_url_with_none(self, app):
    '''to_url should drop None parameters and replace the rest'''
    params = {'q': 'test', 'tag': ['tag1', 'tag2'], 'page': 2}
    search_query = search.search_for(FakeSearch, **params)
    with app.test_request_context('/an_url'):
        url = search_query.to_url(tag=None, other='value', replace=True)
        parsed = url_parse(url)
        # tag=None drops the tag filter; replace=True resets paging.
        assert parsed.path == '/an_url'
        assert_json_equal(multi_to_dict(url_decode(parsed.query)), {
            'q': 'test',
            'other': 'value',
        })
def test_custom_scoring(self):
    '''Search should handle field boosting'''
    class FakeBoostedSearch(FakeSearch):
        boosters = [search.BoolBooster('some_bool_field', 1.1)]

    query = search.search_for(FakeBoostedSearch)
    body = get_body(query)
    # The boosted query must be wrapped in a function_score envelope.
    assert 'function_score' in body['query']
    function_score = body['query']['function_score']
    assert 'query' in function_score
    assert 'functions' in function_score
    # A boolean booster becomes a filtered boost_factor function.
    assert_json_equal(function_score['functions'][0], {
        'filter': {'term': {'some_bool_field': True}},
        'boost_factor': 1.1,
    })
def test_to_url_with_specified_url(self, app):
    '''to_url should target an explicitly specified path'''
    params = {'q': 'test', 'tag': ['tag1', 'tag2'], 'page': 2}
    search_query = search.search_for(FakeSearch, **params)
    with app.test_request_context('/an_url'):
        url = search_query.to_url('/another_url')
        parsed = url_parse(url)
        # The explicit URL overrides the current request path while
        # every query parameter is preserved (page is stringified).
        assert parsed.path == '/another_url'
        assert_json_equal(multi_to_dict(url_decode(parsed.query)), {
            'q': 'test',
            'tag': ['tag1', 'tag2'],
            'page': '2',
        })
def test_coverage_for_level(self):
    # Build a three-level zone hierarchy: top -> sub -> child.
    GeoLevelFactory(id='top')
    GeoLevelFactory(id='sub', parents=['top'])
    GeoLevelFactory(id='child', parents=['sub'])
    # 2 top zones, each with 2 sub zones, each with 2 child zones.
    topzones, subzones, childzones = [], [], []
    for _ in range(2):
        zone = GeoZoneFactory(level='top')
        topzones.append(zone)
        for _ in range(2):
            subzone = GeoZoneFactory(level='sub', parents=[zone.id])
            subzones.append(subzone)
            for _ in range(2):
                childzone = GeoZoneFactory(
                    level='child', parents=[zone.id, subzone.id])
                childzones.append(childzone)
    # One visible dataset attached to every zone at every level.
    for zone in topzones + subzones + childzones:
        VisibleDatasetFactory(
            spatial=SpatialCoverageFactory(zones=[zone.id]))
    response = self.get(url_for('api.spatial_coverage', level='sub'))
    self.assert200(response)
    # Only the 'sub' level zones should be returned as features.
    self.assertEqual(len(response.json['features']), len(subzones))
    for feature in response.json['features']:
        self.assertEqual(feature['type'], 'Feature')
        zone = get_by(subzones, 'id', feature['id'])
        self.assertIsNotNone(zone)
        assert_json_equal(feature['geometry'], zone.geom)
        properties = feature['properties']
        self.assertEqual(properties['name'], zone.name)
        self.assertEqual(properties['code'], zone.code)
        self.assertEqual(properties['level'], 'sub')
        # Nested levels datasets should be counted:
        # the subzone's own dataset plus its 2 children's datasets.
        self.assertEqual(properties['datasets'], 3)
def test_zones_api_many(self):
    '''The zones endpoint should return one feature per requested zone'''
    zones = [GeoZoneFactory() for _ in range(3)]
    response = self.get(url_for('api.zones', ids=zones))
    self.assert200(response)

    features = response.json['features']
    self.assertEqual(len(features), len(zones))

    # Features come back in request order, one per zone.
    for zone, feature in zip(zones, features):
        self.assertEqual(feature['type'], 'Feature')
        assert_json_equal(feature['geometry'], zone.geom)
        self.assertEqual(feature['id'], zone.id)
        properties = feature['properties']
        for key, value in (
            ('name', zone.name),
            ('code', zone.code),
            ('level', zone.level),
            ('parents', zone.parents),
            ('population', zone.population),
            ('area', zone.area),
            ('keys', zone.keys),
            ('logo', zone.logo_url(external=True)),
        ):
            self.assertEqual(properties[key], value)
def test_is_json_serializable(self):
    '''Extras with embedded documents should serialize to plain JSON'''
    class Tester(db.Document):
        extras = db.ExtrasField()

    @Tester.extras('embedded')
    class EmbeddedExtra(db.EmbeddedDocument):
        name = db.StringField(required=True)

    tester = Tester(extras={
        'test': {'key': 'value'},
        'embedded': EmbeddedExtra(name='An embedded field'),
        'string': 'a value',
        'integer': 5,
        'float': 5.5,
    })
    # The embedded document must round-trip as a plain dict;
    # every other value is kept as-is.
    expected = {
        'test': {'key': 'value'},
        'embedded': {'name': 'An embedded field'},
        'string': 'a value',
        'integer': 5,
        'float': 5.5,
    }
    assert_json_equal(tester.extras, expected)
def test_decay_function_scoring_with_options(self):
    '''Search should handle field decay with options'''
    class FakeBoostedSearch(FakeSearch):
        boosters = [
            search.GaussDecay('a_num_field', 10, 20, offset=5, decay=0.5),
            search.ExpDecay('another_field', 20, scale=30, offset=5,
                            decay=0.5),
            search.LinearDecay('last_field', 30, 40, offset=5, decay=0.5),
        ]

    query = search.search_for(FakeBoostedSearch)
    functions = get_body(query)['query']['function_score']['functions']
    # Each decay booster should serialize with its own kind, field
    # and resolved origin/scale; offset and decay are shared.
    expectations = (
        ('gauss', 'a_num_field', 10, 20),
        ('exp', 'another_field', 20, 30),
        ('linear', 'last_field', 30, 40),
    )
    for function, (kind, field, origin, scale) in zip(functions,
                                                      expectations):
        assert_json_equal(function, {
            kind: {
                field: {
                    'origin': origin,
                    'scale': scale,
                    'offset': 5,
                    'decay': 0.5,
                }
            },
        })
def test_decay_function_scoring_with_options(self):
    '''Search should handle field decay with options'''
    class FakeBoostedSearch(FakeSearch):
        boosters = [
            search.GaussDecay('a_num_field', 10, 20, offset=5, decay=0.5),
            search.ExpDecay(
                'another_field', 20, scale=30, offset=5, decay=0.5),
            search.LinearDecay('last_field', 30, 40, offset=5, decay=0.5),
        ]

    body = get_body(search.search_for(FakeBoostedSearch))
    functions = body['query']['function_score']['functions']

    def decay(origin, scale):
        # All three boosters share the same offset and decay options.
        return {'origin': origin, 'scale': scale, 'offset': 5, 'decay': 0.5}

    assert_json_equal(functions[0], {'gauss': {'a_num_field': decay(10, 20)}})
    assert_json_equal(functions[1], {'exp': {'another_field': decay(20, 30)}})
    assert_json_equal(functions[2], {'linear': {'last_field': decay(30, 40)}})