def test_allowed(self):
    """allowed() is truthy for granted APIs and None for unknown names."""
    api_key = ApiKeyFactory(allow_locate=True)
    self.assertTrue(api_key.allowed('locate'))
    self.assertTrue(api_key.allowed('region'))
    self.assertTrue(api_key.allowed('submit'))
    # assertIsNone gives a clearer failure message than assertTrue(x is None)
    self.assertIsNone(api_key.allowed('unknown'))
    # A missing or explicitly disabled grant disallows the API.
    self.assertFalse(ApiKeyFactory(allow_locate=None).allowed('locate'))
    self.assertFalse(ApiKeyFactory(allow_locate=False).allowed('locate'))
def test_allowed(self):
    """allowed() reflects per-API grants; unknown API names yield None."""
    granted = ApiKeyFactory.build(allow_locate=True, allow_transfer=True)
    for api_name in ('locate', 'region', 'submit', 'transfer'):
        assert granted.allowed(api_name)
    assert granted.allowed('unknown') is None

    # A missing or disabled grant disallows the API.
    assert not ApiKeyFactory.build(allow_locate=None).allowed('locate')
    assert not ApiKeyFactory.build(allow_locate=False).allowed('locate')
    assert not ApiKeyFactory.build(allow_transfer=None).allowed('transfer')
def test_monitor_api_keys_multiple(self):
    """Today's rate counters are reported per known-key shortname or raw key."""
    redis_client = self.redis_client
    now = util.utcnow()
    today = now.strftime('%Y%m%d')
    yesterday = (now - timedelta(hours=24)).strftime('%Y%m%d')

    data = {
        'test': {'v1.search': 11, 'v1.geolocate': 13},
        'no_key_1': {'v1.search': 12},
        'no_key_2': {'v1.geolocate': 15},
    }
    # Populate rate counters for today and (smaller) for yesterday.
    for key, paths in data.items():
        for path, value in paths.items():
            rate_key = 'apilimit:%s:%s:%s' % (key, path, today)
            redis_client.incr(rate_key, value)
            rate_key = 'apilimit:%s:%s:%s' % (key, path, yesterday)
            redis_client.incr(rate_key, value - 10)

    ApiKeyFactory(valid_key='no_key_1', shortname='shortname_1')
    ApiKeyFactory(valid_key='no_key_2')
    ApiKeyFactory(valid_key='no_key_3', shortname='shortname_3')
    self.session.flush()

    # add some other items into Redis
    redis_client.lpush('default', 1, 2)
    redis_client.set('cache_something', '{}')

    result = monitor_api_key_limits.delay().get()

    self.check_stats(gauge=[
        ('api.limit', ['key:test', 'path:v1.geolocate']),
        ('api.limit', ['key:test', 'path:v1.search']),
        ('api.limit', ['key:shortname_1', 'path:v1.search']),
        ('api.limit', ['key:no_key_2', 'path:v1.geolocate']),
    ])
    self.assertDictEqual(result, {
        'test': {'v1.search': 11, 'v1.geolocate': 13},
        'shortname_1': {'v1.search': 12},
        'no_key_2': {'v1.geolocate': 15},
    })
def test_queues(self, celery, redis, session):
    """Incoming reports fan out to each export queue per its skip rules."""
    ApiKeyFactory(valid_key='test2')
    ExportConfigFactory(name='test', batch=3,
                        skip_keys=frozenset(['export_source']))
    ExportConfigFactory(name='everything', batch=5)
    ExportConfigFactory(name='no_test', batch=2,
                        skip_keys=frozenset(['test', 'test_1']))
    ExportConfigFactory(name='query', batch=2,
                        skip_sources=frozenset(['gnss']))
    session.flush()

    self.add_reports(celery, 4)
    self.add_reports(celery, 1, api_key='test2')
    self.add_reports(celery, 2, api_key=None, source='gnss')
    self.add_reports(celery, 1, api_key='test', source='query')
    update_incoming.delay().get()

    expected_lengths = {
        'queue_export_test': 2,
        'queue_export_everything': 3,
        'queue_export_no_test': 1,
        'queue_export_query': 1,
    }
    for queue_key, num in expected_lengths.items():
        assert self.queue_length(redis, queue_key) == num
def test_signature(self, app, session, redis, logs):
    """Identical requests have the same signature."""
    # Two identical calls.
    self._call(app, api_key="test", ip=self.test_ip)
    self._call(app, api_key="test", ip=self.test_ip)

    # Same key, different IP address.
    other_ip = "81.2.69.143"
    assert other_ip != self.test_ip
    self._call(app, api_key="test", ip=other_ip)

    # Same IP address, different API key.
    other_key = ApiKeyFactory()
    session.flush()
    self._call(app, api_key=other_key.valid_key, ip=self.test_ip)

    log1, log2, log_new_ip, log_new_key = logs.entries

    # The first two calls share a signature.
    assert log1["api_response_sig"] == log2["api_response_sig"]
    seen_sigs = {log1["api_response_sig"]}

    # A different IP produces a different signature.
    assert log_new_ip["api_response_sig"] not in seen_sigs
    seen_sigs.add(log_new_ip["api_response_sig"])

    # A different API key produces a different signature.
    assert log_new_key["api_response_sig"] not in seen_sigs
def setUp(self):
    """Prepare a fallback-enabled key and the canned fallback responses."""
    super(TestSource, self).setUp()
    self.api_key = ApiKeyFactory.build(allow_fallback=True)
    self.fallback_model = DummyModel(lat=51.5366, lon=0.03989, radius=1500.0)
    # External-service style response body.
    self.fallback_result = {
        'location': {
            'lat': self.fallback_model.lat,
            'lng': self.fallback_model.lon,
        },
        'accuracy': float(self.fallback_model.radius),
        'fallback': 'lacf',
    }
    # Serialized form as it would be stored in the fallback cache.
    self.fallback_cached_result = floatjson.float_dumps({
        'lat': self.fallback_model.lat,
        'lon': self.fallback_model.lon,
        'accuracy': float(self.fallback_model.radius),
        'fallback': 'lacf',
    })
def test_api_key_disallows(self):
    """No search should happen when the key disallows the fallback."""
    key = ApiKeyFactory.build(allow_fallback=False)
    query = self.model_query(
        cells=CellShardFactory.build_batch(2),
        wifis=WifiShardFactory.build_batch(2),
        api_key=key)
    self.check_should_search(query, False)
def test_queues(self, celery, redis, session):
    """Incoming reports are distributed to export queues per skip rules."""
    ApiKeyFactory(valid_key="test2")
    ExportConfigFactory(name="test", batch=3,
                        skip_keys=frozenset(["export_source"]))
    ExportConfigFactory(name="everything", batch=5)
    ExportConfigFactory(name="no_test", batch=2,
                        skip_keys=frozenset(["test", "test_1"]))
    ExportConfigFactory(name="query", batch=2,
                        skip_sources=frozenset(["gnss"]))
    session.flush()

    self.add_reports(celery, 4)
    self.add_reports(celery, 1, api_key="test2")
    self.add_reports(celery, 2, api_key=None, source="gnss")
    self.add_reports(celery, 1, api_key="test", source="query")
    update_incoming.delay().get()

    expected_lengths = {
        "queue_export_test": 2,
        "queue_export_everything": 3,
        "queue_export_no_test": 1,
        "queue_export_query": 1,
    }
    for queue_key, num in expected_lengths.items():
        assert self.queue_length(redis, queue_key) == num
def test_stats(self, celery, session, stats):
    """Upload counters are emitted per API key and observation type."""
    ApiKeyFactory(valid_key='e5444-794')
    session.flush()
    self.add_reports(celery, 3)
    self.add_reports(celery, 3, api_key='e5444-794', source='gnss')
    self.add_reports(celery, 3, api_key='e5444-794', source='fused')
    self.add_reports(celery, 3, api_key=None)
    self._update_all(session)

    stats.check(counter=[
        ('data.export.batch', 1, 1, ['key:internal']),
        ('data.report.upload', 2, 3),
        ('data.report.upload', 1, 3, ['key:test']),
        ('data.report.upload', 1, 6, ['key:e5444-794']),
        ('data.observation.upload', 1, 3, ['type:cell', 'key:test']),
        ('data.observation.upload', 1, 6, ['type:wifi', 'key:test']),
        ('data.observation.upload', 0, ['type:cell', 'key:no_key']),
        ('data.observation.upload', 1, 6, ['type:cell', 'key:e5444-794']),
        ('data.observation.upload', 1, 12, ['type:wifi', 'key:e5444-794']),
    ])

    # we get a variable number of statsd messages and are only
    # interested in the sum-total
    for name, total in (('cell', 12), ('wifi', 24)):
        insert_msgs = [
            msg for msg in stats.msgs
            if (msg.startswith('data.observation.insert') and
                'type:' + name in msg)
        ]
        observed = sum(
            int(msg.split(':')[1].split('|')[0]) for msg in insert_msgs)
        assert observed == total
def test_api_key_limit(self, app, data_queues, redis, session, logs):
    """When daily API limit is reached, a 403 is returned."""
    api_key = ApiKeyFactory(maxreq=5)
    session.flush()

    # exhaust today's limit
    dstamp = util.utcnow().strftime("%Y%m%d")
    path = self.metric_path.split(":")[-1]
    key = "apilimit:%s:%s:%s" % (api_key.valid_key, path, dstamp)
    redis.incr(key, 10)

    res = self._call(app, api_key=api_key.valid_key, ip=self.test_ip,
                     status=403)
    self.check_response(data_queues, res, "limit_exceeded")

    # The rejected request is logged with the rate-limit details.
    expected_entry = {
        "api_key": api_key.valid_key,
        "api_key_count": 11,
        "api_key_repeat_ip": False,
        "api_path": self.metric_path.split(":")[1],
        "api_type": self.metric_type,
        "duration_s": logs.only_entry["duration_s"],
        "event": f"POST {self.url} - 403",
        "http_method": "POST",
        "http_path": self.url,
        "http_status": 403,
        "log_level": "info",
        "rate_allowed": False,
        "rate_quota": 5,
        "rate_remaining": 0,
    }
    assert logs.only_entry == expected_entry
def test_api_key_disallows(self):
    """No search should happen when the key disallows the fallback."""
    key = ApiKeyFactory.build(allow_fallback=False)
    query = self.model_query(
        cells=CellFactory.build_batch(2),
        wifis=WifiShardFactory.build_batch(2),
        api_key=key)
    self.check_should_search(query, False)
def test_api_key_blocked(self, app, data_queues, session):
    """A key without locate/region grants is rejected as invalid."""
    blocked_key = ApiKeyFactory(allow_locate=False, allow_region=False)
    session.flush()

    res = self._call(app, api_key=blocked_key.valid_key,
                     ip=self.test_ip, status=400)
    self.check_response(data_queues, res, 'invalid_key')
def test_stats(self):
    """Upload counters are emitted per API key and observation type."""
    ApiKeyFactory(valid_key='e5444-794', log_submit=True)
    self.session.flush()
    self.add_reports(3)
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)
    self._update_all()

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:internal']),
        ('data.report.upload', 2, 3),
        ('data.report.upload', 1, 3, ['key:test']),
        ('data.report.upload', 1, 6, ['key:e5444-794']),
        ('data.observation.upload', 1, 3, ['type:cell', 'key:test']),
        ('data.observation.upload', 1, 6, ['type:wifi', 'key:test']),
        ('data.observation.upload', 0, ['type:cell', 'key:no_key']),
        ('data.observation.upload', 1, 6, ['type:cell', 'key:e5444-794']),
        ('data.observation.upload', 1, 12, ['type:wifi', 'key:e5444-794']),
    ])

    # we get a variable number of statsd messages and are only
    # interested in the sum-total
    for name, total in (('cell', 12), ('wifi', 24)):
        insert_msgs = [
            msg for msg in self.stats_client.msgs
            if (msg.startswith('data.observation.insert') and
                'type:' + name in msg)
        ]
        observed = sum(
            int(msg.split(':')[1].split('|')[0]) for msg in insert_msgs)
        self.assertEqual(observed, total)
def setUp(self):
    """Configure export queues (valid and invalid targets) for the tests."""
    super(TestExporter, self).setUp()
    config = DummyConfig({
        'export:test': {
            'url': None,
            'skip_keys': 'export_source',
            'batch': '3',
        },
        'export:everything': {
            'url': '',
            'batch': '5',
        },
        'export:no_test': {
            'skip_keys': 'test_1 test\ntest:-1',
            'batch': '2',
        },
        # Deliberately broken targets to exercise error handling.
        'export:invalid_ftp': {
            'url': 'ftp://127.0.0.1:9/',
            'batch': '5',
        },
        'export:invalid': {
            'url': 'no_url',
            'batch': '5',
        },
    })
    self.celery_app.export_queues = queues = configure_export(
        self.redis_client, config)
    self.test_queue_key = queues['test'].queue_key()
    ApiKeyFactory(valid_key='test2', log_submit=True)
    self.session.flush()
def test_no_log(self, geoip_db, stats):
    """A query without a valid key emits no stats at all."""
    keyless = ApiKeyFactory.build(valid_key=None)
    self._make_query(geoip_db, stats, self._make_result(),
                     api_key=keyless, api_type='locate')
    stats.check(total=0)
def test_no_api_key_shortname(self):
    """Without a shortname, stats are tagged with the raw valid_key."""
    api_key = ApiKeyFactory.build(shortname=None, log_locate=True)
    self._make_query(api_key=api_key, cell=[CellFactory.build()])
    self.check_stats(counter=[
        ('locate.query',
         ['key:%s' % api_key.valid_key, 'region:none',
          'geoip:false', 'cell:one', 'wifi:none']),
    ])
def test_store_sample(self, app, data_queues, session):
    """With a 0% locate sample rate, nothing is queued for storage."""
    api_key = ApiKeyFactory(store_sample_locate=0)
    cell = CellShardFactory()
    session.flush()

    query = self.model_query(cells=[cell])
    res = self._call(app, body=query, api_key=api_key.valid_key, status=200)
    self.check_model_response(res, cell)
    self.check_queue(data_queues, 0)
def test_no_api_key_shortname(self):
    """Without a shortname, stats are tagged with the raw valid_key."""
    api_key = ApiKeyFactory.build(shortname=None, log_locate=True)
    self._make_query(api_key=api_key, cell=[CellShardFactory.build()])
    self.check_stats(counter=[
        ('locate.query',
         ['key:%s' % api_key.valid_key, 'region:none',
          'geoip:false', 'cell:one', 'wifi:none']),
    ])
def test_null_position(self, celery, redis, session):
    """Reports with null position are queued."""
    ApiKeyFactory(valid_key="no-position")
    ExportConfigFactory(name="everything", batch=5)
    session.flush()

    self.add_reports(celery, 1, api_key="no-position", set_position=False)
    update_incoming.delay().get()

    assert self.queue_length(redis, "queue_export_everything") == 1
def setUpClass(cls):
    """Create the shared API key and source under test once per class."""
    super(SourceTest, cls).setUpClass()
    cls.api_key = ApiKeyFactory.build(valid_key='key')
    cls.source = cls.TestSource(
        geoip_db=cls.geoip_db,
        raven_client=cls.raven_client,
        redis_client=cls.redis_client,
        stats_client=cls.stats_client,
    )
def _setup_table_contents(conn):
    """Reset api_key/export_config tables and insert the default test key."""
    # Avoid import cycle
    from ichnaea.tests.factories import ApiKeyFactory

    conn.execute(text('DELETE FROM api_key'))
    conn.execute(text('DELETE FROM export_config'))
    key = ApiKeyFactory.build(valid_key='test')
    # Strip the SQLAlchemy bookkeeping attribute before a raw insert.
    state = dict(key.__dict__)
    del state['_sa_instance_state']
    conn.execute(key.__table__.insert().values(state))
def test_store_sample(self):
    """store_sample() honors None/0/100 rates and samples in between."""
    key = ApiKeyFactory.build(
        store_sample_locate=None, store_sample_submit=None)
    # Unset rates mean nothing is ever stored.
    for api_name in ('locate', 'submit', 'region', 'transfer'):
        assert key.store_sample(api_name) is False

    key = ApiKeyFactory.build(
        store_sample_locate=0, store_sample_submit=100)
    assert key.store_sample('locate') is False
    assert key.store_sample('submit') is True

    # A 50% rate should produce both outcomes over repeated calls.
    key = ApiKeyFactory.build(store_sample_locate=50)
    outcomes = [key.store_sample('locate') for _ in range(20)]
    assert True in outcomes
    assert False in outcomes
def setUp(self):
    """Create the per-test API key and the source under test."""
    super(BaseSourceTest, self).setUp()
    self.api_key = ApiKeyFactory.build(shortname='test')
    self.source = self.TestSource(
        settings=self.settings,
        geoip_db=self.geoip_db,
        raven_client=self.raven_client,
        redis_client=self.redis_client,
        stats_client=self.stats_client,
    )
def setUpClass(cls):
    """Create the shared API key and source under test once per class."""
    super(SourceTest, cls).setUpClass()
    cls.api_key = ApiKeyFactory.build(shortname='key')
    cls.source = cls.TestSource(
        settings={'foo': '1'},
        geoip_db=cls.geoip_db,
        raven_client=cls.raven_client,
        redis_client=cls.redis_client,
        stats_client=cls.stats_client,
    )
def setUpClass(cls):
    """Create a logging-enabled API key and the source under test."""
    super(SourceTest, cls).setUpClass()
    cls.api_key = ApiKeyFactory.build(shortname='key', log=True)
    cls.source = cls.TestSource(
        settings={'foo': '1'},
        geoip_db=cls.geoip_db,
        raven_client=cls.raven_client,
        redis_client=cls.redis_client,
        stats_client=cls.stats_client,
    )
def test_api_key_blocked(self, app, data_queues, session, logs):
    """A 400 is returned when a key is blocked from locate APIs."""
    blocked_key = ApiKeyFactory(allow_locate=False, allow_region=False)
    session.flush()

    res = self._call(app, api_key=blocked_key.valid_key,
                     ip=self.test_ip, status=400)
    self.check_response(data_queues, res, "invalid_key")

    # The rejection is logged against the blocked key.
    log = logs.only_entry
    assert log["api_key"] == blocked_key.valid_key
    assert not log["api_key_allowed"]
def test_upload(self, celery, session, metricsmock):
    """Reports are batched, gzipped and POSTed to the external export URL."""
    # BUG FIX: the factory key previously was "e5444-7946", which did not
    # match the "e5444e9f-7946" key used by add_reports below, so those
    # reports were attributed to a nonexistent key. Keep the two in sync.
    ApiKeyFactory(valid_key="e5444e9f-7946")
    ExportConfigFactory(
        name="test",
        batch=4,
        schema="geosubmit",
        url="http://127.0.0.1:9/v2/geosubmit?key=external",
    )
    session.flush()

    reports = []
    reports.extend(self.add_reports(celery, 1, source="gnss"))
    reports.extend(self.add_reports(celery, 1, api_key="e5444e9f-7946"))
    reports.extend(
        self.add_reports(celery, 1, api_key=None, source="fused"))
    reports.extend(self.add_reports(celery, 1, set_position=False))

    with requests_mock.Mocker() as mock:
        mock.register_uri("POST", requests_mock.ANY, text="{}")
        update_incoming.delay().get()

    assert mock.call_count == 1
    req = mock.request_history[0]

    # check headers
    assert req.headers["Content-Type"] == "application/json"
    assert req.headers["Content-Encoding"] == "gzip"
    assert req.headers["User-Agent"] == "ichnaea"

    body = util.decode_gzip(req.body)
    send_reports = json.loads(body)["items"]
    assert len(send_reports) == 4

    # Position fields survive the round trip (order may differ).
    for field in ("accuracy", "source", "timestamp"):
        expect = [(report["position"] or {}).get(field)
                  for report in reports]
        gotten = [(report["position"] or {}).get(field)
                  for report in send_reports]
        assert set(expect) == set(gotten)

    assert set([w["ssid"] for w in send_reports[0]["wifiAccessPoints"]
                ]) == set(["my-wifi"])

    assert metricsmock.has_record("incr", "data.export.batch",
                                  value=1, tags=["key:test"])
    assert metricsmock.has_record("incr", "data.export.upload",
                                  value=1, tags=["key:test", "status:200"])
    assert metricsmock.has_record("timing", "data.export.upload.timing",
                                  tags=["key:test"])
def make_query(self, geoip_db, http_session, session, stats, **kw):
    """Build a Query with a fallback-enabled default test key."""
    default_key = ApiKeyFactory.build(valid_key='test', allow_fallback=True)
    return Query(api_key=kw.pop('api_key', default_key),
                 api_type=self.api_type,
                 session=session,
                 http_session=http_session,
                 geoip_db=geoip_db,
                 stats_client=stats,
                 **kw)
def test_api_key_limit(self, app, data_queues, redis, session):
    """Once the daily request limit is exhausted, the API answers 403."""
    api_key = ApiKeyFactory(maxreq=5)
    session.flush()

    # exhaust today's limit
    dstamp = util.utcnow().strftime("%Y%m%d")
    path = self.metric_path.split(":")[-1]
    key = "apilimit:%s:%s:%s" % (api_key.valid_key, path, dstamp)
    redis.incr(key, 10)

    res = self._call(app, api_key=api_key.valid_key,
                     ip=self.test_ip, status=403)
    self.check_response(data_queues, res, "limit_exceeded")
def _setup_table_data(engine):
    """Reset api_key/export_config tables and insert the default test key."""
    # Avoid import cycle
    from ichnaea.tests.factories import ApiKeyFactory

    with engine.connect() as conn:
        with conn.begin() as trans:
            conn.execute(text("DELETE FROM api_key"))
            conn.execute(text("DELETE FROM export_config"))
            key = ApiKeyFactory.build(valid_key="test")
            # Strip the SQLAlchemy bookkeeping attribute before a raw insert.
            state = dict(key.__dict__)
            del state["_sa_instance_state"]
            conn.execute(key.__table__.insert().values(state))
            trans.commit()
def test_get(self, session, session_tracker):
    """get_key() hits the database once, then serves from its cache."""
    api_key = ApiKeyFactory()
    session.flush()
    session_tracker(1)

    result = get_key(session, api_key.valid_key)
    assert isinstance(result, Key)
    session_tracker(2)

    # Test get cache — no additional database query is issued.
    result2 = get_key(session, api_key.valid_key)
    assert isinstance(result2, Key)
    session_tracker(2)
def make_query(self, geoip_db, http_session, session, stats, **kw):
    """Build a Query with a fallback-enabled default test key."""
    default_key = ApiKeyFactory.build(valid_key='test', allow_fallback=True)
    return Query(api_key=kw.pop('api_key', default_key),
                 api_type=self.api_type,
                 session=session,
                 http_session=http_session,
                 geoip_db=geoip_db,
                 stats_client=stats,
                 **kw)
def test_api_key_limit(self):
    """Once the daily request limit is exhausted, the API answers 403."""
    api_key = ApiKeyFactory(maxreq=5)
    self.session.flush()

    # exhaust today's limit
    dstamp = util.utcnow().strftime('%Y%m%d')
    path = self.metric_path.split(':')[-1]
    key = 'apilimit:%s:%s:%s' % (api_key.valid_key, path, dstamp)
    self.redis_client.incr(key, 10)

    res = self._call(api_key=api_key.valid_key,
                     ip=self.test_ip, status=403)
    self.check_response(res, 'limit_exceeded')
def test_fallback(self, app, session, stats):
    # this tests a cell + wifi based query which gets a cell based
    # internal result and continues on to the fallback to get a
    # better wifi based result
    cells = CellShardFactory.create_batch(2, radio=Radio.wcdma)
    wifis = WifiShardFactory.build_batch(3)
    ApiKeyFactory(valid_key='fall', allow_fallback=True)
    session.flush()

    with requests_mock.Mocker() as mock:
        response_result = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100,
        }
        mock.register_uri('POST', requests_mock.ANY, json=response_result)

        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(app, api_key='fall', body=query)

        # The outbound fallback request carries all networks.
        send_json = mock.request_history[0].json()
        assert len(send_json['cellTowers']) == 2
        assert len(send_json['wifiAccessPoints']) == 3
        assert send_json['cellTowers'][0]['radioType'] == 'wcdma'

    self.check_model_response(res, None, lat=1.0, lon=1.0, accuracy=100)
    stats.check(
        counter=[
            ('request', [self.metric_path, 'method:post', 'status:200']),
            (self.metric_type + '.request',
             [self.metric_path, 'key:fall']),
            (self.metric_type + '.query',
             ['key:fall', 'region:none', 'geoip:false',
              'blue:none', 'cell:many', 'wifi:many']),
            (self.metric_type + '.result',
             ['key:fall', 'region:none', 'fallback_allowed:true',
              'accuracy:high', 'status:hit', 'source:fallback']),
            (self.metric_type + '.source',
             ['key:fall', 'region:none', 'source:internal',
              'accuracy:high', 'status:miss']),
            (self.metric_type + '.source',
             ['key:fall', 'region:none', 'source:fallback',
              'accuracy:high', 'status:hit']),
        ],
        timer=[
            ('request', [self.metric_path, 'method:post']),
        ])
def _make_query(self, geoip_db, stats, source, results,
                api_key=None, api_type='locate',
                blue=(), cell=(), wifi=(), **kw):
    """Build a Query, emit source stats for it and return it."""
    effective_key = api_key or ApiKeyFactory.build(valid_key='test')
    query = Query(
        api_key=effective_key,
        api_type=api_type,
        blue=self.blue_model_query(blue),
        cell=self.cell_model_query(cell),
        wifi=self.wifi_model_query(wifi),
        geoip_db=geoip_db,
        stats_client=stats,
        **kw)
    query.emit_source_stats(source, results)
    return query
def _search(self, data_queues, geoip_db, raven, redis, stats,
            session, klass, **kw):
    """Run a search through a freshly constructed searcher of klass."""
    query = Query(
        api_key=ApiKeyFactory.build(valid_key='test'),
        api_type='locate',
        session=session,
        stats_client=stats,
        **kw)
    searcher = klass(
        geoip_db=geoip_db,
        raven_client=raven,
        redis_client=redis,
        stats_client=stats,
        data_queues=data_queues,
    )
    return searcher.search(query)
def setUpClass(cls):
    """Build the shared API key and geoip-backed dummy region models."""
    super(BaseSourceTest, cls).setUpClass()
    cls.api_key = ApiKeyFactory.build(valid_key='test')

    bhutan = cls.geoip_data['Bhutan']
    cls.bhutan_model = DummyModel(
        lat=bhutan['latitude'],
        lon=bhutan['longitude'],
        radius=bhutan['radius'],
        code=bhutan['region_code'],
        name=bhutan['region_name'],
        ip=bhutan['ip'])

    london = cls.geoip_data['London']
    cls.london_model = DummyModel(
        lat=london['latitude'],
        lon=london['longitude'],
        radius=london['radius'],
        code=london['region_code'],
        name=london['region_name'],
        ip=london['ip'])
def setUp(self):
    """Prepare a fallback-enabled key and the canned fallback responses."""
    super(TestSource, self).setUp()
    self.api_key = ApiKeyFactory.build(allow_fallback=True)
    self.fallback_model = DummyModel(
        lat=51.5366, lon=0.03989, radius=1500.0)
    # External-service style response body.
    self.fallback_result = {
        'location': {
            'lat': self.fallback_model.lat,
            'lng': self.fallback_model.lon,
        },
        'accuracy': float(self.fallback_model.radius),
        'fallback': 'lacf',
    }
    # Serialized form as it would be stored in the fallback cache.
    self.fallback_cached_result = floatjson.float_dumps({
        'lat': self.fallback_model.lat,
        'lon': self.fallback_model.lon,
        'accuracy': float(self.fallback_model.radius),
        'fallback': 'lacf',
    })
def test_no_log(self):
    """A key with logging disabled emits no stats."""
    silent_key = ApiKeyFactory.build(shortname='key', log=False)
    self._make_query(api_key=silent_key)
    self.check_stats(total=0)
def test_no_log(self):
    """A key with locate logging disabled emits no stats."""
    silent_key = ApiKeyFactory.build(shortname='key', log_locate=False)
    self._make_query(self._make_result(),
                     api_key=silent_key, api_type='locate')
    self.check_stats(total=0)
def test_can_fallback(self):
    """can_fallback() needs allow_fallback plus a complete fallback config."""
    assert ApiKeyFactory.build(allow_fallback=True).can_fallback()
    assert not ApiKeyFactory.build(allow_fallback=False).can_fallback()
    assert not ApiKeyFactory.build(allow_fallback=None).can_fallback()

    # A missing name or URL disables the fallback.
    assert not ApiKeyFactory.build(
        allow_fallback=True, fallback_name=None).can_fallback()
    assert not ApiKeyFactory.build(
        allow_fallback=True, fallback_url=None).can_fallback()

    # A ratelimit of None disables it, but an explicit 0 is acceptable.
    assert not ApiKeyFactory.build(
        allow_fallback=True, fallback_ratelimit=None).can_fallback()
    assert ApiKeyFactory.build(
        allow_fallback=True, fallback_ratelimit=0).can_fallback()

    # The ratelimit interval must be a positive value.
    assert not ApiKeyFactory.build(
        allow_fallback=True,
        fallback_ratelimit_interval=None).can_fallback()
    assert not ApiKeyFactory.build(
        allow_fallback=True,
        fallback_ratelimit_interval=0).can_fallback()

    # Cache expiry settings do not affect fallback eligibility.
    assert ApiKeyFactory.build(
        allow_fallback=True, fallback_cache_expire=None).can_fallback()
    assert ApiKeyFactory.build(
        allow_fallback=True, fallback_cache_expire=0).can_fallback()
def setUp(self):
    """Prepare the default API key and api_type for searcher tests."""
    super(SearcherTest, self).setUp()
    self.api_key = ApiKeyFactory.build(shortname='test')
    self.api_type = 'locate'
def test_api_key(self):
    """The query exposes the API key it was constructed with."""
    api_key = ApiKeyFactory.build()
    query = Query(api_key=api_key)
    self.assertEqual(query.api_key.valid_key, api_key.valid_key)
    self.assertEqual(query.api_key, api_key)
def test_no_log(self, geoip_db, stats):
    """A query without a valid key emits no stats at all."""
    keyless = ApiKeyFactory.build(valid_key=None)
    self._make_query(
        geoip_db, stats, self._make_result(),
        api_key=keyless, api_type='locate')
    stats.check(total=0)
def test_api_key(self):
    """The query exposes the API key it was constructed with."""
    built_key = ApiKeyFactory.build()
    query = Query(api_key=built_key)
    assert query.api_key.valid_key == built_key.valid_key
    assert query.api_key == built_key
def setUpClass(cls):
    """Prepare the shared API key and London geoip fixtures."""
    super(QueryTest, cls).setUpClass()
    cls.api_key = ApiKeyFactory.build(valid_key='key')
    cls.london = cls.geoip_data['London']
    cls.london_ip = cls.london['ip']
def setUpClass(cls):
    """Prepare a logging-enabled API key and London geoip fixtures."""
    super(QueryTest, cls).setUpClass()
    cls.api_key = ApiKeyFactory.build(shortname='key', log=True)
    cls.london = cls.geoip_data['London']
    cls.london_ip = cls.london['ip']
def test_no_log(self):
    """A query without a valid key emits no stats at all."""
    keyless = ApiKeyFactory.build(valid_key=None)
    self._make_query(self._make_result(),
                     api_key=keyless, api_type='locate')
    self.check_stats(total=0)
def setUp(self):
    """Prepare the default API key and api_type for searcher tests."""
    super(SearcherTest, self).setUp()
    self.api_key = ApiKeyFactory.build(valid_key='key')
    self.api_type = 'locate'