def test_monitor_api_keys_multiple(self):
    """Gauge per-key/per-path API usage and map valid keys to shortnames.

    Seeds Redis counters for today and yesterday; only today's values
    should be reported, with keys known to the database renamed to
    their shortname (falling back to the raw key).
    """
    redis_client = self.redis_client
    now = util.utcnow()
    today = now.strftime('%Y%m%d')
    yesterday = (now - timedelta(hours=24)).strftime('%Y%m%d')
    data = {
        'test': {'v1.search': 11, 'v1.geolocate': 13},
        'no_key_1': {'v1.search': 12},
        'no_key_2': {'v1.geolocate': 15},
    }
    for api_name, per_path in data.items():
        for path, amount in per_path.items():
            # today's counters carry the expected values,
            # yesterday's get different ones and must be ignored
            redis_client.incr(
                'apilimit:%s:%s:%s' % (api_name, path, today), amount)
            redis_client.incr(
                'apilimit:%s:%s:%s' % (api_name, path, yesterday),
                amount - 10)
    self.session.add_all([
        ApiKey(valid_key='no_key_1', shortname='shortname_1'),
        ApiKey(valid_key='no_key_2'),
        ApiKey(valid_key='no_key_3', shortname='shortname_3'),
    ])
    self.session.flush()
    # unrelated Redis entries must not confuse the key scan
    redis_client.lpush('default', 1, 2)
    redis_client.set('cache_something', '{}')
    result = monitor_api_key_limits.delay().get()
    self.check_stats(gauge=[
        ('api.limit', ['key:test', 'path:v1.geolocate']),
        ('api.limit', ['key:test', 'path:v1.search']),
        ('api.limit', ['key:shortname_1', 'path:v1.search']),
        ('api.limit', ['key:no_key_2', 'path:v1.geolocate']),
    ])
    self.assertDictEqual(result, {
        'test': {'v1.search': 11, 'v1.geolocate': 13},
        'shortname_1': {'v1.search': 12},
        'no_key_2': {'v1.geolocate': 15},
    })
def setup_package(module):
    """Package-level test setup: rebuild tables and seed default API keys."""
    # make sure all models are imported so table metadata is complete
    from ichnaea.models import base  # NOQA
    from ichnaea.models import content  # NOQA
    db = _make_db()
    engine = db.engine
    DBIsolation.cleanup_tables(engine)
    DBIsolation.setup_tables(engine)
    # always add a test API key
    session = db.session()
    session.add(ApiKey(valid_key='test', log=True, shortname='test'))
    session.add(ApiKey(valid_key='export', log=False, shortname='export'))
    session.commit()
    session.close()
    db.engine.pool.dispose()
def test_stats(self):
    """Check upload/observation counters per API key after processing."""
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    self.add_reports(3, email='secretemail@localhost',
                     ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)
    self._update_all()
    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:internal']),
        ('data.report.upload', 2, 3),
        ('data.report.upload', 1, 3, ['key:test']),
        ('data.report.upload', 1, 6, ['key:e5444-794']),
        ('data.observation.upload', 1, 3, ['type:cell', 'key:test']),
        ('data.observation.upload', 1, 6, ['type:wifi', 'key:test']),
        ('data.observation.upload', 0, ['type:cell', 'key:no_key']),
        ('data.observation.upload', 1, 6, ['type:cell', 'key:e5444-794']),
        ('data.observation.upload', 1, 12, ['type:wifi', 'key:e5444-794']),
    ])
    # we get a variable number of statsd messages and are only
    # interested in the sum-total per observation type
    for obs_type, expected_total in (('cell', 12), ('wifi', 24)):
        matching = [
            m for m in self.stats_client.msgs
            if (m.startswith('data.observation.insert')
                and 'type:' + obs_type in m)
        ]
        observed_total = sum(
            int(m.split(':')[1].split('|')[0]) for m in matching)
        self.assertEqual(observed_total, expected_total)
def test_fallback_used_when_geoip_also_present(self):
    """Fallback provider is consulted even when GeoIP data is available."""
    cells = CellFactory.build_batch(2, radio=Radio.wcdma)
    wifis = WifiFactory.build_batch(3)
    api_key = ApiKey.getkey(self.session, 'test')
    api_key.allow_fallback = True
    self.session.flush()
    with requests_mock.Mocker() as mock:
        response_location = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100,
        }
        mock.register_uri(
            'POST', requests_mock.ANY, json=response_location)
        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(body=query, ip=self.test_ip)
        # verify the outbound fallback request carried all networks
        send_json = mock.request_history[0].json()
        self.assertEqual(len(send_json['cellTowers']), 2)
        self.assertEqual(len(send_json['wifiAccessPoints']), 3)
    self.check_model_response(res, None, lat=1.0, lon=1.0, accuracy=100)
    self.check_stats(
        timer=[self.metric_url],
        counter=[self.metric + '.api_key.test',
                 self.metric + '.fallback_hit',
                 self.metric_url + '.200',
                 self.metric + '.api_log.test.fallback_hit'],
    )
def monitor_api_key_limits(self):
    """Report today's per-API-key rate-limit counters as statsd gauges.

    Scans Redis for today's ``apilimit:<key>:<date>`` counters, maps each
    valid key to its shortname via the database (falling back to the raw
    key), emits one ``apilimit.<name>`` gauge per key and returns a dict
    of ``{name: count}``. Exceptions are captured and an empty/partial
    dict is returned instead of raising.
    """
    # Fix: dropped the redundant second ``result = {}`` that shadowed
    # this initial binding inside the try block.
    result = {}
    try:
        today = util.utcnow().strftime('%Y%m%d')
        keys = self.redis_client.keys('apilimit:*:' + today)
        if keys:
            values = self.redis_client.mget(keys)
            # reduce 'apilimit:<key>:<date>' to the API key segment
            keys = [k.split(':')[1] for k in keys]
        else:
            values = []
        names = {}
        if keys:
            with self.db_session(commit=False) as session:
                query = (ApiKey.querykeys(session, keys)
                         .options(load_only('valid_key', 'shortname')))
                for api_key in query.all():
                    names[api_key.valid_key] = api_key.name
        for k, v in zip(keys, values):
            # prefer the registered shortname, fall back to the raw key
            name = names.get(k, k)
            value = int(v)
            result[name] = value
            self.stats_client.gauge('apilimit.' + name, value)
    except Exception:  # pragma: no cover
        # Log but ignore the exception
        self.raven_client.captureException()
    return result
def monitor_api_key_limits(self):
    """Emit statsd gauges for today's per-API-key usage counters.

    Redis returns byte keys here, hence the explicit UTF-8 decode
    before splitting out the API key segment. Returns ``{name: count}``;
    any failure is sent to Raven and swallowed.
    """
    result = {}
    try:
        today = util.utcnow().strftime("%Y%m%d")
        raw_keys = self.redis_client.keys("apilimit:*:" + today)
        if raw_keys:
            values = self.redis_client.mget(raw_keys)
            keys = [entry.decode("utf-8").split(":")[1]
                    for entry in raw_keys]
        else:
            keys = raw_keys
            values = []
        names = {}
        if keys:
            with self.db_session(commit=False) as session:
                api_iter = ApiKey.iterkeys(
                    session, keys,
                    extra=lambda query: query.options(
                        load_only("valid_key", "shortname")))
                for api_key in api_iter:
                    names[api_key.valid_key] = api_key.name
        result = {}
        for key, raw_value in zip(keys, values):
            # prefer the registered shortname, fall back to the raw key
            display = names.get(key, key)
            count = int(raw_value)
            result[display] = count
            self.stats_client.gauge("apilimit." + display, count)
    except Exception:  # pragma: no cover
        # Log but ignore the exception
        self.raven_client.captureException()
    return result
def monitor_api_key_limits(self):
    """Gauge today's per-API-key limit counters from Redis.

    Variant using a plain (committing) ``db_session``. Returns a
    ``{name: count}`` dict; errors are reported to Raven and ignored.
    """
    result = {}
    try:
        today = util.utcnow().strftime("%Y%m%d")
        keys = self.redis_client.keys('apilimit:*:' + today)
        values = self.redis_client.mget(keys) if keys else []
        if keys:
            # reduce 'apilimit:<key>:<date>' to the API key segment
            keys = [entry.split(':')[1] for entry in keys]
        names = {}
        if keys:
            with self.db_session() as session:
                query = (ApiKey.querykeys(session, keys).options(
                    load_only('valid_key', 'shortname')))
                for api_key in query.all():
                    names[api_key.valid_key] = api_key.name
        result = {}
        for key, raw_value in zip(keys, values):
            display = names.get(key, key)
            count = int(raw_value)
            result[display] = count
            self.stats_client.gauge('apilimit.' + display, count)
    except Exception:  # pragma: no cover
        # Log but ignore the exception
        self.raven_client.captureException()
    return result
def setUp(self):
    """Configure a set of export targets (valid and invalid) plus a key."""
    super(TestExporter, self).setUp()
    config = DummyConfig({
        'export:test': {
            'url': None,
            'skip_keys': 'export_source',
            'batch': '3',
        },
        'export:everything': {
            'url': '',
            'batch': '5',
        },
        'export:no_test': {
            'skip_keys': 'test_1 test\ntest:-1',
            'batch': '2',
        },
        'export:invalid_ftp': {
            'url': 'ftp://127.0.0.1:9/',
            'batch': '5',
        },
        'export:invalid': {
            'url': 'no_url',
            'batch': '5',
        },
    })
    queues = configure_export(self.redis_client, config)
    self.celery_app.export_queues = queues
    self.test_queue_key = queues['test'].queue_key()
    self.session.add(ApiKey(valid_key='test2', log_submit=True))
    self.session.flush()
def install_apikey(self):
    """Ensure a single unlimited 'test' API key exists in the database.

    If a 'test' key is already present, all ApiKey rows are wiped first
    so the table ends up containing exactly the freshly inserted key.
    """
    session = self.db.session()
    existing = session.query(ApiKey).filter(
        ApiKey.valid_key == 'test').count()
    if existing > 0:
        # clear the whole table before re-inserting
        session.query(ApiKey).delete()
    session.add(ApiKey(valid_key='test', maxreq=0))
    session.commit()
def setUp(self):
    """Build the provider under test with an in-memory test API key."""
    super(ProviderTest, self).setUp()
    test_key = ApiKey(shortname='test', log=True)
    self.provider = self.TestProvider(
        session_db=self.session,
        geoip_db=self.geoip_db,
        api_key=test_key,
        api_name='m',
    )
def test_upload(self):
    """Backup export writes gzipped JSON per queue without leaking PII."""
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    reports = self.add_reports(3, email='secretemail@localhost',
                               ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)
    mock_keys = []
    with mock_s3(mock_keys):
        schedule_export_reports.delay().get()
    self.assertEqual(len(mock_keys), 4)
    keys = []
    test_export = None
    for mock_key in mock_keys:
        self.assertTrue(mock_key.set_contents_from_string.called)
        self.assertEqual(mock_key.content_encoding, 'gzip')
        self.assertEqual(mock_key.content_type, 'application/json')
        self.assertTrue(mock_key.key.startswith('backups/'))
        self.assertTrue(mock_key.key.endswith('.json.gz'))
        self.assertTrue(mock_key.close.called)
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key
    # extract second path segment from key names
    queue_keys = [key.split('/')[1] for key in keys]
    self.assertEqual(set(queue_keys),
                     set(['test', 'no_key', 'e5444-794']))
    # check uploaded content of the 'test' queue export
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_text = util.decode_gzip(args[0])
    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in uploaded_text)
    self.assertFalse(self.geoip_data['London']['ip'] in uploaded_text)
    send_reports = json.loads(uploaded_text)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))
    self.check_stats(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_api_key_limit(self):
    """A key over its daily maxreq quota is rejected with HTTP 403."""
    api_key = uuid.uuid1().hex
    self.session.add(ApiKey(valid_key=api_key, maxreq=5, shortname='dis'))
    self.session.flush()
    # exhaust today's limit by pushing the counter past maxreq
    dstamp = util.utcnow().strftime('%Y%m%d')
    path = self.metric_path.split(':')[-1]
    rate_key = 'apilimit:%s:%s:%s' % (api_key, path, dstamp)
    self.redis_client.incr(rate_key, 10)
    res = self._call(api_key=api_key, ip=self.test_ip, status=403)
    self.check_response(res, 'limit_exceeded')
def test_monitor_api_key_limits_multiple(self):
    """Only today's counters are gauged; known keys use their shortname."""
    redis_client = self.redis_client
    session = self.db_master_session
    now = util.utcnow()
    today = now.strftime("%Y%m%d")
    yesterday = (now - timedelta(hours=24)).strftime("%Y%m%d")
    data = {
        'test': 11,
        'no_key_1': 12,
        'no_key_2': 15,
    }
    for api_name, amount in data.items():
        # today's counter holds the expected value,
        # yesterday's differs and must be ignored
        redis_client.incr("apilimit:%s:%s" % (api_name, today), amount)
        redis_client.incr(
            "apilimit:%s:%s" % (api_name, yesterday), amount - 10)
    session.add_all([
        ApiKey(valid_key='no_key_1', shortname='shortname_1'),
        ApiKey(valid_key='no_key_2'),
        ApiKey(valid_key='no_key_3', shortname='shortname_3'),
    ])
    session.flush()
    # unrelated Redis entries must not confuse the key scan
    redis_client.lpush('default', 1, 2)
    redis_client.set('cache_something', '{}')
    result = monitor_api_key_limits.delay().get()
    self.check_stats(gauge=[
        'apilimit.test',
        'apilimit.shortname_1',
        'apilimit.no_key_2',
    ])
    self.assertDictEqual(result, {
        'test': 11,
        'shortname_1': 12,
        'no_key_2': 15,
    })
def test_upload(self):
    """HTTP export uploads one gzipped JSON batch without leaking PII."""
    # NOTE(review): the ApiKey added here is 'e5444-794' while the
    # reports below use api_key='e5444e9f-7946' — confirm this
    # mismatch is intentional.
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    reports = []
    reports.extend(
        self.add_reports(1, email='secretemail@localhost',
                         ip=self.geoip_data['London']['ip']))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))
    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()
    self.assertEqual(mock.call_count, 1)
    req = mock.request_history[0]
    # check headers
    self.assertEqual(req.headers['Content-Type'], 'application/json')
    self.assertEqual(req.headers['Content-Encoding'], 'gzip')
    self.assertEqual(req.headers['User-Agent'], 'ichnaea')
    # check body
    body = util.decode_gzip(req.body)
    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in body)
    self.assertFalse(self.geoip_data['London']['ip'] in body)
    # make sure a standards based json can decode this data
    # and none of our internal_json structures end up in it
    send_reports = json.loads(body)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))
    self.assertEqual(
        set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
        set(['my-wifi']))
    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_fallback(self, app, ro_session, stats):
    """Internal miss on a cell+wifi query falls through to the fallback.

    This tests a cell + wifi based query which gets a cell based
    internal result and continues on to the fallback to get a
    better wifi based result.
    """
    cells = CellShardFactory.create_batch(
        2, session=ro_session, radio=Radio.wcdma)
    wifis = WifiShardFactory.build_batch(3)
    api_key = ApiKey.get(ro_session, 'test')
    api_key.allow_fallback = True
    ro_session.flush()
    with requests_mock.Mocker() as mock:
        response_result = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100,
        }
        mock.register_uri(
            'POST', requests_mock.ANY, json=response_result)
        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(app, body=query)
        # the outbound fallback request must contain every network
        send_json = mock.request_history[0].json()
        assert len(send_json['cellTowers']) == 2
        assert len(send_json['wifiAccessPoints']) == 3
        assert send_json['cellTowers'][0]['radioType'] == 'wcdma'
    self.check_model_response(res, None, lat=1.0, lon=1.0, accuracy=100)
    stats.check(counter=[
        ('request', [self.metric_path, 'method:post', 'status:200']),
        (self.metric_type + '.request', [self.metric_path, 'key:test']),
        (self.metric_type + '.query',
            ['key:test', 'region:none', 'geoip:false',
             'blue:none', 'cell:many', 'wifi:many']),
        (self.metric_type + '.result',
            ['key:test', 'region:none', 'fallback_allowed:true',
             'accuracy:high', 'status:hit', 'source:fallback']),
        (self.metric_type + '.source',
            ['key:test', 'region:none', 'source:internal',
             'accuracy:high', 'status:miss']),
        (self.metric_type + '.source',
            ['key:test', 'region:none', 'source:fallback',
             'accuracy:high', 'status:hit']),
    ], timer=[
        ('request', [self.metric_path, 'method:post']),
    ])
def test_fallback(self):
    """Internal miss on a cell+wifi query falls through to the fallback.

    This tests a cell + wifi based query which gets a cell based
    internal result and continues on to the fallback to get a
    better wifi based result.
    """
    cells = CellShardFactory.create_batch(2, radio=Radio.wcdma)
    wifis = WifiShardFactory.build_batch(3)
    api_key = ApiKey.get(self.session, 'test')
    api_key.allow_fallback = True
    self.session.flush()
    with requests_mock.Mocker() as mock:
        response_result = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100,
        }
        mock.register_uri(
            'POST', requests_mock.ANY, json=response_result)
        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(body=query)
        # the outbound fallback request must contain every network
        send_json = mock.request_history[0].json()
        self.assertEqual(len(send_json['cellTowers']), 2)
        self.assertEqual(len(send_json['wifiAccessPoints']), 3)
        self.assertEqual(send_json['cellTowers'][0]['radioType'], 'wcdma')
    self.check_model_response(res, None, lat=1.0, lon=1.0, accuracy=100)
    self.check_stats(counter=[
        ('request', [self.metric_path, 'method:post', 'status:200']),
        (self.metric_type + '.request', [self.metric_path, 'key:test']),
        (self.metric_type + '.query',
            ['key:test', 'region:none', 'geoip:false',
             'blue:none', 'cell:many', 'wifi:many']),
        (self.metric_type + '.result',
            ['key:test', 'region:none', 'fallback_allowed:true',
             'accuracy:high', 'status:hit', 'source:fallback']),
        (self.metric_type + '.source',
            ['key:test', 'region:none', 'source:internal',
             'accuracy:high', 'status:miss']),
        (self.metric_type + '.source',
            ['key:test', 'region:none', 'source:fallback',
             'accuracy:high', 'status:hit']),
    ], timer=[
        ('request', [self.metric_path, 'method:post']),
    ])
def insert_measures(self, items=None, nickname='', email='', api_key_text=None): if not items: # pragma: no cover return 0 reports = kombu_loads(items) with self.db_session() as session: api_key = api_key_text and ApiKey.getkey(session, api_key_text) queue = ReportQueueV1(self, session, api_key, insert_cell_task=insert_measures_cell, insert_wifi_task=insert_measures_wifi) length = queue.insert(reports, nickname=nickname, email=email) session.commit() return length
def test_api_key_limit(self):
    """A key past its daily quota yields a 403 dailyLimitExceeded error."""
    london = self.geoip_data['London']
    api_key = uuid1().hex
    self.session.add(ApiKey(valid_key=api_key, maxreq=5, shortname='dis'))
    self.session.flush()
    # exhaust today's limit by pushing the counter past maxreq
    dstamp = util.utcnow().strftime("%Y%m%d")
    rate_key = "apilimit:%s:%s" % (api_key, dstamp)
    self.redis_client.incr(rate_key, 10)
    res = self.app.post_json(
        '%s?key=%s' % (self.url, api_key), {},
        extra_environ={'HTTP_X_FORWARDED_FOR': london['ip']},
        status=403)
    errors = res.json['error']['errors']
    self.assertEqual(errors[0]['reason'], 'dailyLimitExceeded')
def test_fallback_used_when_geoip_also_present(self):
    """v1/search consults the fallback even with GeoIP data present."""
    london = self.geoip_data['London']
    cell_key = dict(mcc=FRANCE_MCC, mnc=2, lac=3)
    api_key = ApiKey.getkey(self.session, 'test')
    api_key.allow_fallback = True
    self.session.commit()
    with requests_mock.Mocker() as mock:
        response_location = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100,
        }
        mock.register_uri(
            'POST', requests_mock.ANY, json=response_location)
        res = self.app.post_json(
            '/v1/search?key=test',
            {'radio': Radio.gsm.name, 'cell': [
                dict(radio=Radio.umts.name, cid=4, **cell_key),
                dict(radio=Radio.umts.name, cid=5, **cell_key),
            ]},
            extra_environ={'HTTP_X_FORWARDED_FOR': london['ip']},
            status=200)
        # verify the outbound fallback request payload
        send_json = mock.request_history[0].json()
        self.assertEqual(len(send_json['cellTowers']), 2)
        self.assertEqual(send_json['cellTowers'][0]['radioType'], 'wcdma')
    self.assertEqual(res.content_type, 'application/json')
    self.assertEqual(res.json, {'status': 'ok',
                                'lat': 1.0,
                                'lon': 1.0,
                                'accuracy': 100})
    self.check_stats(
        timer=[('request.v1.search', 1)],
        counter=[('search.api_key.test', 1),
                 ('search.fallback_hit', 1),
                 ('request.v1.search.200', 1),
                 ('search.api_log.test.fallback_hit', 1)],
    )
def insert_measures(self, items=None, email=None, ip=None, nickname=None, api_key_text=None): if not items: # pragma: no cover return 0 reports = kombu_loads(items) with self.redis_pipeline() as pipe: with self.db_session() as session: api_key = api_key_text and ApiKey.getkey(session, api_key_text) queue = ReportQueue(self, session, pipe, api_key=api_key, email=email, ip=ip, nickname=nickname, insert_cell_task=insert_measures_cell, insert_wifi_task=insert_measures_wifi) length = queue.insert(reports) return length
def _make_query(self, data=None,
                TestLocation=None,  # NOQA
                TestProvider=None, TestSearcher=None):
    """Run a search through a Searcher built from overridable stubs.

    Any of the location/provider/searcher classes may be supplied by
    the caller; defaults are inert stubs that never report a result.
    """
    if not TestLocation:
        class TestLocation(Location):
            # stub location: never accurate, never found

            def accurate_enough(self):
                return False

            def found(self):
                return False

            def more_accurate(self, other):
                return False

    if not TestProvider:
        class TestProvider(Provider):
            location_type = TestLocation
            log_name = 'test'

            def locate(self, data):
                return self.location_type()

    if not TestSearcher:
        class TestSearcher(Searcher):
            provider_classes = (('test', (TestProvider, )), )

            def _prepare(self, location):
                return location

    searcher = TestSearcher(
        session_db=self.session,
        geoip_db=self.geoip_db,
        api_key=ApiKey(shortname='test', log=True),
        api_name='m',
    )
    return searcher.search(data)
def test_fallback_used_with_geoip(self, app, ro_session, stats):
    """Fallback result wins even when the query includes a GeoIP hint."""
    cells = CellShardFactory.create_batch(
        2, session=ro_session, radio=Radio.wcdma)
    wifis = WifiShardFactory.build_batch(3)
    api_key = ApiKey.get(ro_session, 'test')
    api_key.allow_fallback = True
    ro_session.flush()
    with requests_mock.Mocker() as mock:
        response_result = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100.0,
        }
        mock.register_uri(
            'POST', requests_mock.ANY, json=response_result)
        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(app, body=query, ip=self.test_ip)
        # the outbound fallback request must contain every network
        send_json = mock.request_history[0].json()
        assert len(send_json['cellTowers']) == 2
        assert len(send_json['wifiAccessPoints']) == 3
    self.check_model_response(res, None, lat=1.0, lon=1.0, accuracy=100)
    stats.check(counter=[
        ('request', [self.metric_path, 'method:post', 'status:200']),
        (self.metric_type + '.request', [self.metric_path, 'key:test']),
        (self.metric_type + '.result',
            ['key:test', 'region:GB', 'fallback_allowed:true',
             'accuracy:high', 'status:hit', 'source:fallback']),
        (self.metric_type + '.source',
            ['key:test', 'region:GB', 'source:fallback',
             'accuracy:high', 'status:hit']),
    ], timer=[
        ('request', [self.metric_path, 'method:post']),
    ])
def test_fallback_used_with_geoip(self):
    """Fallback result wins even when the query includes a GeoIP hint."""
    cells = CellShardFactory.create_batch(2, radio=Radio.wcdma)
    wifis = WifiShardFactory.build_batch(3)
    api_key = ApiKey.get(self.session, 'test')
    api_key.allow_fallback = True
    self.session.flush()
    with requests_mock.Mocker() as mock:
        response_result = {
            'location': {
                'lat': 1.0,
                'lng': 1.0,
            },
            'accuracy': 100.0,
        }
        mock.register_uri(
            'POST', requests_mock.ANY, json=response_result)
        query = self.model_query(cells=cells, wifis=wifis)
        res = self._call(body=query, ip=self.test_ip)
        # the outbound fallback request must contain every network
        send_json = mock.request_history[0].json()
        self.assertEqual(len(send_json['cellTowers']), 2)
        self.assertEqual(len(send_json['wifiAccessPoints']), 3)
    self.check_model_response(res, None, lat=1.0, lon=1.0, accuracy=100)
    self.check_stats(counter=[
        ('request', [self.metric_path, 'method:post', 'status:200']),
        (self.metric_type + '.request', [self.metric_path, 'key:test']),
        (self.metric_type + '.result',
            ['key:test', 'region:GB', 'fallback_allowed:true',
             'accuracy:high', 'status:hit', 'source:fallback']),
        (self.metric_type + '.source',
            ['key:test', 'region:GB', 'source:fallback',
             'accuracy:high', 'status:hit']),
    ], timer=[
        ('request', [self.metric_path, 'method:post']),
    ])
def country_view(request):
    """Handle a country lookup request and return a JSON response.

    Validates the request body against the geolocate schema, runs a
    country search (without an API-key requirement — a blank ApiKey is
    used), and returns the searcher's response, or a JSON 404 when no
    country could be determined.
    """
    request_data, errors = preprocess_request(
        request,
        schema=GeoLocateSchema(),
        response=JSONParseError,
        accept_empty=True,
    )
    search_data = prepare_search_data(
        request_data, client_addr=request.client_addr)
    response = CountrySearcher(
        session_db=request.db_ro_session,
        geoip_db=request.registry.geoip_db,
        api_key=ApiKey(),
        api_name='country',
    ).search(search_data)
    if not response:
        # no result: emit a JSON-bodied 404 instead of the default HTML
        response = HTTPNotFound()
        response.content_type = 'application/json'
        response.body = NOT_FOUND
    # Fix: collapsed the duplicated early return inside the if-branch
    # into this single shared return.
    return response
def setUp(self):
    """Seed the slave database session with two valid API keys."""
    AppTestCase.setUp(self)
    session = self.db_slave_session
    # one plain key and one containing a dot
    session.add(ApiKey(valid_key='test'))
    session.add(ApiKey(valid_key='test.test'))
    session.commit()
def _make_one(self, **kw):
    """Construct an ApiKey model instance from the given keyword args."""
    # imported lazily so the module under test is loaded at call time
    from ichnaea.models import ApiKey
    instance = ApiKey(**kw)
    return instance