def leaders_view(self):
    """Render the all-time leaderboard, split across two columns.

    The rendered rows are cached in Redis for ten minutes.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['leaders']
    cached = redis_client.get(cache_key)
    if cached:
        data = loads(cached)
    else:
        session = self.request.db_ro_session
        # Rank entries starting at 1; 'anchor' mirrors the nickname.
        data = [
            {
                'pos': rank,
                'num': entry['num'],
                'nickname': entry['nickname'],
                'anchor': entry['nickname'],
            }
            for rank, entry in enumerate(leaders(session), 1)
        ]
        # Cache the rendered rows, expire after 10 minutes.
        redis_client.set(cache_key, dumps(data), ex=600)
    # The first column gets the extra row when the count is odd.
    half = len(data) // 2 + len(data) % 2
    return {
        'page_title': 'Leaderboard',
        'leaders1': data[:half],
        'leaders2': data[half:],
    }
def leaders_view(self):
    """Show the all-time leaderboard as two side-by-side columns.

    Row data is cached in Redis and expires after ten minutes.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['leaders']
    cached = redis_client.get(cache_key)
    if cached:
        rows = loads(cached)
    else:
        session = self.request.db_ro_session
        rows = []
        for position, entry in enumerate(leaders(session)):
            rows.append({
                'pos': position + 1,
                'num': entry['num'],
                'nickname': entry['nickname'],
                'anchor': entry['nickname'],
            })
        # Cache the rendered rows, expire after 10 minutes.
        redis_client.set(cache_key, dumps(rows), ex=600)
    # Odd row counts put the extra entry into the first column.
    split = len(rows) // 2 + len(rows) % 2
    leaders1 = rows[:split]
    leaders2 = rows[split:]
    return {
        'page_title': 'Leaderboard',
        'leaders1': leaders1,
        'leaders2': leaders2,
    }
def stats_view(self):
    """Render the global statistics page.

    Metric values are cached in Redis for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['stats']
    cached = redis_client.get(cache_key)
    if cached:
        data = loads(cached)
    else:
        session = self.request.db_ro_session
        data = {'leaders': [], 'metrics1': [], 'metrics2': []}
        metrics = global_stats(session)
        metric_names = [
            (StatKey.unique_cell.name, 'MLS Cells'),
            (StatKey.unique_ocid_cell.name, 'OpenCellID Cells'),
            (StatKey.cell.name, 'MLS Cell Observations'),
            (StatKey.unique_wifi.name, 'Wifi Networks'),
            (StatKey.wifi.name, 'Wifi Observations'),
        ]
        # First three metrics fill the left column, the rest the right.
        for target, names in (('metrics1', metric_names[:3]),
                              ('metrics2', metric_names[3:])):
            for mid, name in names:
                data[target].append({'name': name, 'value': metrics[mid]})
        redis_client.set(cache_key, dumps(data), ex=3600)
    result = {'page_title': 'Statistics'}
    result.update(data)
    return result
def leaders_weekly_view(self):
    """Render the weekly leaderboards (new cells / new wifis).

    Scores are cached in Redis for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['leaders_weekly']
    cached = redis_client.get(cache_key)
    if cached:
        data = loads(cached)
    else:
        session = self.request.db_slave_session
        data = {
            'new_cell': {'leaders1': [], 'leaders2': []},
            'new_wifi': {'leaders1': [], 'leaders2': []},
        }
        for name, score_rows in leaders_weekly(session).items():
            rows = [
                {
                    'pos': idx + 1,
                    'num': entry['num'],
                    'nickname': entry['nickname'],
                }
                for idx, entry in enumerate(score_rows)
            ]
            # Split each board into two columns, extra row on the left.
            half = len(rows) // 2 + len(rows) % 2
            data[name] = {
                'leaders1': rows[:half],
                'leaders2': rows[half:],
            }
        redis_client.set(cache_key, dumps(data), ex=3600)
    return {
        'page_title': 'Weekly Leaderboard',
        'scores': data,
    }
def leaders_weekly_view(self):
    """Show the weekly leaderboards for newly observed cells and wifis.

    The computed boards are cached in Redis for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['leaders_weekly']
    cached = redis_client.get(cache_key)
    if cached:
        data = loads(cached)
    else:
        session = self.request.db_ro_session
        data = {
            'new_cell': {'leaders1': [], 'leaders2': []},
            'new_wifi': {'leaders1': [], 'leaders2': []},
        }
        for board_name, entries in leaders_weekly(session).items():
            ranked = []
            for rank, entry in enumerate(entries, 1):
                ranked.append({
                    'pos': rank,
                    'num': entry['num'],
                    'nickname': entry['nickname'],
                })
            # Two columns per board; odd counts favor the first column.
            half = len(ranked) // 2 + len(ranked) % 2
            data[board_name] = {
                'leaders1': ranked[:half],
                'leaders2': ranked[half:],
            }
        redis_client.set(cache_key, dumps(data), ex=3600)
    return {
        'page_title': 'Weekly Leaderboard',
        'scores': data,
    }
def dequeue_lacs(redis_client, batch=100):
    """Atomically pop up to *batch* serialized items off the LAC queue.

    Uses a Redis MULTI pipeline so the read (lrange) and the removal
    (ltrim) of the same items happen as one transaction.
    """
    key = UPDATE_KEY['cell_lac']
    pipe = redis_client.pipeline()
    pipe.multi()
    pipe.lrange(key, 0, batch - 1)
    pipe.ltrim(key, batch, -1)
    # execute() returns one result per queued command; the lrange
    # result (index 0) holds the raw serialized items.
    raw_items = pipe.execute()[0]
    return [loads(raw) for raw in raw_items]
def stats_cell_json(self):
    """Return the unique-cell histogram as a single chart series.

    Histogram data is cached in Redis for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['stats_cell_json']
    cached = redis_client.get(cache_key)
    if cached:
        data = loads(cached)
    else:
        session = self.request.db_slave_session
        # NOTE(review): passes the stat name as a plain string here,
        # while sibling views pass StatKey members — presumably an
        # older histogram() signature; confirm before unifying.
        data = histogram(session, 'unique_cell')
        redis_client.set(cache_key, dumps(data), ex=3600)
    return {'series': [{'title': 'MLS Cells', 'data': data[0]}]}
def stats_wifi_json(self):
    """Return the unique-wifi histogram as a single chart series.

    Histogram data is cached in Redis for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['stats_wifi_json']
    cached = redis_client.get(cache_key)
    if cached:
        data = loads(cached)
    else:
        data = histogram(self.request.db_ro_session, StatKey.unique_wifi)
        redis_client.set(cache_key, dumps(data), ex=3600)
    return {'series': [{'title': 'MLS WiFi', 'data': data[0]}]}
def stats_countries_view(self):
    """Render per-country cell statistics, cached in Redis for one hour."""
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['stats_countries']
    cached = redis_client.get(cache_key)
    if cached:
        metrics = loads(cached)
    else:
        metrics = countries(self.request.db_ro_session)
        redis_client.set(cache_key, dumps(metrics), ex=3600)
    return {'page_title': 'Cell Statistics', 'metrics': metrics}
def downloads_view(self):
    """List downloadable export files from the S3 assets bucket.

    The S3 listing is cached in Redis for ten minutes to avoid
    hitting S3 on every page view.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['downloads']
    cached = redis_client.get(cache_key)
    if cached:
        files = loads(cached)
    else:
        settings = self.request.registry.settings
        assets_bucket = settings['ichnaea']['s3_assets_bucket']
        assets_url = settings['ichnaea']['assets_url']
        raven_client = self.request.registry.raven_client
        files = s3_list_downloads(assets_bucket, assets_url, raven_client)
        # cache the download files, expire after 10 minutes
        redis_client.set(cache_key, dumps(files), ex=600)
    return {'page_title': 'Downloads', 'files': files}
def stats_cell_json(self):
    """Return MLS and OCID unique-cell histograms as chart series.

    The two series are cached together in Redis for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['stats_cell_json']
    cached = redis_client.get(cache_key)
    if cached:
        series = loads(cached)
    else:
        session = self.request.db_slave_session
        series = [
            {'title': 'MLS Cells',
             'data': histogram(session, StatKey.unique_cell)[0]},
            {'title': 'OCID Cells',
             'data': histogram(session, StatKey.unique_ocid_cell)[0]},
        ]
        redis_client.set(cache_key, dumps(series), ex=3600)
    return {'series': series}
def stats_cell_json(self):
    """Expose the MLS and OCID unique-cell histograms for charting.

    Both histograms are computed together and cached for one hour.
    """
    redis_client = self.request.registry.redis_client
    cache_key = CACHE_KEYS['stats_cell_json']
    cached = redis_client.get(cache_key)
    if cached:
        chart_data = loads(cached)
    else:
        session = self.request.db_ro_session
        mls_rows = histogram(session, StatKey.unique_cell)
        ocid_rows = histogram(session, StatKey.unique_ocid_cell)
        chart_data = [
            {'title': 'MLS Cells', 'data': mls_rows[0]},
            {'title': 'OCID Cells', 'data': ocid_rows[0]},
        ]
        redis_client.set(cache_key, dumps(chart_data), ex=3600)
    return {'series': chart_data}
def preprocess_request(request, schema, extra_checks=(), response=JSONError,
                       accept_empty=False):
    """Decode, JSON-parse and schema-validate a request body.

    Returns a ``(validated, errors)`` tuple. On failure the *response*
    exception class is raised with the accumulated errors, unless
    *response* is None (used by schema tests). Gzip-encoded bodies are
    transparently decompressed; *extra_checks* callables run after
    schema validation and may append further errors.
    """
    errors = []
    validated = {}
    body = request.body
    if body:
        if request.headers.get('Content-Encoding') == 'gzip':
            # handle gzip request bodies
            try:
                # 16 + MAX_WBITS tells zlib to expect a gzip header
                body = zlib.decompress(body, 16 + zlib.MAX_WBITS)
            except zlib.error:  # pragma: no cover
                errors.append(dict(name=None, description=MSG_GZIP))
        if not errors:
            try:
                body = loads(body, encoding=request.charset)
            except ValueError as e:
                # NOTE(review): e.message is a Python 2-only attribute.
                errors.append(dict(name=None, description=e.message))
    else:  # pragma: no cover
        errors.append(dict(name=None, description=MSG_EMPTY))

    if accept_empty and body == {}:
        return ({}, errors)

    if not body or (errors and response is not None):
        if response is not None:
            request.registry.heka_client.error('error_handler' + repr(errors))
            raise response(errors)

    # schema validation, but report at most one error at a time
    verify_schema(schema, body, errors, validated)
    for func in extra_checks:
        func(validated, errors)

    if errors and response is not None:
        # the response / None check is used in schema tests
        request.registry.heka_client.error('error_handler' + repr(errors))
        raise response(errors)
    return (validated, errors)
def insert_measures(self, items=None, nickname='', email=''):
    """Task: decode a serialized measure batch and store it for a user.

    Returns the number of inserted items; retries the task on any
    unexpected error after reporting it to heka/raven.
    """
    if not items:  # pragma: no cover
        return 0
    decoded = loads(items)
    length = len(decoded)
    try:
        with self.db_session() as session:
            userid, nickname, email = process_user(
                nickname, email, session)
            process_measures(decoded, session, userid=userid)
            self.stats_client.incr(
                "items.uploaded.batches", count=length)
            session.commit()
            return length
    except Exception as exc:  # pragma: no cover
        self.heka_client.raven('error')
        raise self.retry(exc=exc)
def test_unknown_api_key(self):
    """A search with an unregistered API key returns a 400 error."""
    app = self.app
    session = self.db_slave_session
    key = dict(mcc=FRANCE_MCC, mnc=2, lac=3, cid=4)
    cell = Cell(lat=PARIS_LAT, lon=PARIS_LON,
                radio=RADIO_TYPE['umts'], **key)
    session.add(cell)
    session.commit()
    res = app.post_json(
        '/v1/search?key=unknown_key',
        {"radio": "gsm", "cell": [dict(radio="umts", **key)]},
        status=400)
    self.assertEqual(res.json, loads(INVALID_API_KEY))
    self.check_stats(counter=['search.unknown_api_key'])
def insert_measures(self, items=None, nickname=''):
    """Task: decode and persist a serialized batch of measures.

    Returns the number of inserted items; on unexpected errors the
    failure is reported and the task is retried.
    """
    if not items:  # pragma: no cover
        return 0
    batch = loads(items)
    batch_size = len(batch)
    try:
        with self.db_session() as session:
            userid, nickname = process_user(nickname, session)
            process_measures(batch, session, userid=userid)
            self.stats_client.incr(
                "items.uploaded.batches", count=batch_size)
            session.commit()
            return batch_size
    except Exception as exc:  # pragma: no cover
        self.heka_client.raven('error')
        raise self.retry(exc=exc)
def test_unknown_api_key(self):
    """Searching with an unknown API key must yield the 400 error body."""
    app = self.app
    session = self.db_slave_session
    key = dict(mcc=FRANCE_MCC, mnc=2, lac=3, cid=4)
    session.add(Cell(lat=PARIS_LAT, lon=PARIS_LON,
                     radio=RADIO_TYPE['umts'], **key))
    session.commit()
    payload = {"radio": "gsm", "cell": [dict(radio="umts", **key)]}
    res = app.post_json('/v1/search?key=unknown_key', payload, status=400)
    self.assertEqual(res.json, loads(INVALID_API_KEY))
    self.check_stats(counter=['search.unknown_api_key'])
def test_unknown_api_key(self):
    """An unregistered API key makes the search endpoint respond 400."""
    app = self.app
    session = self.session
    key = dict(mcc=FRANCE_MCC, mnc=2, lac=3, cid=4)
    session.add(Cell(lat=PARIS_LAT, lon=PARIS_LON, range=1000,
                     radio=Radio.umts, **key))
    session.commit()
    payload = {
        'radio': Radio.gsm.name,
        'cell': [dict(radio=Radio.umts.name, **key)],
    }
    res = app.post_json('/v1/search?key=unknown_key', payload, status=400)
    self.assertEqual(res.json, loads(INVALID_API_KEY))
    self.check_stats(counter=['search.unknown_api_key'])
def preprocess_request(request, schema, response=JSONError, accept_empty=False):
    """Decode, parse and validate a JSON request body against *schema*.

    Returns a ``(validated, errors)`` tuple; raises *response* with the
    accumulated errors on failure, unless *response* is None (used by
    schema tests). Gzip request bodies are decompressed transparently.
    """
    errors = []
    validated = {}
    body = request.body
    if body:
        if request.headers.get('Content-Encoding') == 'gzip':
            # handle gzip request bodies
            try:
                body = util.decode_gzip(body)
            except zlib.error:  # pragma: no cover
                errors.append(dict(name=None, description=MSG_GZIP))
        if not errors:
            try:
                body = loads(body, encoding=request.charset)
            except ValueError as e:
                # NOTE(review): e.message is a Python 2-only attribute.
                errors.append(dict(name=None, description=e.message))
    else:  # pragma: no cover
        errors.append(dict(name=None, description=MSG_EMPTY))

    if accept_empty and not body:
        return ({}, errors)

    if not body or (errors and response is not None):
        if response is not None:
            raise response(errors)

    # schema validation, but report at most one error at a time
    verify_schema(schema, body, errors, validated)

    if errors and response is not None:
        # the response / None check is used in schema tests
        raise response(errors)
    return (validated, errors)
def test_unknown_api_key(self):
    """Posting a search with an unknown key returns the invalid-key 400."""
    app = self.app
    session = self.session
    key = dict(mcc=FRANCE_MCC, mnc=2, lac=3, cid=4)
    cell = Cell(lat=PARIS_LAT, lon=PARIS_LON, range=1000,
                radio=Radio.umts, **key)
    session.add(cell)
    session.commit()
    res = app.post_json(
        '/v1/search?key=unknown_key',
        {"radio": Radio.gsm.name,
         "cell": [dict(radio=Radio.umts.name, **key)]},
        status=400)
    self.assertEqual(res.json, loads(INVALID_API_KEY))
    self.check_stats(counter=['search.unknown_api_key'])