def leaders_view(self):
    """Render the leaderboard page, backed by a 30 minute Redis cache."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['leaders']
    cached = redis_client.get(cache_key)
    if cached:
        data = internal_loads(cached)
    else:
        session = self.request.db_ro_session
        data = [
            {
                'pos': position + 1,
                'num': entry['num'],
                'nickname': entry['nickname'],
                'anchor': entry['nickname'],
            }
            for position, entry in enumerate(leaders(session))
        ]
        redis_client.set(cache_key, internal_dumps(data), ex=1800)
    # Split into two columns; the first column gets the extra row
    # when the total count is odd.
    split = (len(data) + 1) // 2
    return {
        'page_title': 'Leaderboard',
        'leaders1': data[:split],
        'leaders2': data[split:],
    }
def _enqueue(self, items, queue_key, batch=100, expire=False, pipe=None):
    """Serialize *items* and push them onto the Redis queue *queue_key*.

    Reuses the given Redis pipeline when one is provided, otherwise
    opens and commits a fresh pipeline around the push.
    """
    serialized = [str(internal_dumps(item)) for item in items]
    if pipe is None:
        with redis_pipeline(self.redis_client) as new_pipe:
            self._push(new_pipe, serialized, queue_key,
                       batch=batch, expire=expire)
    else:
        self._push(pipe, serialized, queue_key, batch=batch, expire=expire)
def leaders_weekly_view(self):
    """Render the weekly leaderboard page, cached in Redis for one hour."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['leaders_weekly']
    cached = redis_client.get(cache_key)
    if cached:
        data = internal_loads(cached)
    else:
        session = self.request.db_ro_session
        data = {
            'new_cell': {'leaders1': [], 'leaders2': []},
            'new_wifi': {'leaders1': [], 'leaders2': []},
        }
        for name, rows in leaders_weekly(session).items():
            formatted = [
                {
                    'pos': idx + 1,
                    'num': row['num'],
                    'nickname': row['nickname'],
                }
                for idx, row in enumerate(rows)
            ]
            # First column gets the extra entry when the count is odd.
            split = (len(formatted) + 1) // 2
            data[name] = {
                'leaders1': formatted[:split],
                'leaders2': formatted[split:],
            }
        redis_client.set(cache_key, internal_dumps(data), ex=3600)
    return {
        'page_title': 'Weekly Leaderboard',
        'scores': data,
    }
def stats_view(self):
    """Render the global statistics page, cached in Redis for one hour."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['stats']
    cached = redis_client.get(cache_key)
    if cached:
        data = internal_loads(cached)
    else:
        session = self.request.db_ro_session
        data = {'leaders': [], 'metrics1': [], 'metrics2': []}
        metrics = global_stats(session)
        # (stat key, display name) pairs; the first three fill the
        # left-hand column, the remaining two the right-hand one.
        metric_names = [
            (StatKey.unique_cell.name, 'MLS Cells'),
            (StatKey.unique_cell_ocid.name, 'OpenCellID Cells'),
            (StatKey.cell.name, 'MLS Cell Observations'),
            (StatKey.unique_wifi.name, 'Wifi Networks'),
            (StatKey.wifi.name, 'Wifi Observations'),
        ]
        for index, (stat_id, label) in enumerate(metric_names):
            column = 'metrics1' if index < 3 else 'metrics2'
            data[column].append({'name': label, 'value': metrics[stat_id]})
        redis_client.set(cache_key, internal_dumps(data), ex=3600)
    result = {'page_title': 'Statistics'}
    result.update(data)
    return result
def test_internaljson(self):
    """Round-trip a WifiObservation through internal JSON serialization."""
    mac = '3680873e9b83'
    obs = WifiObservation.create(key=mac, lat=GB_LAT, lon=GB_LON)
    result = internal_loads(internal_dumps(obs))
    # BUG FIX: assertTrue(type(result), WifiObservation) passed the class
    # as the failure *message*, so the assertion could never fail.
    self.assertIs(type(result), WifiObservation)
    self.assertIsNone(result.accuracy)
    self.assertEqual(result.mac, mac)
    self.assertEqual(result.lat, GB_LAT)
    self.assertEqual(result.lon, GB_LON)
def stats_wifi_json(self):
    """Return the wifi statistics time series, cached for one hour."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['stats_wifi_json']
    cached = redis_client.get(cache_key)
    if not cached:
        session = self.request.db_ro_session
        data = histogram(session, StatKey.unique_wifi)
        redis_client.set(cache_key, internal_dumps(data), ex=3600)
    else:
        data = internal_loads(cached)
    return {'series': [{'title': 'MLS WiFi', 'data': data[0]}]}
def test_internaljson(self):
    """Round-trip a WifiObservation through internal JSON serialization."""
    mac = '3680873e9b83'
    obs = WifiObservation.create(
        key=mac, lat=GB_LAT, lon=GB_LON)
    result = internal_loads(internal_dumps(obs))
    # BUG FIX: assertTrue(type(result), WifiObservation) passed the class
    # as the failure *message*, so the assertion could never fail.
    self.assertIs(type(result), WifiObservation)
    self.assertIsNone(result.accuracy)
    self.assertEqual(result.mac, mac)
    self.assertEqual(result.lat, GB_LAT)
    self.assertEqual(result.lon, GB_LON)
def _enqueue(self, items, queue_key, batch=100, pipe=None, json=True):
    """Push *items* onto the Redis queue *queue_key*.

    When *json* is true, items are serialized first; otherwise a shallow
    copy of the list is pushed, because _push mutates it in place.
    """
    if json:
        payload = [str(internal_dumps(item)) for item in items]
    else:
        # Copy, since _push modifies the list in-place.
        payload = list(items)
    if pipe is None:
        with redis_pipeline(self.redis_client) as new_pipe:
            self._push(new_pipe, payload, queue_key, batch=batch)
    else:
        self._push(pipe, payload, queue_key, batch=batch)
def stats_regions_view(self):
    """Render the per-region statistics page, cached for one hour."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['stats_regions']
    cached = redis_client.get(cache_key)
    if not cached:
        session = self.request.db_ro_session
        data = regions(session)
        redis_client.set(cache_key, internal_dumps(data), ex=3600)
    else:
        data = internal_loads(cached)
    return {'page_title': 'Region Statistics', 'metrics': data}
def downloads_view(self):
    """Render the downloads page; the S3 listing is cached for 30 minutes."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['downloads']
    cached = redis_client.get(cache_key)
    if cached:
        data = internal_loads(cached)
    else:
        settings = self.request.registry.settings
        assets = settings['assets']
        raven_client = self.request.registry.raven_client
        data = s3_list_downloads(
            assets['bucket'], assets['url'], raven_client)
        # Cache the download file listing.
        redis_client.set(cache_key, internal_dumps(data), ex=1800)
    return {'page_title': 'Downloads', 'files': data}
def test_internaljson(self):
    """Round-trip a CellObservation through internal JSON serialization."""
    obs = CellObservation.create(
        radio=Radio.gsm, mcc=GB_MCC, mnc=5, lac=12345, cid=23456,
        lat=GB_LAT, lon=GB_LON)
    result = internal_loads(internal_dumps(obs))
    # BUG FIX: assertTrue(type(result), CellObservation) passed the class
    # as the failure *message*, so the assertion could never fail.
    self.assertIs(type(result), CellObservation)
    self.assertIsNone(result.accuracy)
    self.assertEqual(type(result.radio), Radio)
    self.assertEqual(result.radio, Radio.gsm)
    self.assertEqual(result.mcc, GB_MCC)
    self.assertEqual(result.mnc, 5)
    self.assertEqual(result.lac, 12345)
    self.assertEqual(result.cid, 23456)
    self.assertEqual(result.lat, GB_LAT)
    self.assertEqual(result.lon, GB_LON)
def stats_cell_json(self):
    """Return the cell statistics time series, cached for one hour."""
    redis_client = self.request.registry.redis_client
    cache_key = redis_client.cache_keys['stats_cell_json']
    cached = redis_client.get(cache_key)
    if cached:
        data = internal_loads(cached)
    else:
        session = self.request.db_ro_session
        mls_series = histogram(session, StatKey.unique_cell)[0]
        ocid_series = histogram(session, StatKey.unique_cell_ocid)[0]
        data = [
            {'title': 'MLS Cells', 'data': mls_series},
            {'title': 'OCID Cells', 'data': ocid_series},
        ]
        redis_client.set(cache_key, internal_dumps(data), ex=3600)
    return {'series': data}
def test_internaljson(self):
    """Round-trip a CellObservation through internal JSON serialization."""
    obs = CellObservation.create(radio=Radio.gsm, mcc=GB_MCC, mnc=5,
                                 lac=12345, cid=23456,
                                 lat=GB_LAT, lon=GB_LON)
    result = internal_loads(internal_dumps(obs))
    # BUG FIX: assertTrue(type(result), CellObservation) passed the class
    # as the failure *message*, so the assertion could never fail.
    self.assertIs(type(result), CellObservation)
    self.assertIsNone(result.accuracy)
    self.assertEqual(type(result.radio), Radio)
    self.assertEqual(result.radio, Radio.gsm)
    self.assertEqual(result.mcc, GB_MCC)
    self.assertEqual(result.mnc, 5)
    self.assertEqual(result.lac, 12345)
    self.assertEqual(result.cid, 23456)
    self.assertEqual(result.lat, GB_LAT)
    self.assertEqual(result.lon, GB_LON)
def test_datetime_utc_roundtrip(self):
    """A timezone-aware utcnow() value survives a dump/load round trip."""
    now = util.utcnow()
    decoded = internal_loads(internal_dumps({'d': now}))
    self.assertEqual(now, decoded['d'])
def test_datetime_roundtrip(self):
    """A naive datetime round-trips as the same moment tagged as UTC."""
    naive = datetime(2012, 5, 17, 14, 28, 56)
    decoded = internal_loads(internal_dumps({'d': naive}))
    self.assertEqual(naive.replace(tzinfo=pytz.UTC), decoded['d'])
def test_datetime_dump(self):
    """Datetime values are serialized with the __datetime__ marker."""
    dumped = internal_dumps({'d': datetime(2012, 5, 17, 14, 28, 56)})
    self.assertIn('__datetime__', dumped)
def test_date_roundtrip(self):
    """A plain date survives a dump/load round trip unchanged."""
    day = date(2012, 5, 17)
    decoded = internal_loads(internal_dumps({'d': day}))
    self.assertEqual(day, decoded['d'])
def test_date_dump(self):
    """Date values are serialized with the __date__ marker."""
    dumped = internal_dumps({'d': date(2012, 5, 17)})
    self.assertIn('__date__', dumped)
def test_json(self):
    """A Double instance round-trips and compares equal to the original."""
    double = Double(one=1.1, two='two')
    new_double = internal_loads(internal_dumps(double))
    # assertIsInstance reports the actual type on failure, unlike the
    # opaque assertTrue(isinstance(...)).
    self.assertIsInstance(new_double, Double)
    self.assertEqual(double, new_double)
def test_datetime_us_roundtrip(self):
    """A US/Eastern datetime round-trips normalized to UTC."""
    us = pytz.timezone('US/Eastern')
    # BUG FIX: passing a pytz zone via tzinfo= attaches the zone's LMT
    # offset (-4:56); pytz requires localize() for the correct offset.
    test_date = us.localize(datetime(2012, 5, 17, 14, 28, 56))
    data = internal_loads(internal_dumps({'d': test_date}))
    self.assertEqual(test_date, data['d'])
    self.assertIs(data['d'].tzinfo, pytz.utc)
def test_namedtuple(self):
    """Namedtuples are serialized as plain dicts, not restored as tuples."""
    Named = namedtuple('Named', 'one two')
    decoded = internal_loads(internal_dumps({'d': Named(one=1, two=[2])}))
    self.assertEqual(decoded['d'], {'one': 1, 'two': [2]})
def test_uuid4(self):
    """UUID values are serialized with the __uuid__ marker."""
    dumped = internal_dumps({'d': uuid.uuid4()})
    self.assertIn('__uuid__', dumped)
def test_uuid4_roundtrip(self):
    """A random UUID survives a dump/load round trip with version intact."""
    original = uuid.uuid4()
    decoded = internal_loads(internal_dumps({'d': original}))
    self.assertEqual(decoded['d'], original)
    self.assertEqual(decoded['d'].version, 4)