def export(self, export_task, upload_task):
    length = self.queue_length()
    if length < self.batch:  # pragma: no cover
        # not enough to do, skip
        return 0

    queued_items = self.dequeue_reports()
    if queued_items and len(queued_items) < self.batch:  # pragma: no cover
        # race condition, something emptied the queue in between
        # our llen call and fetching the items, put them back
        self.redis_client.lpush(self.redis_key, *queued_items)
        return 0

    # schedule the upload task
    items = [kombu_loads(item) for item in queued_items]
    # split out metadata
    reports = [item['report'] for item in items]
    upload_task.delay(self.export_name, dumps({'items': reports}))

    # check the queue at the end, if there's still enough to do
    # schedule another job, but give it a second before it runs
    if self.queue_length() >= self.batch:
        export_task.apply_async(
            args=[self.export_name], countdown=1, expires=300)
    return len(queued_items)
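# A minimal sketch of the self-rescheduling pattern used in export() above,
# assuming a generic Celery app backed by Redis. Every name here (the sketch
# app, drain_queue, 'example:reports') is invented for illustration; only the
# apply_async(countdown=1, expires=300) call mirrors the snippet. The task
# drains one batch per run and re-queues itself while work remains, rather
# than looping in-process.
import redis
from celery import Celery

app = Celery('sketch', broker='redis://localhost:6379/0')
client = redis.Redis()

QUEUE_KEY = 'example:reports'
BATCH = 100


@app.task
def drain_queue():
    # LPOP with a count pops up to BATCH items at once (Redis >= 6.2)
    items = client.lpop(QUEUE_KEY, BATCH) or []

    # if another full batch is already waiting, schedule a follow-up run;
    # countdown delays it by a second, expires drops the job if no worker
    # picks it up within five minutes
    if client.llen(QUEUE_KEY) >= BATCH:
        drain_queue.apply_async(countdown=1, expires=300)
    return len(items)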
def insert_measures(self, items=None, nickname='', email='',
                    api_key_log=False, api_key_name=None):
    if not items:  # pragma: no cover
        return 0

    items = kombu_loads(items)
    length = len(items)
    stats_client = self.stats_client

    with self.db_session() as session:
        userid, nickname, email = process_user(nickname, email, session)

        process_observations(items, session,
                             userid=userid,
                             api_key_log=api_key_log,
                             api_key_name=api_key_name)

        stats_client.incr('items.uploaded.reports', length)
        if api_key_log:
            stats_client.incr(
                'items.api_log.%s.uploaded.reports' % api_key_name)

        session.commit()
    return length
def test_customjson(self):
    now = util.utcnow()
    report_id = uuid.uuid1()

    obs = CellObservation.create(
        radio=Radio.gsm, mcc=GB_MCC, mnc=5, lac=12345, cid=23456,
        report_id=report_id, lat=GB_LAT, lon=GB_LON, created=now)

    json_data = kombu_dumps(obs)
    self.assertTrue('accuracy' not in json_data)

    result = kombu_loads(json_data)
    self.assertEqual(type(result), CellObservation)
    self.assertTrue(result.accuracy is None)
    self.assertEqual(type(result.report_id), uuid.UUID)
    self.assertEqual(result.report_id, report_id)
    self.assertEqual(type(result.radio), Radio)
    self.assertEqual(result.radio, Radio.gsm)
    self.assertEqual(result.mcc, GB_MCC)
    self.assertEqual(result.mnc, 5)
    self.assertEqual(result.lac, 12345)
    self.assertEqual(result.cid, 23456)
    self.assertEqual(result.lat, GB_LAT)
    self.assertEqual(result.lon, GB_LON)
    self.assertEqual(type(result.created), datetime.datetime)
    self.assertEqual(result.created, now)
def _dequeue(self, queue_key, batch):
    with self.redis_client.pipeline() as pipe:
        pipe.multi()
        pipe.lrange(queue_key, 0, batch - 1)
        if batch != 0:
            pipe.ltrim(queue_key, batch, -1)
        else:
            # special case for deleting everything
            pipe.ltrim(queue_key, 1, 0)
        result = [kombu_loads(item) for item in pipe.execute()[0]]
    return result
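# A self-contained demo of the LRANGE + LTRIM batch pop that _dequeue()
# above performs; wrapping both commands in MULTI/EXEC makes the read and
# the trim atomic, so concurrent consumers never see the same items. Plain
# JSON stands in for the project's kombu serializer, and 'example:queue'
# is an invented key.
import json

import redis


def pop_batch(client, key, batch=100):
    with client.pipeline() as pipe:
        pipe.multi()
        pipe.lrange(key, 0, batch - 1)
        pipe.ltrim(key, batch, -1)
        items = pipe.execute()[0]
    return [json.loads(item) for item in items]


if __name__ == '__main__':
    client = redis.Redis()
    client.rpush('example:queue', *[json.dumps({'i': i}) for i in range(5)])
    print(pop_batch(client, 'example:queue', batch=3))  # first three items
    print(pop_batch(client, 'example:queue', batch=3))  # remaining two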
def insert_measures(self, items=None, nickname='', email='',
                    api_key_log=False, api_key_name=None):
    if not items:  # pragma: no cover
        return 0

    reports = kombu_loads(items)
    with self.db_session() as session:
        queue = ReportQueue(self, session,
                            api_key_log=api_key_log,
                            api_key_name=api_key_name,
                            insert_cell_task=insert_measures_cell,
                            insert_wifi_task=insert_measures_wifi)
        length = queue.insert(reports, nickname=nickname, email=email)
        session.commit()
    return length
def insert_measures(self, items=None, nickname='', email='',
                    api_key_log=False, api_key_name=None):
    if not items:  # pragma: no cover
        return 0

    reports = kombu_loads(items)
    with self.db_session() as session:
        queue = ReportQueueV1(self, session,
                              api_key_log=api_key_log,
                              api_key_name=api_key_name,
                              insert_cell_task=insert_measures_cell,
                              insert_wifi_task=insert_measures_wifi)
        length = queue.insert(reports, nickname=nickname, email=email)
        session.commit()
    return length
def insert_measures(self, items=None, email=None, ip=None, nickname=None,
                    api_key_text=None):
    if not items:  # pragma: no cover
        return 0

    reports = kombu_loads(items)
    with self.redis_pipeline() as pipe:
        with self.db_session() as session:
            api_key = api_key_text and ApiKey.getkey(session, api_key_text)
            queue = ReportQueue(self, session, pipe,
                                api_key=api_key,
                                email=email,
                                ip=ip,
                                nickname=nickname,
                                insert_cell_task=insert_measures_cell,
                                insert_wifi_task=insert_measures_wifi)
            length = queue.insert(reports)
    return length
def insert_measures(self, items=None, nickname='', email='',
                    api_key_text=None):
    if not items:  # pragma: no cover
        return 0

    reports = kombu_loads(items)
    with self.db_session() as session:
        api_key = api_key_text and ApiKey.getkey(session, api_key_text)
        queue = ReportQueueV1(self, session, api_key,
                              insert_cell_task=insert_measures_cell,
                              insert_wifi_task=insert_measures_wifi)
        length = queue.insert(reports, nickname=nickname, email=email)
        session.commit()
    return length
def test_customjson(self):
    key = '3680873e9b83'
    now = util.utcnow()
    report_id = uuid.uuid1()

    obs = WifiObservation.create(
        key=key, report_id=report_id,
        lat=GB_LAT, lon=GB_LON, created=now)

    json_data = kombu_dumps(obs)
    self.assertTrue('accuracy' not in json_data)

    result = kombu_loads(json_data)
    self.assertEqual(type(result), WifiObservation)
    self.assertTrue(result.accuracy is None)
    self.assertEqual(type(result.report_id), uuid.UUID)
    self.assertEqual(result.report_id, report_id)
    self.assertEqual(result.key, key)
    self.assertEqual(result.lat, GB_LAT)
    self.assertEqual(result.lon, GB_LON)
    self.assertEqual(type(result.created), datetime.datetime)
    self.assertEqual(result.created, now)
def test_uuid1_roundtrip(self):
    test_uuid = uuid.uuid1()
    data = kombu_loads(kombu_dumps({'d': test_uuid}))
    self.assertEqual(data['d'], test_uuid)
    self.assertEqual(data['d'].version, 1)
def dequeue_areas(redis_client, pipeline_key, batch=100):
    pipe = redis_client.pipeline()
    pipe.multi()
    pipe.lrange(pipeline_key, 0, batch - 1)
    pipe.ltrim(pipeline_key, batch, -1)
    return [kombu_loads(item) for item in pipe.execute()[0]]
def test_datetime_us_roundtrip(self):
    us = pytz.timezone('US/Eastern')
    # localize() is the idiomatic pytz API; passing a pytz zone via
    # tzinfo= attaches the zone's LMT offset instead of the proper one
    test_date = us.localize(datetime(2012, 5, 17, 14, 28, 56))
    data = kombu_loads(kombu_dumps({'d': test_date}))
    self.assertEqual(test_date, data['d'])
    self.assertTrue(data['d'].tzinfo is pytz.utc)
def test_datetime_roundtrip(self):
    test_date = datetime(2012, 5, 17, 14, 28, 56)
    data = kombu_loads(kombu_dumps({'d': test_date}))
    self.assertEqual(test_date.replace(tzinfo=pytz.UTC), data['d'])
def test_date_roundtrip(self):
    test_date = date(2012, 5, 17)
    data = kombu_loads(kombu_dumps({'d': test_date}))
    self.assertEqual(test_date, data['d'])
def test_uuid4_roundtrip(self):
    test_uuid = uuid.uuid4()
    data = kombu_loads(kombu_dumps({'d': test_uuid}))
    self.assertEqual(data['d'], test_uuid)
    self.assertEqual(data['d'].version, 4)
def test_datetime_utc_roundtrip(self):
    test_date = util.utcnow()
    data = kombu_loads(kombu_dumps({'d': test_date}))
    self.assertEqual(test_date, data['d'])
def test_namedtuple(self):
    Named = namedtuple('Named', 'one two')
    data = kombu_loads(kombu_dumps({'d': Named(one=1, two=[2])}))
    self.assertEqual(data['d'], {'one': 1, 'two': [2]})
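# A hedged sketch of a custom JSON codec that reproduces the round-trip
# behavior the tests above exercise: UUIDs keep their version, datetimes and
# dates survive, naive or zoned datetimes come back UTC-aware, and namedtuples
# degrade to plain dicts. This illustrates one way to get that behavior; it
# is not the project's actual kombu_dumps/kombu_loads implementation, and the
# custom_dumps/custom_loads names and the '__uuid__'/'__datetime__'/'__date__'
# marker keys are invented here.
import json
import uuid
from datetime import date, datetime

import pytz


def _convert(value):
    # namedtuples are plain tuples to json.dumps, so flatten them to dicts
    # up front (matching the test_namedtuple expectation above)
    if isinstance(value, tuple) and hasattr(value, '_asdict'):
        value = value._asdict()
    if isinstance(value, dict):
        return {key: _convert(val) for key, val in value.items()}
    if isinstance(value, (list, tuple)):
        return [_convert(val) for val in value]
    return value


class Encoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, uuid.UUID):
            return {'__uuid__': obj.hex}
        if isinstance(obj, datetime):  # check before date; datetime is a date
            if obj.tzinfo is not None:
                # normalize zoned datetimes to UTC wall time
                obj = obj.astimezone(pytz.utc)
            return {'__datetime__': obj.strftime('%Y-%m-%dT%H:%M:%S.%f')}
        if isinstance(obj, date):
            return {'__date__': obj.isoformat()}
        return super().default(obj)


def _object_hook(dct):
    if '__uuid__' in dct:
        return uuid.UUID(hex=dct['__uuid__'])
    if '__datetime__' in dct:
        value = datetime.strptime(dct['__datetime__'], '%Y-%m-%dT%H:%M:%S.%f')
        return value.replace(tzinfo=pytz.UTC)
    if '__date__' in dct:
        return datetime.strptime(dct['__date__'], '%Y-%m-%d').date()
    return dct


def custom_dumps(value):
    return json.dumps(_convert(value), cls=Encoder)


def custom_loads(value):
    return json.loads(value, object_hook=_object_hook)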