示例#1
0
文件: export.py 项目: thebent/ichnaea
    def export(self, export_task, upload_task):
        """Drain one batch of queued reports and hand them to the uploader.

        Returns the number of reports scheduled for upload, or 0 when the
        queue held fewer than one full batch.
        """
        if self.queue_length() < self.batch:  # pragma: no cover
            # Below the batch threshold -- not enough work to bother with.
            return 0

        pending = self.dequeue_reports()
        if pending and len(pending) < self.batch:  # pragma: no cover
            # Lost a race: another worker drained the queue between our
            # length check and the fetch. Put the partial batch back.
            self.redis_client.lpush(self.redis_key, *pending)
            return 0

        # Deserialize the queued payloads and strip them down to just the
        # report bodies before shipping them to the upload task.
        decoded = [kombu_loads(entry) for entry in pending]
        upload_task.delay(
            self.export_name,
            dumps({'items': [entry['report'] for entry in decoded]}))

        # If at least one more full batch is already waiting, schedule a
        # follow-up export, delayed a second so this task can finish first.
        if self.queue_length() >= self.batch:
            export_task.apply_async(args=[self.export_name],
                                    countdown=1,
                                    expires=300)

        return len(pending)
示例#2
0
def insert_measures(self,
                    items=None,
                    nickname='',
                    email='',
                    api_key_log=False,
                    api_key_name=None):
    """Deserialize a batch of measures and write them to the database.

    Returns the number of reports processed; 0 for an empty payload.
    """
    if not items:  # pragma: no cover
        return 0

    reports = kombu_loads(items)
    report_count = len(reports)

    with self.db_session() as session:
        userid, nickname, email = process_user(nickname, email, session)

        process_observations(reports,
                             session,
                             userid=userid,
                             api_key_log=api_key_log,
                             api_key_name=api_key_name)

        # Track overall upload volume, plus per-API-key volume when the
        # key asked for detailed logging.
        self.stats_client.incr('items.uploaded.reports', report_count)
        if api_key_log:
            self.stats_client.incr(
                'items.api_log.%s.uploaded.reports' % api_key_name)

        session.commit()
    return report_count
    def test_customjson(self):
        """Round-trip a CellObservation through the custom kombu JSON codec."""
        now = util.utcnow()
        report_id = uuid.uuid1()
        obs = CellObservation.create(
            radio=Radio.gsm, mcc=GB_MCC, mnc=5, lac=12345, cid=23456,
            report_id=report_id, lat=GB_LAT, lon=GB_LON, created=now)

        json_data = kombu_dumps(obs)
        # Unset fields must be omitted from the serialized payload.
        self.assertTrue('accuracy' not in json_data)

        result = kombu_loads(json_data)
        # BUG FIX: the original `assertTrue(type(result), CellObservation)`
        # passed the class as the *msg* argument, so the assertion always
        # succeeded. Assert the type for real; same for the accuracy check.
        self.assertIsInstance(result, CellObservation)
        self.assertIsNone(result.accuracy)
        self.assertEqual(type(result.report_id), uuid.UUID)
        self.assertEqual(result.report_id, report_id)
        self.assertEqual(type(result.radio), Radio)
        self.assertEqual(result.radio, Radio.gsm)
        self.assertEqual(result.mcc, GB_MCC)
        self.assertEqual(result.mnc, 5)
        self.assertEqual(result.lac, 12345)
        self.assertEqual(result.cid, 23456)
        self.assertEqual(result.lat, GB_LAT)
        self.assertEqual(result.lon, GB_LON)
        self.assertEqual(type(result.created), datetime.datetime)
        self.assertEqual(result.created, now)
示例#4
0
文件: export.py 项目: awoland/ichnaea
    def export(self, export_task, upload_task):
        """Ship one batch of queued reports to the upload task.

        Returns the number of reports handed off; 0 when there was not a
        full batch ready to process.
        """
        queue_size = self.queue_length()
        if queue_size < self.batch:  # pragma: no cover
            # Not enough queued work yet; skip this run.
            return 0

        batch_items = self.dequeue_reports()
        if batch_items and len(batch_items) < self.batch:  # pragma: no cover
            # Another consumer emptied the queue between the llen call and
            # the fetch -- re-queue what we grabbed and bail out.
            self.redis_client.lpush(self.redis_key, *batch_items)
            return 0

        # Decode each serialized payload and keep only its report part.
        reports = []
        for raw in batch_items:
            reports.append(kombu_loads(raw)['report'])

        upload_task.delay(self.export_name, dumps({'items': reports}))

        # Re-schedule ourselves if another full batch remains, with a
        # short countdown so this invocation completes first.
        if self.queue_length() >= self.batch:
            export_task.apply_async(
                args=[self.export_name],
                countdown=1,
                expires=300)

        return len(batch_items)
示例#5
0
 def _dequeue(self, queue_key, batch):
     """Atomically pop up to ``batch`` serialized items off a Redis list.

     A ``batch`` of 0 drains the entire list. Returns the decoded items.
     """
     with self.redis_client.pipeline() as pipe:
         pipe.multi()
         # With batch == 0 this is LRANGE 0 -1, i.e. read everything.
         pipe.lrange(queue_key, 0, batch - 1)
         if batch == 0:
             # LTRIM with start > stop deletes every element.
             pipe.ltrim(queue_key, 1, 0)
         else:
             # Keep only the entries beyond the fetched batch.
             pipe.ltrim(queue_key, batch, -1)
         fetched = pipe.execute()[0]
     return [kombu_loads(raw) for raw in fetched]
示例#6
0
def insert_measures(self, items=None, nickname='', email='',
                    api_key_log=False, api_key_name=None):
    """Decode an uploaded batch and queue it for per-type insertion.

    Returns the number of reports accepted; 0 for an empty payload.
    """
    if not items:  # pragma: no cover
        return 0

    decoded_reports = kombu_loads(items)
    with self.db_session() as session:
        report_queue = ReportQueue(self, session,
                                   api_key_log=api_key_log,
                                   api_key_name=api_key_name,
                                   insert_cell_task=insert_measures_cell,
                                   insert_wifi_task=insert_measures_wifi)
        inserted = report_queue.insert(
            decoded_reports, nickname=nickname, email=email)
        session.commit()
    return inserted
示例#7
0
文件: tasks.py 项目: awoland/ichnaea
def insert_measures(self, items=None, nickname='', email='',
                    api_key_log=False, api_key_name=None):
    """Decode an uploaded batch and insert it via a v1 report queue.

    Returns the number of reports accepted; 0 for an empty payload.
    """
    if not items:  # pragma: no cover
        return 0

    decoded_reports = kombu_loads(items)
    with self.db_session() as session:
        report_queue = ReportQueueV1(self, session,
                                     api_key_log=api_key_log,
                                     api_key_name=api_key_name,
                                     insert_cell_task=insert_measures_cell,
                                     insert_wifi_task=insert_measures_wifi)
        inserted = report_queue.insert(
            decoded_reports, nickname=nickname, email=email)
        session.commit()
    return inserted
示例#8
0
def insert_measures(self, items=None, email=None, ip=None, nickname=None,
                    api_key_text=None):
    """Decode an uploaded batch and insert it through a ReportQueue.

    Returns the number of reports accepted; 0 for an empty payload.
    """
    if not items:  # pragma: no cover
        return 0

    decoded_reports = kombu_loads(items)
    with self.redis_pipeline() as pipe:
        with self.db_session() as session:
            # Only resolve the API key when a key string was supplied.
            api_key = api_key_text and ApiKey.getkey(session, api_key_text)

            report_queue = ReportQueue(self, session, pipe,
                                       api_key=api_key,
                                       email=email,
                                       ip=ip,
                                       nickname=nickname,
                                       insert_cell_task=insert_measures_cell,
                                       insert_wifi_task=insert_measures_wifi)
            inserted = report_queue.insert(decoded_reports)
    return inserted
示例#9
0
def insert_measures(self,
                    items=None,
                    nickname='',
                    email='',
                    api_key_text=None):
    """Decode an uploaded batch and insert it via a v1 report queue.

    Returns the number of reports accepted; 0 for an empty payload.
    """
    if not items:  # pragma: no cover
        return 0

    decoded_reports = kombu_loads(items)
    with self.db_session() as session:
        # Only look up the API key when a key string was supplied.
        api_key = api_key_text and ApiKey.getkey(session, api_key_text)

        report_queue = ReportQueueV1(self,
                                     session,
                                     api_key,
                                     insert_cell_task=insert_measures_cell,
                                     insert_wifi_task=insert_measures_wifi)
        inserted = report_queue.insert(
            decoded_reports, nickname=nickname, email=email)
        session.commit()
    return inserted
    def test_customjson(self):
        """Round-trip a WifiObservation through the custom kombu JSON codec."""
        key = '3680873e9b83'
        now = util.utcnow()
        report_id = uuid.uuid1()
        obs = WifiObservation.create(
            key=key, report_id=report_id, lat=GB_LAT, lon=GB_LON,
            created=now)

        json_data = kombu_dumps(obs)
        # Unset fields must be omitted from the serialized payload.
        self.assertTrue('accuracy' not in json_data)

        result = kombu_loads(json_data)
        # BUG FIX: the original `assertTrue(type(result), WifiObservation)`
        # passed the class as the *msg* argument, so the assertion always
        # succeeded. Assert the type for real; same for the accuracy check.
        self.assertIsInstance(result, WifiObservation)
        self.assertIsNone(result.accuracy)
        self.assertEqual(type(result.report_id), uuid.UUID)
        self.assertEqual(result.report_id, report_id)
        self.assertEqual(result.key, key)
        self.assertEqual(result.lat, GB_LAT)
        self.assertEqual(result.lon, GB_LON)
        self.assertEqual(type(result.created), datetime.datetime)
        self.assertEqual(result.created, now)
示例#11
0
 def test_uuid1_roundtrip(self):
     """A version-1 UUID survives a dumps/loads round trip intact."""
     original = uuid.uuid1()
     decoded = kombu_loads(kombu_dumps({'d': original}))['d']
     self.assertEqual(decoded, original)
     self.assertEqual(decoded.version, 1)
示例#12
0
文件: area.py 项目: awoland/ichnaea
def dequeue_areas(redis_client, pipeline_key, batch=100):
    """Atomically pop up to ``batch`` serialized areas off a Redis list.

    :param redis_client: Redis client used to reach the queue.
    :param pipeline_key: Name of the Redis list to drain.
    :param batch: Maximum number of items removed in one call.
    :returns: The decoded (kombu JSON) queue items.
    """
    # FIX: use the pipeline as a context manager so its connection is
    # reset/released even when execute() raises; the original leaked it.
    with redis_client.pipeline() as pipe:
        pipe.multi()
        pipe.lrange(pipeline_key, 0, batch - 1)
        pipe.ltrim(pipeline_key, batch, -1)
        fetched = pipe.execute()[0]
    return [kombu_loads(item) for item in fetched]
示例#13
0
 def test_datetime_us_roundtrip(self):
     """A US/Eastern datetime round-trips equal but normalized to UTC."""
     eastern = pytz.timezone('US/Eastern')
     original = datetime(2012, 5, 17, 14, 28, 56, tzinfo=eastern)
     decoded = kombu_loads(kombu_dumps({'d': original}))['d']
     self.assertEqual(original, decoded)
     self.assertTrue(decoded.tzinfo is pytz.utc)
示例#14
0
 def test_datetime_roundtrip(self):
     """A naive datetime comes back as the equivalent UTC-aware value."""
     original = datetime(2012, 5, 17, 14, 28, 56)
     decoded = kombu_loads(kombu_dumps({'d': original}))['d']
     self.assertEqual(original.replace(tzinfo=pytz.UTC), decoded)
示例#15
0
 def test_date_roundtrip(self):
     """A plain date survives a dumps/loads round trip unchanged."""
     original = date(2012, 5, 17)
     decoded = kombu_loads(kombu_dumps({'d': original}))['d']
     self.assertEqual(original, decoded)
示例#16
0
 def test_uuid4_roundtrip(self):
     """A version-4 UUID survives a dumps/loads round trip intact."""
     original = uuid.uuid4()
     decoded = kombu_loads(kombu_dumps({'d': original}))['d']
     self.assertEqual(decoded, original)
     self.assertEqual(decoded.version, 4)
示例#17
0
 def test_date_roundtrip(self):
     """Serializing and deserializing a date yields an equal date."""
     sample = date(2012, 5, 17)
     result = kombu_loads(kombu_dumps({'d': sample}))
     self.assertEqual(sample, result['d'])
示例#18
0
 def test_datetime_roundtrip(self):
     """A naive datetime deserializes as the matching UTC-aware value."""
     sample = datetime(2012, 5, 17, 14, 28, 56)
     result = kombu_loads(kombu_dumps({'d': sample}))
     self.assertEqual(sample.replace(tzinfo=pytz.UTC), result['d'])
示例#19
0
 def test_datetime_utc_roundtrip(self):
     """An aware UTC datetime round-trips without any change."""
     original = util.utcnow()
     decoded = kombu_loads(kombu_dumps({'d': original}))['d']
     self.assertEqual(original, decoded)
示例#20
0
 def test_datetime_utc_roundtrip(self):
     """Serializing and deserializing a UTC now() yields an equal value."""
     sample = util.utcnow()
     result = kombu_loads(kombu_dumps({'d': sample}))
     self.assertEqual(sample, result['d'])
示例#21
0
 def test_namedtuple(self):
     """Named tuples serialize as plain dicts keyed by field name."""
     Record = namedtuple('Named', 'one two')
     decoded = kombu_loads(kombu_dumps({'d': Record(one=1, two=[2])}))
     self.assertEqual(decoded['d'], {'one': 1, 'two': [2]})
示例#22
0
 def test_datetime_us_roundtrip(self):
     """An Eastern-zone datetime round-trips equal, normalized to UTC."""
     zone = pytz.timezone('US/Eastern')
     sample = datetime(2012, 5, 17, 14, 28, 56, tzinfo=zone)
     result = kombu_loads(kombu_dumps({'d': sample}))
     self.assertEqual(sample, result['d'])
     self.assertTrue(result['d'].tzinfo is pytz.utc)
示例#23
0
文件: area.py 项目: simudream/ichnaea
def dequeue_areas(redis_client, pipeline_key, batch=100):
    """Atomically pop up to ``batch`` serialized areas off a Redis list.

    :param redis_client: Redis client used to reach the queue.
    :param pipeline_key: Name of the Redis list to drain.
    :param batch: Maximum number of items removed in one call.
    :returns: The decoded (kombu JSON) queue items.
    """
    # FIX: use the pipeline as a context manager so its connection is
    # reset/released even when execute() raises; the original leaked it.
    with redis_client.pipeline() as pipe:
        pipe.multi()
        pipe.lrange(pipeline_key, 0, batch - 1)
        pipe.ltrim(pipeline_key, batch, -1)
        fetched = pipe.execute()[0]
    return [kombu_loads(item) for item in fetched]
示例#24
0
 def test_namedtuple(self):
     """A namedtuple instance deserializes to an equivalent plain dict."""
     Pair = namedtuple('Named', 'one two')
     result = kombu_loads(kombu_dumps({'d': Pair(one=1, two=[2])}))
     self.assertEqual(result['d'], {'one': 1, 'two': [2]})