Example #1
    def test_customjson(self):
        now = util.utcnow()
        report_id = uuid.uuid1()
        obs = CellObservation.create(
            radio=Radio.gsm, mcc=GB_MCC, mnc=5, lac=12345, cid=23456,
            report_id=report_id, lat=GB_LAT, lon=GB_LON, created=now)

        json_data = kombu_dumps(obs)
        self.assertTrue('accuracy' not in json_data)

        result = kombu_loads(json_data)
        self.assertIsInstance(result, CellObservation)
        self.assertTrue(result.accuracy is None)
        self.assertEqual(type(result.report_id), uuid.UUID)
        self.assertEqual(result.report_id, report_id)
        self.assertEqual(type(result.radio), Radio)
        self.assertEqual(result.radio, Radio.gsm)
        self.assertEqual(result.mcc, GB_MCC)
        self.assertEqual(result.mnc, 5)
        self.assertEqual(result.lac, 12345)
        self.assertEqual(result.cid, 23456)
        self.assertEqual(result.lat, GB_LAT)
        self.assertEqual(result.lon, GB_LON)
        self.assertEqual(type(result.created), datetime.datetime)
        self.assertEqual(result.created, now)
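These assertions pin down the codec's behavior: datetimes, dates and UUIDs are written as tagged JSON and restored on load. The actual implementation behind kombu_dumps/kombu_loads is not part of this listing; the following is a minimal sketch, assuming simplejson and using illustrative names (sketch_dumps/sketch_loads), of a dumps/loads pair that would satisfy the scalar round-trips, with enum and observation handling omitted:

import datetime
import uuid

import simplejson


def _default(obj):
    # Tag non-JSON types so loads() can restore them. Check datetime
    # before date, since datetime is a subclass of date.
    if isinstance(obj, datetime.datetime):
        return {'__datetime__': obj.strftime('%Y-%m-%dT%H:%M:%S.%f')}
    if isinstance(obj, datetime.date):
        return {'__date__': obj.strftime('%Y-%m-%d')}
    if isinstance(obj, uuid.UUID):
        return {'__uuid__': obj.hex}
    raise TypeError('%r is not JSON serializable' % obj)


def _object_hook(dct):
    # Reverse the tagging done in _default.
    if '__datetime__' in dct:
        return datetime.datetime.strptime(
            dct['__datetime__'], '%Y-%m-%dT%H:%M:%S.%f')
    if '__date__' in dct:
        return datetime.datetime.strptime(dct['__date__'], '%Y-%m-%d').date()
    if '__uuid__' in dct:
        return uuid.UUID(hex=dct['__uuid__'])
    return dct


def sketch_dumps(value):
    return simplejson.dumps(value, default=_default)


def sketch_loads(value):
    return simplejson.loads(value, object_hook=_object_hook)

This sketch drops timezone information; the timezone tests further down show the real codec normalizes aware datetimes to UTC before encoding.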
Example #2
 def _enqueue(self, items, queue_key, batch=100, expire=False, pipe=None):
     data = [str(kombu_dumps(item)) for item in items]
     if pipe is not None:
         self._push(pipe, data, queue_key, batch=batch, expire=expire)
     else:
         with redis_pipeline(self.redis_client) as pipe:
             self._push(pipe, data, queue_key, batch=batch, expire=expire)
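redis_pipeline is project-specific and not shown in this listing; a plausible reconstruction, assuming it wraps a redis-py pipeline and sends the buffered commands on a clean exit, is:

from contextlib import contextmanager


@contextmanager
def redis_pipeline(redis_client):
    # Assumed behavior: hand out a redis-py pipeline, then execute the
    # buffered commands once the with-block exits without an error.
    pipe = redis_client.pipeline()
    yield pipe
    pipe.execute()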
Example #3
def geosubmit_view(request):
    stats_client = request.registry.stats_client
    api_key_log = getattr(request, 'api_key_log', False)
    api_key_name = getattr(request, 'api_key_name', None)

    try:
        data, errors = preprocess_request(
            request,
            schema=GeoSubmitBatchSchema(),
            response=JSONParseError,
        )
    except JSONParseError:
        # capture JSON exceptions for submit calls
        request.registry.heka_client.raven(RAVEN_ERROR)
        raise

    items = map_items(data['items'])
    nickname = request.headers.get('X-Nickname', u'')
    if isinstance(nickname, str):
        nickname = nickname.decode('utf-8', 'ignore')

    email = request.headers.get('X-Email', u'')
    if isinstance(email, str):
        email = email.decode('utf-8', 'ignore')

    # count the number of batches and emit a pseudo-timer to capture
    # the number of reports per batch
    length = len(items)
    stats_client.incr('items.uploaded.batches')
    stats_client.timing('items.uploaded.batch_size', length)

    if api_key_log:
        stats_client.incr('items.api_log.%s.uploaded.batches' % api_key_name)
        stats_client.timing(
            'items.api_log.%s.uploaded.batch_size' % api_key_name, length)

    # batch incoming data into multiple tasks, in case someone
    # manages to submit us a huge single request
    for i in range(0, length, 100):
        batch = kombu_dumps(items[i:i + 100])
        # insert observations, expire the task if it wasn't processed
        # after six hours to avoid queue overload
        try:
            insert_measures.apply_async(
                kwargs={
                    'email': email,
                    'items': batch,
                    'nickname': nickname,
                    'api_key_log': api_key_log,
                    'api_key_name': api_key_name,
                },
                expires=21600)
        except ConnectionError:  # pragma: no cover
            return HTTPServiceUnavailable()

    result = HTTPOk()
    result.content_type = 'application/json'
    result.body = '{}'
    return result
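The slice loop above is the standard chunking idiom for splitting one oversized request into several tasks. As a standalone helper (illustrative only, not part of the codebase):

def chunks(items, size=100):
    # Yield successive slices of at most `size` items each.
    for i in range(0, len(items), size):
        yield items[i:i + size]

With it, the loop body would read for batch_items in chunks(items), keeping the batch size defined in one place.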
Example #4
def enqueue_areas(session, redis_client, area_keys,
                  pipeline_key, expire=86400, batch=100):
    pipe = redis_client.pipeline()
    area_json = [str(kombu_dumps(area)) for area in area_keys]

    while area_json:
        pipe.lpush(pipeline_key, *area_json[:batch])
        area_json = area_json[batch:]

    # Expire key after 24 hours
    pipe.expire(pipeline_key, expire)
    pipe.execute()
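A hypothetical call, assuming a redis-py client; session is unused by the function body, and the area keys here are plain stand-in strings:

import redis

client = redis.StrictRedis(host='localhost', port=6379)
areas = ['area-%d' % i for i in range(250)]
# 250 keys with batch=100 means three lpush calls on the pipeline.
enqueue_areas(None, client, areas, 'update_area_queue')
print(client.llen('update_area_queue'))  # 250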
Example #5
 def insert_measures(self, request_data):
     # batch incoming data into multiple tasks, in case someone
     # manages to submit us a huge single request
     submit_data = self.prepare_measure_data(request_data)
     for i in range(0, len(submit_data), 100):
         batch = kombu_dumps(submit_data[i:i + 100])
         # insert observations, expire the task if it wasn't processed
         # after six hours to avoid queue overload
         insert_measures.apply_async(
             kwargs={
                 'api_key_text': self.api_key.valid_key,
                 'email': self.email,
                 'items': batch,
                 'nickname': self.nickname,
             },
             expires=21600)
Example #6
 def queue_export(self, reports):
     metadata = {
         'api_key': self.api_key,
         'email': self.email,
         'nickname': self.nickname,
     }
     data = []
     for report in reports:
         data.append(str(kombu_dumps({'report': report,
                                      'metadata': metadata})))
     if data:
         for name, settings in self.export_queues.items():
             redis_key = settings['redis_key']
             source_apikey = settings.get('source_apikey', _sentinel)
             if self.api_key != source_apikey:
                 self.redis_client.lpush(redis_key, *data)
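The _sentinel default lets the code distinguish a queue with no source_apikey configured from one whose source_apikey is legitimately None. The usual definition is a unique module-level object:

# A unique object that no real API key can compare equal to, so the
# settings.get() fallback only matches when the key is truly absent.
_sentinel = object()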
Example #7
def enqueue_lacs(session,
                 redis_client,
                 lac_keys,
                 pipeline_key,
                 expire=86400,
                 batch=100):
    pipe = redis_client.pipeline()
    lac_json = [str(kombu_dumps(lac)) for lac in lac_keys]

    while lac_json:
        pipe.lpush(pipeline_key, *lac_json[:batch])
        lac_json = lac_json[batch:]

    # Expire key after 24 hours
    pipe.expire(pipeline_key, expire)
    pipe.execute()
Example #8
    def send(self, url, data):
        groups = defaultdict(list)
        for item in simplejson.loads(data):
            group = MetadataGroup(**item['metadata'])
            report = self._format_report(item['report'])
            if report:
                groups[group].append(report)

        for group, reports in groups.items():
            self._task().apply_async(
                kwargs={
                    'api_key_text': group.api_key,
                    'email': group.email,
                    'ip': group.ip,
                    'items': kombu_dumps(reports),
                    'nickname': group.nickname,
                },
                expires=21600)
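Grouping reports in a dict keyed by MetadataGroup requires the type to be hashable and to compare by value; a namedtuple fits that bill, for example (an assumed shape, not the project's actual definition):

from collections import namedtuple

# Hypothetical shape: reports whose metadata fields all match end up
# grouped under one key and submitted as a single task.
MetadataGroup = namedtuple(
    'MetadataGroup', ['api_key', 'email', 'ip', 'nickname'])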
Example #9
    def test_customjson(self):
        key = '3680873e9b83'
        now = util.utcnow()
        report_id = uuid.uuid1()
        obs = WifiObservation.create(
            key=key, report_id=report_id, lat=GB_LAT, lon=GB_LON,
            created=now)

        json_data = kombu_dumps(obs)
        self.assertTrue('accuracy' not in json_data)

        result = kombu_loads(json_data)
        self.assertIsInstance(result, WifiObservation)
        self.assertTrue(result.accuracy is None)
        self.assertEqual(type(result.report_id), uuid.UUID)
        self.assertEqual(result.report_id, report_id)
        self.assertEqual(result.key, key)
        self.assertEqual(result.lat, GB_LAT)
        self.assertEqual(result.lon, GB_LON)
        self.assertEqual(type(result.created), datetime.datetime)
        self.assertEqual(result.created, now)
Example #10
 def test_datetime_us_roundtrip(self):
     us = pytz.timezone('US/Eastern')
     test_date = datetime(2012, 5, 17, 14, 28, 56, tzinfo=us)
     data = kombu_loads(kombu_dumps({'d': test_date}))
     self.assertEqual(test_date, data['d'])
     self.assertTrue(data['d'].tzinfo is pytz.utc)
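The tzinfo assertion shows that aware datetimes are normalized to UTC on the way in, which is why the round-tripped value still compares equal. The usual conversion, sketched:

import pytz


def to_utc(value):
    # Aware datetimes get converted; naive ones are assumed UTC.
    if value.tzinfo is not None:
        return value.astimezone(pytz.utc)
    return value.replace(tzinfo=pytz.utc)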
Example #11
 def test_datetime_utc_roundtrip(self):
     test_date = util.utcnow()
     data = kombu_loads(kombu_dumps({'d': test_date}))
     self.assertEqual(test_date, data['d'])
Example #12
 def test_datetime_roundtrip(self):
     test_date = datetime(2012, 5, 17, 14, 28, 56)
     data = kombu_loads(kombu_dumps({'d': test_date}))
     self.assertEqual(test_date.replace(tzinfo=pytz.UTC), data['d'])
Example #13
 def test_datetime_dump(self):
     data = kombu_dumps({'d': datetime(2012, 5, 17, 14, 28, 56)})
     self.assertTrue('__datetime__' in data)
Example #14
 def test_date_roundtrip(self):
     test_date = date(2012, 5, 17)
     data = kombu_loads(kombu_dumps({'d': test_date}))
     self.assertEqual(test_date, data['d'])
Example #15
 def test_uuid1_roundtrip(self):
     test_uuid = uuid.uuid1()
     data = kombu_loads(kombu_dumps({'d': test_uuid}))
     self.assertEqual(data['d'], test_uuid)
     self.assertEqual(data['d'].version, 1)
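The version check passes because an encoding that keeps the full 128 bits (for example the 32-character hex form) also preserves the version nibble:

import uuid

u = uuid.uuid1()
# Reconstructing from hex keeps every field, the version included.
assert uuid.UUID(hex=u.hex) == u
assert uuid.UUID(hex=u.hex).version == 1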
Example #16
 def test_namedtuple(self):
     Named = namedtuple('Named', 'one two')
     data = kombu_loads(kombu_dumps({'d': Named(one=1, two=[2])}))
     self.assertEqual(data['d'], {'one': 1, 'two': [2]})
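Note the asymmetry: a namedtuple goes in but a plain dict comes out. simplejson produces exactly this by default, since anything with an _asdict() method is encoded as a JSON object:

import simplejson
from collections import namedtuple

Named = namedtuple('Named', 'one two')
# namedtuple_as_object defaults to True in simplejson, so the
# namedtuple is serialized as a JSON object, not an array.
print(simplejson.dumps(Named(one=1, two=[2])))  # {"one": 1, "two": [2]}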
Example #17
 def test_uuid4(self):
     data = kombu_dumps({'d': uuid.uuid4()})
     self.assertTrue('__uuid__' in data)
Example #18
 def test_uuid4_roundtrip(self):
     test_uuid = uuid.uuid4()
     data = kombu_loads(kombu_dumps({'d': test_uuid}))
     self.assertEqual(data['d'], test_uuid)
     self.assertEqual(data['d'].version, 4)
Example #19
 def test_date_dump(self):
     data = kombu_dumps({'d': date(2012, 5, 17)})
     self.assertTrue('__date__' in data)