Example #1
File: views.py  Project: awoland/ichnaea
    def leaders_weekly_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['leaders_weekly']
        cached = redis_client.get(cache_key)

        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = {
                'new_cell': {'leaders1': [], 'leaders2': []},
                'new_wifi': {'leaders1': [], 'leaders2': []},
            }
            for name, value in leaders_weekly(session).items():
                value = [
                    {
                        'pos': l[0] + 1,
                        'num': l[1]['num'],
                        'nickname': l[1]['nickname'],
                    } for l in list(enumerate(value))]
                half = len(value) // 2 + len(value) % 2
                data[name] = {
                    'leaders1': value[:half],
                    'leaders2': value[half:],
                }
            redis_client.set(cache_key, dumps(data), ex=3600)

        return {
            'page_title': 'Weekly Leaderboard',
            'scores': data,
        }
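The Redis cache-aside pattern repeated throughout these views (check the cache, compute on a miss, store the JSON with a TTL) can be factored into a small helper. Below is a minimal sketch, assuming a redis-py client and the standard-library json module; the `cached_json` name is illustrative and not part of ichnaea.

import json


def cached_json(redis_client, cache_key, compute, ttl=3600):
    # Return the cached value if present; otherwise compute, cache with a TTL,
    # and return the fresh value.
    cached = redis_client.get(cache_key)
    if cached:
        return json.loads(cached)
    data = compute()
    redis_client.set(cache_key, json.dumps(data), ex=ttl)
    return data

With such a helper, a view body would reduce to something like `data = cached_json(redis_client, cache_key, lambda: leaders_weekly(session))`.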
Example #2
File: views.py  Project: elkos/ichnaea
def submit_view(request):
    data, errors = preprocess_request(
        request,
        schema=SubmitSchema(),
        extra_checks=(submit_validator, ),
    )

    items = data['items']
    nickname = request.headers.get('X-Nickname', u'')
    if isinstance(nickname, str):
        nickname = nickname.decode('utf-8', 'ignore')
    # batch incoming data into multiple tasks, in case someone
    # manages to submit us a huge single request
    for i in range(0, len(items), 100):
        batch = dumps(items[i:i + 100])
        # insert measures, expire the task if it wasn't processed
        # after two hours to avoid queue overload
        try:
            insert_measures.apply_async(
                kwargs={'items': batch, 'nickname': nickname},
                expires=7200)
        except ConnectionError:
            return HTTPServiceUnavailable()

    return HTTPNoContent()
Example #3
File: views.py  Project: awoland/ichnaea
    def stats_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['stats']
        cached = redis_client.get(cache_key)
        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = {
                'leaders': [],
                'metrics1': [],
                'metrics2': [],
            }
            metrics = global_stats(session)
            metric_names = [
                (StatKey.unique_cell.name, 'MLS Cells'),
                (StatKey.unique_ocid_cell.name, 'OpenCellID Cells'),
                (StatKey.cell.name, 'MLS Cell Observations'),
                (StatKey.unique_wifi.name, 'Wifi Networks'),
                (StatKey.wifi.name, 'Wifi Observations'),
            ]
            for mid, name in metric_names[:3]:
                data['metrics1'].append({'name': name, 'value': metrics[mid]})
            for mid, name in metric_names[3:]:
                data['metrics2'].append({'name': name, 'value': metrics[mid]})
            redis_client.set(cache_key, dumps(data), ex=3600)

        result = {'page_title': 'Statistics'}
        result.update(data)
        return result
Example #4
File: views.py  Project: thebent/ichnaea
    def stats_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['stats']
        cached = redis_client.get(cache_key)
        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = {
                'leaders': [],
                'metrics1': [],
                'metrics2': [],
            }
            metrics = global_stats(session)
            metric_names = [
                (StatKey.unique_cell.name, 'MLS Cells'),
                (StatKey.unique_ocid_cell.name, 'OpenCellID Cells'),
                (StatKey.cell.name, 'MLS Cell Observations'),
                (StatKey.unique_wifi.name, 'Wifi Networks'),
                (StatKey.wifi.name, 'Wifi Observations'),
            ]
            for mid, name in metric_names[:3]:
                data['metrics1'].append({'name': name, 'value': metrics[mid]})
            for mid, name in metric_names[3:]:
                data['metrics2'].append({'name': name, 'value': metrics[mid]})
            redis_client.set(cache_key, dumps(data), ex=3600)

        result = {'page_title': 'Statistics'}
        result.update(data)
        return result
Example #5
    def leaders_weekly_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['leaders_weekly']
        cached = redis_client.get(cache_key)

        if cached:
            data = loads(cached)
        else:
            session = self.request.db_slave_session
            data = {
                'new_cell': {'leaders1': [], 'leaders2': []},
                'new_wifi': {'leaders1': [], 'leaders2': []},
            }
            for name, value in leaders_weekly(session).items():
                value = [
                    {
                        'pos': l[0] + 1,
                        'num': l[1]['num'],
                        'nickname': l[1]['nickname'],
                    } for l in list(enumerate(value))]
                half = len(value) // 2 + len(value) % 2
                data[name] = {
                    'leaders1': value[:half],
                    'leaders2': value[half:],
                }
            redis_client.set(cache_key, dumps(data), ex=3600)

        return {
            'page_title': 'Weekly Leaderboard',
            'scores': data,
        }
Example #6
File: views.py  Project: awoland/ichnaea
    def leaders_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['leaders']
        cached = redis_client.get(cache_key)

        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = list(enumerate(leaders(session)))
            data = [
                {
                    'pos': l[0] + 1,
                    'num': l[1]['num'],
                    'nickname': l[1]['nickname'],
                    'anchor': l[1]['nickname'],
                } for l in data]
            redis_client.set(cache_key, dumps(data), ex=600)

        half = len(data) // 2 + len(data) % 2
        leaders1 = data[:half]
        leaders2 = data[half:]
        return {
            'page_title': 'Leaderboard',
            'leaders1': leaders1,
            'leaders2': leaders2,
        }
Example #7
File: views.py  Project: thebent/ichnaea
    def leaders_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['leaders']
        cached = redis_client.get(cache_key)

        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = list(enumerate(leaders(session)))
            data = [{
                'pos': l[0] + 1,
                'num': l[1]['num'],
                'nickname': l[1]['nickname'],
                'anchor': l[1]['nickname'],
            } for l in data]
            redis_client.set(cache_key, dumps(data), ex=600)

        half = len(data) // 2 + len(data) % 2
        leaders1 = data[:half]
        leaders2 = data[half:]
        return {
            'page_title': 'Leaderboard',
            'leaders1': leaders1,
            'leaders2': leaders2,
        }
Example #8
File: base.py  Project: SOFTowaha/ichnaea
 def test_gzip(self):
     cell, query = self._one_cell_query()
     data = {'items': [query]}
     body = util.encode_gzip(dumps(data))
     headers = {'Content-Encoding': 'gzip'}
     self.app.post(
         self.url, body, headers=headers,
         content_type='application/json', status=self.status)
     self._assert_queue_size(1)
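`util.encode_gzip` is ichnaea's own helper. For reference, a minimal sketch of the equivalent using only the standard library, assuming the JSON body fits in memory; the `encode_gzip_body` name is illustrative only.

import gzip
import json
from io import BytesIO


def encode_gzip_body(data):
    # Serialize to JSON and gzip-compress it, matching the
    # Content-Encoding: gzip header sent with the request.
    buf = BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as fh:
        fh.write(json.dumps(data).encode('utf-8'))
    return buf.getvalue()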
Example #9
File: views.py  Project: walexi/ichnaea
def submit_view(request):
    stats_client = request.registry.stats_client
    api_key_log = getattr(request, 'api_key_log', False)
    api_key_name = getattr(request, 'api_key_name', None)

    try:
        data, errors = preprocess_request(
            request,
            schema=SubmitSchema(),
            extra_checks=(submit_validator, ),
            response=JSONError,
        )
    except JSONError:
        # capture JSON exceptions for submit calls
        request.registry.heka_client.raven(RAVEN_ERROR)
        raise

    items = data['items']
    nickname = request.headers.get('X-Nickname', u'')
    if isinstance(nickname, str):
        nickname = nickname.decode('utf-8', 'ignore')

    email = request.headers.get('X-Email', u'')
    if isinstance(email, str):
        email = email.decode('utf-8', 'ignore')

    # count the number of batches and emit a pseudo-timer to capture
    # the number of reports per batch
    length = len(items)
    stats_client.incr('items.uploaded.batches')
    stats_client.timing('items.uploaded.batch_size', length)

    if api_key_log:
        stats_client.incr('items.api_log.%s.uploaded.batches' % api_key_name)
        stats_client.timing(
            'items.api_log.%s.uploaded.batch_size' % api_key_name, length)

    # batch incoming data into multiple tasks, in case someone
    # manages to submit us a huge single request
    for i in range(0, length, 100):
        batch = dumps(items[i:i + 100])
        # insert measures, expire the task if it wasn't processed
        # after six hours to avoid queue overload
        try:
            insert_measures.apply_async(
                kwargs={
                    'email': email,
                    'items': batch,
                    'nickname': nickname,
                    'api_key_log': api_key_log,
                    'api_key_name': api_key_name,
                },
                expires=21600)
        except ConnectionError:  # pragma: no cover
            return HTTPServiceUnavailable()

    return HTTPNoContent()
Example #10
 def test_gzip(self):
     app = self.app
     data = {"cell": [{"mcc": FRANCE_MCC, "mnc": 2, "lac": 3, "cid": 4}]}
     body = ''.join(gzip_app_iter(dumps(data)))
     headers = {
         'Content-Encoding': 'gzip',
     }
     res = app.post('/v1/search?key=test', body, headers=headers,
                    content_type='application/json', status=200)
     self.assertEqual(res.content_type, 'application/json')
     self.assertEqual(res.json, {"status": "not_found"})
Example #11
File: tests.py  Project: aliendb/ichnaea
 def test_gzip(self):
     app = self.app
     data = {'cell': [{'mcc': FRANCE_MCC, 'mnc': 2, 'lac': 3, 'cid': 4}]}
     body = util.encode_gzip(dumps(data))
     headers = {
         'Content-Encoding': 'gzip',
     }
     res = app.post('/v1/search?key=test', body, headers=headers,
                    content_type='application/json', status=200)
     self.assertEqual(res.content_type, 'application/json')
     self.assertEqual(res.json, {'status': 'not_found'})
Example #12
def enqueue_lacs(session, redis_client, lac_keys, expire=86400):
    key = UPDATE_KEY['cell_lac']
    pipe = redis_client.pipeline()
    values = []
    for lac in lac_keys:
        values.append(dumps(lac._asdict()))
    for i in range(0, len(values), 100):
        pipe.lpush(key, *[str(v) for v in values[i:i + 100]])
    # Expire key after 24 hours
    pipe.expire(key, expire)
    pipe.execute()
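For context, a hypothetical consumer of the Redis list filled by enqueue_lacs above might look like the sketch below; `dequeue_lacs` is not part of ichnaea and only assumes the same redis-py client and JSON-encoded values.

import json


def dequeue_lacs(redis_client, key, batch=100):
    # Pop up to `batch` queued entries (oldest first, since the producer
    # uses LPUSH) and decode them back into dicts.
    items = []
    for _ in range(batch):
        value = redis_client.rpop(key)
        if value is None:
            break
        items.append(json.loads(value))
    return items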
Example #13
File: views.py  Project: thebent/ichnaea
 def stats_wifi_json(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['stats_wifi_json']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         session = self.request.db_ro_session
         data = histogram(session, StatKey.unique_wifi)
         redis_client.set(cache_key, dumps(data), ex=3600)
     return {'series': [{'title': 'MLS WiFi', 'data': data[0]}]}
Example #14
File: views.py  Project: awoland/ichnaea
 def stats_wifi_json(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['stats_wifi_json']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         session = self.request.db_ro_session
         data = histogram(session, StatKey.unique_wifi)
         redis_client.set(cache_key, dumps(data), ex=3600)
     return {'series': [{'title': 'MLS WiFi', 'data': data[0]}]}
Example #15
def enqueue_lacs(session, redis_client, lac_keys, expire=86400):
    key = UPDATE_KEY['cell_lac']
    pipe = redis_client.pipeline()
    values = []
    for lac in lac_keys:
        values.append(dumps(lac._asdict()))
    for i in range(0, len(values), 100):
        pipe.lpush(key, *[str(v) for v in values[i:i + 100]])
    # Expire key after 24 hours
    pipe.expire(key, expire)
    pipe.execute()
Example #16
File: views.py  Project: frewsxcv/ichnaea
 def stats_cell_json(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['stats_cell_json']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         session = self.request.db_slave_session
         data = histogram(session, 'unique_cell')
         redis_client.set(cache_key, dumps(data), ex=3600)
     return {'series': [{'title': 'MLS Cells', 'data': data[0]}]}
Example #17
 def test_gzip(self):
     app = self.app
     data = {"items": [{"lat": 1.0,
                        "lon": 2.0,
                        "wifi": [{"key": "aaaaaaaaaaaa"}]}]}
     body = ''.join(gzip_app_iter(dumps(data)))
     headers = {
         'Content-Encoding': 'gzip',
     }
     res = app.post('/v1/submit?key=test', body, headers=headers,
                    content_type='application/json', status=204)
     self.assertEqual(res.body, '')
Example #18
File: views.py  Project: awoland/ichnaea
    def stats_countries_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['stats_countries']
        cached = redis_client.get(cache_key)
        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = countries(session)
            redis_client.set(cache_key, dumps(data), ex=3600)

        return {'page_title': 'Cell Statistics', 'metrics': data}
Example #19
 def test_gzip(self):
     app = self.app
     data = {"items": [{"lat": 1.0,
                        "lon": 2.0,
                        "wifi": [{"key": "aaaaaaaaaaaa"}]}]}
     body = ''.join(gzip_app_iter(dumps(data)))
     headers = {
         'Content-Encoding': 'gzip',
     }
     res = app.post('/v1/submit?key=test', body, headers=headers,
                    content_type='application/json', status=204)
     self.assertEqual(res.body, '')
Example #20
File: views.py  Project: thebent/ichnaea
    def stats_countries_view(self):
        redis_client = self.request.registry.redis_client
        cache_key = CACHE_KEYS['stats_countries']
        cached = redis_client.get(cache_key)
        if cached:
            data = loads(cached)
        else:
            session = self.request.db_ro_session
            data = countries(session)
            redis_client.set(cache_key, dumps(data), ex=3600)

        return {'page_title': 'Cell Statistics', 'metrics': data}
Example #21
 def test_gzip(self):
     app = self.app
     data = {"cell": [{"mcc": FRANCE_MCC, "mnc": 2, "lac": 3, "cid": 4}]}
     body = ''.join(gzip_app_iter(dumps(data)))
     headers = {
         'Content-Encoding': 'gzip',
     }
     res = app.post('/v1/search?key=test',
                    body,
                    headers=headers,
                    content_type='application/json',
                    status=200)
     self.assertEqual(res.content_type, 'application/json')
     self.assertEqual(res.json, {"status": "not_found"})
Example #22
File: views.py  Project: thebent/ichnaea
 def downloads_view(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['downloads']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         settings = self.request.registry.settings
         assets_bucket = settings['ichnaea']['s3_assets_bucket']
         assets_url = settings['ichnaea']['assets_url']
         raven_client = self.request.registry.raven_client
         data = s3_list_downloads(assets_bucket, assets_url, raven_client)
         # cache the download files, expire after 10 minutes
         redis_client.set(cache_key, dumps(data), ex=600)
     return {'page_title': 'Downloads', 'files': data}
Example #23
File: views.py  Project: awoland/ichnaea
 def downloads_view(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['downloads']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         settings = self.request.registry.settings
         assets_bucket = settings['ichnaea']['s3_assets_bucket']
         assets_url = settings['ichnaea']['assets_url']
         raven_client = self.request.registry.raven_client
         data = s3_list_downloads(assets_bucket, assets_url, raven_client)
         # cache the download files, expire after 10 minutes
         redis_client.set(cache_key, dumps(data), ex=600)
     return {'page_title': 'Downloads', 'files': data}
Example #24
File: views.py  Project: awoland/ichnaea
 def stats_cell_json(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['stats_cell_json']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         session = self.request.db_ro_session
         mls_data = histogram(session, StatKey.unique_cell)
         ocid_data = histogram(session, StatKey.unique_ocid_cell)
         data = [
             {'title': 'MLS Cells', 'data': mls_data[0]},
             {'title': 'OCID Cells', 'data': ocid_data[0]},
         ]
         redis_client.set(cache_key, dumps(data), ex=3600)
     return {'series': data}
Example #25
 def stats_cell_json(self):
     redis_client = self.request.registry.redis_client
     cache_key = CACHE_KEYS['stats_cell_json']
     cached = redis_client.get(cache_key)
     if cached:
         data = loads(cached)
     else:
         session = self.request.db_slave_session
         mls_data = histogram(session, StatKey.unique_cell)
         ocid_data = histogram(session, StatKey.unique_ocid_cell)
         data = [
             {'title': 'MLS Cells', 'data': mls_data[0]},
             {'title': 'OCID Cells', 'data': ocid_data[0]},
         ]
         redis_client.set(cache_key, dumps(data), ex=3600)
     return {'series': data}
Example #26
def submit_view(request):
    try:
        data, errors = preprocess_request(
            request,
            schema=SubmitSchema(),
            extra_checks=(submit_validator, ),
            response=JSONError,
        )
    except JSONError:
        # capture JSON exceptions for submit calls
        request.registry.heka_client.raven(RAVEN_ERROR)
        raise

    items = data['items']
    nickname = request.headers.get('X-Nickname', u'')
    if isinstance(nickname, str):
        nickname = nickname.decode('utf-8', 'ignore')

    email = request.headers.get('X-Email', u'')
    if isinstance(email, str):
        email = email.decode('utf-8', 'ignore')

    # batch incoming data into multiple tasks, in case someone
    # manages to submit us a huge single request
    for i in range(0, len(items), 100):
        batch = dumps(items[i:i + 100])
        # insert measures, expire the task if it wasn't processed
        # after six hours to avoid queue overload
        try:
            insert_measures.apply_async(
                kwargs={
                    'email': email,
                    'items': batch,
                    'nickname': nickname,
                },
                expires=21600)
        except ConnectionError:  # pragma: no cover
            return HTTPServiceUnavailable()

    return HTTPNoContent()
Example #27
API_CHECK = text('select maxreq, log, shortname from api_key '
                 'where valid_key = :api_key')

INVALID_API_KEY = {
    'error': {
        'errors': [{
            'domain': 'usageLimits',
            'reason': 'keyInvalid',
            'message': 'Missing or invalid API key.',
        }],
        'code': 400,
        'message': 'Invalid API key',
    }
}
INVALID_API_KEY = dumps(INVALID_API_KEY)


def invalid_api_key_response():
    result = HTTPBadRequest()
    result.content_type = 'application/json'
    result.body = INVALID_API_KEY
    return result


def rate_limit(redis_client, api_key, maxreq=0, expire=86400):
    if not maxreq:
        return False

    dstamp = util.utcnow().strftime('%Y%m%d')
    key = 'apilimit:%s:%s' % (api_key, dstamp)
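The snippet above is cut off after the per-day key is built. Below is a minimal sketch of how such a counter-based daily limit is commonly finished with redis-py INCR and EXPIRE; this is an assumption for illustration, not ichnaea's actual implementation.

def rate_limit_sketch(redis_client, key, maxreq, expire=86400):
    # Count this request and refresh the TTL so the counter expires a day
    # after the last hit; report True once the daily maximum is exceeded.
    pipe = redis_client.pipeline()
    pipe.incr(key)
    pipe.expire(key, expire)
    count, _ = pipe.execute()
    return count > maxreq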
Example #28
 def __init__(self, errors, status=400):
     body = {'errors': errors}
     Response.__init__(self, dumps(body))
     self.status = status
     self.content_type = 'application/json'
Example #29
from ichnaea.customjson import (
    dumps,
    loads,
)

MSG_EMPTY = 'No JSON body was provided.'
MSG_GZIP = 'Error decompressing gzip data stream.'
MSG_ONE_OF = 'You need to provide a mapping with least one cell or wifi entry.'
MSG_BAD_RADIO = 'Radio fields were not consistent in the cellTower data.'


DAILY_LIMIT = dumps({
    "error": {
        "errors": [{
            "domain": "usageLimits",
            "reason": "dailyLimitExceeded",
            "message": "You have exceeded your daily limit.",
        }],
        "code": 403,
        "message": "You have exceeded your daily limit.",
    }
})


class JSONError(HTTPError, BaseJSONError):
    def __init__(self, errors, status=400):
        body = {'errors': errors}
        Response.__init__(self, dumps(body))
        self.status = status
        self.content_type = 'application/json'

PARSE_ERROR = {"error": {
Example #30
 def __init__(self, errors, status=400):
     body = {'errors': errors}
     Response.__init__(self, dumps(body))
     self.status = status
     self.content_type = 'application/json'
Example #31
from ichnaea.customjson import (
    dumps,
    loads,
)

MSG_EMPTY = 'No JSON body was provided.'
MSG_GZIP = 'Error decompressing gzip data stream.'
MSG_BAD_RADIO = 'Radio fields were not consistent in the cellTower data.'

DAILY_LIMIT = dumps({
    "error": {
        "errors": [{
            "domain": "usageLimits",
            "reason": "dailyLimitExceeded",
            "message": "You have exceeded your daily limit.",
        }],
        "code":
        403,
        "message":
        "You have exceeded your daily limit.",
    }
})


class JSONError(HTTPError, BaseJSONError):
    def __init__(self, errors, status=400):
        body = {'errors': errors}
        Response.__init__(self, dumps(body))
        self.status = status
        self.content_type = 'application/json'
Example #32
)

NOT_FOUND = {
    "error": {
        "errors": [{
            "domain": "geolocation",
            "reason": "notFound",
            "message": "Not found",
        }],
        "code":
        404,
        "message":
        "Not found",
    }
}
NOT_FOUND = dumps(NOT_FOUND)


def configure_geolocate(config):
    config.add_route('v1_geolocate', '/v1/geolocate')
    config.add_view(geolocate_view, route_name='v1_geolocate', renderer='json')


@check_api_key('geolocate')
def geolocate_view(request):

    data, errors = preprocess_request(
        request,
        schema=GeoLocateSchema(),
        response=JSONParseError,
        accept_empty=True,
Example #33
    map_data,
)


NOT_FOUND = {
    "error": {
        "errors": [{
            "domain": "geolocation",
            "reason": "notFound",
            "message": "Not found",
        }],
        "code": 404,
        "message": "Not found",
    }
}
NOT_FOUND = dumps(NOT_FOUND)


def configure_geolocate(config):
    config.add_route('v1_geolocate', '/v1/geolocate')
    config.add_view(geolocate_view, route_name='v1_geolocate', renderer='json')


def geolocate_validator(data, errors):
    if errors:
        # don't add this error if something else was already wrong
        return
    cell = data.get('cellTowers', ())
    wifi = data.get('wifiAccessPoints', ())

    if not any(wifi) and not any(cell):
Example #34
def process_upload(nickname, email, items):
    if isinstance(nickname, str):  # pragma: no cover
        nickname = nickname.decode('utf-8', 'ignore')

    if isinstance(email, str):  # pragma: no cover
        email = email.decode('utf-8', 'ignore')

    batch_list = []
    for batch in items:
        normalized_cells = []
        for c in batch['cellTowers']:
            cell = {}
            cell['radio'] = batch['radioType']
            cell['mcc'] = c['mobileCountryCode']
            cell['mnc'] = c['mobileNetworkCode']
            cell['lac'] = c['locationAreaCode']
            cell['cid'] = c['cellId']
            cell['psc'] = c['psc']
            cell['asu'] = c['asu']
            cell['signal'] = c['signalStrength']
            cell['ta'] = c['timingAdvance']

            normalized_cells.append(cell)

        normalized_wifi = []
        for w in batch['wifiAccessPoints']:
            wifi = {}
            wifi['key'] = w['macAddress']
            wifi['frequency'] = w['frequency']
            wifi['channel'] = w['channel']
            wifi['signal'] = w['signalStrength']
            wifi['signalToNoiseRatio'] = w['signalToNoiseRatio']
            normalized_wifi.append(wifi)

        if batch['timestamp'] == 0:
            batch['timestamp'] = time.time() * 1000.0

        dt = utc.fromutc(
            datetime.utcfromtimestamp(batch['timestamp'] /
                                      1000.0).replace(tzinfo=utc))
        ts = dt.isoformat()

        normalized_batch = {
            'lat': batch['latitude'],
            'lon': batch['longitude'],
            'time': ts,
            'accuracy': batch['accuracy'],
            'altitude': batch['altitude'],
            'altitude_accuracy': batch['altitudeAccuracy'],
            'radio': batch['radioType'],
            'heading': batch['heading'],
            'speed': batch['speed'],
            'cell': normalized_cells,
            'wifi': normalized_wifi,
        }
        batch_list.append(normalized_batch)

    # Run the SubmitSchema validator against the normalized submit
    # data.
    schema = SubmitSchema()
    body = {'items': batch_list}
    errors = []
    validated = {}
    verify_schema(schema, body, errors, validated)

    if errors:  # pragma: no cover
        # Short circuit on any error in schema validation
        return errors

    for i in range(0, len(batch_list), 100):
        batch_items = dumps(batch_list[i:i + 100])
        # insert measures, expire the task if it wasn't processed
        # after six hours to avoid queue overload
        try:
            insert_measures.apply_async(
                kwargs={
                    'email': email,
                    'items': batch_items,
                    'nickname': nickname,
                },
                expires=21600)
        except ConnectionError:  # pragma: no cover
            return SENTINEL
    return errors
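For reference, a minimal, hypothetical geosubmit-style payload containing exactly the keys that process_upload above reads; all values are placeholders, not real measurements.

example_items = [{
    'latitude': 51.5,
    'longitude': -0.1,
    'timestamp': 0,            # 0 is replaced with the current time above
    'accuracy': 10,
    'altitude': 0,
    'altitudeAccuracy': 0,
    'radioType': 'gsm',
    'heading': 0,
    'speed': 0,
    'cellTowers': [{
        'mobileCountryCode': 208,
        'mobileNetworkCode': 1,
        'locationAreaCode': 2,
        'cellId': 12345,
        'psc': 0,
        'asu': 15,
        'signalStrength': -60,
        'timingAdvance': 1,
    }],
    'wifiAccessPoints': [{
        'macAddress': '01:23:45:67:89:ab',
        'frequency': 2412,
        'channel': 1,
        'signalStrength': -50,
        'signalToNoiseRatio': 5,
    }],
}]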
Example #35
def process_upload(nickname, email, items):
    if isinstance(nickname, str):  # pragma: no cover
        nickname = nickname.decode('utf-8', 'ignore')

    if isinstance(email, str):  # pragma: no cover
        email = email.decode('utf-8', 'ignore')

    batch_list = []
    for batch in items:
        normalized_cells = []
        for c in batch['cellTowers']:
            cell = {}
            cell['radio'] = batch['radioType']
            cell['mcc'] = c['mobileCountryCode']
            cell['mnc'] = c['mobileNetworkCode']
            cell['lac'] = c['locationAreaCode']
            cell['cid'] = c['cellId']
            cell['psc'] = c['psc']
            cell['asu'] = c['asu']
            cell['signal'] = c['signalStrength']
            cell['ta'] = c['timingAdvance']

            normalized_cells.append(cell)

        normalized_wifi = []
        for w in batch['wifiAccessPoints']:
            wifi = {}
            wifi['key'] = w['macAddress']
            wifi['frequency'] = w['frequency']
            wifi['channel'] = w['channel']
            wifi['signal'] = w['signalStrength']
            wifi['signalToNoiseRatio'] = w['signalToNoiseRatio']
            normalized_wifi.append(wifi)

        if batch['timestamp'] == 0:
            batch['timestamp'] = time.time() * 1000.0

        dt = utc.fromutc(datetime.utcfromtimestamp(
                         batch['timestamp'] / 1000.0).replace(tzinfo=utc))
        ts = dt.isoformat()

        normalized_batch = {'lat': batch['latitude'],
                            'lon': batch['longitude'],
                            'time': ts,
                            'accuracy': batch['accuracy'],
                            'altitude': batch['altitude'],
                            'altitude_accuracy': batch['altitudeAccuracy'],
                            'radio': batch['radioType'],
                            'heading': batch['heading'],
                            'speed': batch['speed'],
                            'cell': normalized_cells,
                            'wifi': normalized_wifi,
                            }
        batch_list.append(normalized_batch)

    # Run the SubmitSchema validator against the normalized submit
    # data.
    schema = SubmitSchema()
    body = {'items': batch_list}
    errors = []
    validated = {}
    verify_schema(schema, body, errors, validated)

    if errors:  # pragma: no cover
        # Short circuit on any error in schema validation
        return errors

    for i in range(0, len(batch_list), 100):
        batch_items = dumps(batch_list[i:i + 100])
        # insert measures, expire the task if it wasn't processed
        # after six hours to avoid queue overload
        try:
            insert_measures.apply_async(
                kwargs={
                    'email': email,
                    'items': batch_items,
                    'nickname': nickname,
                },
                expires=21600)
        except ConnectionError:  # pragma: no cover
            return SENTINEL
    return errors
Example #36
from ichnaea.rate_limit import rate_limit
from ichnaea.api.error import DAILY_LIMIT
from ichnaea import util

INVALID_API_KEY = {
    'error': {
        'errors': [{
            'domain': 'usageLimits',
            'reason': 'keyInvalid',
            'message': 'Missing or invalid API key.',
        }],
        'code': 400,
        'message': 'Invalid API key',
    }
}
INVALID_API_KEY = dumps(INVALID_API_KEY)


class BaseServiceView(object):

    route = None

    @classmethod
    def configure(cls, config):
        path = cls.route
        name = path.lstrip('/').replace('/', '_')
        config.add_route(name, path)
        config.add_view(cls, route_name=name, renderer='json')

    def __init__(self, request):
        self.request = request
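A hypothetical subclass sketch showing how BaseServiceView above is meant to be used: set `route`, register the class during Pyramid configuration, and implement `__call__` so the 'json' renderer can serialize the returned dict. The HeartbeatView name, route, and body are illustrative only.

class HeartbeatView(BaseServiceView):

    route = '/__heartbeat__'

    def __call__(self):
        # Pyramid instantiates the class with the request (see __init__ above)
        # and invokes it; the returned dict is rendered as JSON.
        return {'status': 'OK'}


# During application configuration:
# HeartbeatView.configure(config)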