def test_upload(self, celery, session, stats):
    ExportConfigFactory(
        name='backup', batch=3, schema='s3',
        url='s3://bucket/backups/{source}/{api_key}/{year}/{month}/{day}')
    ApiKeyFactory(valid_key='e5444-794')
    session.flush()

    reports = self.add_reports(celery, 3)
    self.add_reports(celery, 3, api_key='e5444-794', source='gnss')
    self.add_reports(celery, 3, api_key='e5444-794', source='fused')
    self.add_reports(celery, 3, api_key=None)

    mock_conn = mock.MagicMock()
    mock_bucket = mock.MagicMock()
    mock_obj = mock.MagicMock()
    mock_conn.return_value.Bucket.return_value = mock_bucket
    mock_bucket.Object.return_value = mock_obj

    with mock.patch.object(boto3, 'resource', mock_conn):
        update_incoming.delay().get()

    obj_calls = mock_bucket.Object.call_args_list
    put_calls = mock_obj.put.call_args_list
    assert len(obj_calls) == 4
    assert len(put_calls) == 4

    keys = []
    test_export = None
    for obj_call, put_call in zip(obj_calls, put_calls):
        s3_key = obj_call[0][0]
        assert s3_key.startswith('backups/')
        assert s3_key.endswith('.json.gz')
        assert put_call[1]['Body']
        assert put_call[1]['ContentType'] == 'application/json'
        assert put_call[1]['ContentEncoding'] == 'gzip'
        keys.append(s3_key)
        if 'test' in s3_key:
            test_export = put_call[1]['Body']

    # extract second and third path segment from key names
    groups = [tuple(key.split('/')[1:3]) for key in keys]
    assert (set(groups) ==
            set([('gnss', 'test'), ('gnss', 'no_key'),
                 ('gnss', 'e5444-794'), ('fused', 'e5444-794')]))

    # check uploaded content
    uploaded_text = util.decode_gzip(test_export)
    send_reports = simplejson.loads(uploaded_text)['items']
    assert len(send_reports) == 3
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    assert set(expect) == set(gotten)

    stats.check(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def preprocess_request(self):
    request_content = self.request.body
    if self.request.headers.get("Content-Encoding") == "gzip":
        # handle gzip self.request bodies
        try:
            request_content = util.decode_gzip(self.request.body)
        except GZIPDecodeError as exc:
            raise self.prepare_exception(ParseError({"decode": repr(exc)}))

    try:
        content = request_content.decode(self.request.charset)
    except UnicodeDecodeError as exc:
        # Use str(), since repr() includes the full source bytes
        raise self.prepare_exception(ParseError({"decode": str(exc)}))

    request_data = {}
    if content:
        try:
            request_data = json.loads(content)
        except ValueError as exc:
            raise self.prepare_exception(ParseError({"decode": repr(exc)}))

    validated_data = {}
    errors = None
    try:
        validated_data = self.schema.deserialize(request_data)
    except colander.Invalid as exc:
        errors = {"validation": exc.asdict()}

    if request_content and errors:
        raise self.prepare_exception(ParseError(errors))

    return validated_data
def preprocess_request(self):
    errors = []

    request_content = self.request.body
    if self.request.headers.get('Content-Encoding') == 'gzip':
        # handle gzip self.request bodies
        try:
            request_content = util.decode_gzip(self.request.body)
        except OSError as exc:
            errors.append({'name': None, 'description': repr(exc)})

    request_data = {}
    try:
        request_data = json.loads(request_content,
                                  encoding=self.request.charset)
    except ValueError as exc:
        errors.append({'name': None, 'description': repr(exc)})

    validated_data = {}
    try:
        validated_data = self.schema.deserialize(request_data)
    except colander.Invalid as exc:
        errors.append({'name': None, 'description': exc.asdict()})

    if request_content and errors:
        raise self.prepare_exception(ParseError())

    return (validated_data, errors)
def preprocess_request(self):
    errors = []

    request_content = self.request.body
    if self.request.headers.get('Content-Encoding') == 'gzip':
        # handle gzip self.request bodies
        try:
            request_content = util.decode_gzip(self.request.body)
        except OSError as exc:
            errors.append({'name': None, 'description': repr(exc)})

    request_data = {}
    try:
        request_data = json.loads(
            request_content, encoding=self.request.charset)
    except ValueError as exc:
        errors.append({'name': None, 'description': repr(exc)})

    validated_data = {}
    try:
        validated_data = self.schema.deserialize(request_data)
    except colander.Invalid as exc:
        errors.append({'name': None, 'description': exc.asdict()})

    if request_content and errors:
        raise self.prepare_exception(ParseError())

    return (validated_data, errors)
def test_upload(self):
    reports = self.add_reports(3, email='secretemail@localhost')

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

    self.assertEqual(mock.call_count, 1)
    req = mock.request_history[0]

    # check headers
    self.assertEqual(req.headers['Content-Type'], 'application/json')
    self.assertEqual(req.headers['Content-Encoding'], 'gzip')
    self.assertEqual(req.headers['User-Agent'], 'ichnaea')

    # check body
    body = decode_gzip(req.body)
    # make sure we don't accidentally leak emails
    self.assertFalse('secretemail' in body)

    # make sure a standards based json can decode this data
    # and none of our internal_json structures end up in it
    send_reports = json.loads(body)['items']
    self.assertEqual(len(send_reports), 3)

    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.check_stats(
        counter=[('items.export.test.batches', 1, 1),
                 ('items.export.test.upload_status.200', 1)],
        timer=['items.export.test.upload'],
    )
def preprocess_request(self):
    errors = []

    request_content = self.request.body
    if self.request.headers.get("Content-Encoding") == "gzip":
        # handle gzip self.request bodies
        try:
            request_content = util.decode_gzip(self.request.body)
        except GZIPDecodeError as exc:
            errors.append({"name": None, "description": repr(exc)})

    content = request_content.decode(self.request.charset)

    request_data = {}
    try:
        request_data = json.loads(content)
    except ValueError as exc:
        errors.append({"name": None, "description": repr(exc)})

    validated_data = {}
    try:
        validated_data = self.schema.deserialize(request_data)
    except colander.Invalid as exc:
        errors.append({"name": None, "description": exc.asdict()})

    if request_content and errors:
        raise self.prepare_exception(ParseError())

    return (validated_data, errors)
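# A minimal client-side sketch of the kind of request body the
# preprocess_request() variants above expect: a JSON payload, gzip-compressed,
# sent with a matching Content-Encoding header. This uses only the standard
# library; the payload shape and the commented-out requests.post() call are
# illustrative assumptions, not taken from the original source.
import gzip
import json

payload = {"items": [{"position": {"accuracy": 10.0}}]}
body = gzip.compress(json.dumps(payload).encode("utf-8"))
headers = {
    "Content-Type": "application/json",
    "Content-Encoding": "gzip",
}
# e.g. requests.post(submit_url, data=body, headers=headers)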
def dequeue(self, batch=None):
    """
    Get batch number of items from the queue.
    """
    if batch is None:
        batch = self.batch

    with self.redis_client.pipeline() as pipe:
        pipe.multi()
        pipe.lrange(self.key, 0, batch - 1)
        if batch != 0:
            pipe.ltrim(self.key, batch, -1)
        else:
            # special case for deleting everything
            pipe.ltrim(self.key, 1, 0)
        result = pipe.execute()[0]

    if self.compress:
        result = [util.decode_gzip(item, encoding=None) for item in result]
    if self.json:
        # simplejson.loads returns Unicode strings
        result = [simplejson.loads(item, encoding='utf-8') for item in result]

    return result
def dequeue(self, batch=None):
    """
    Get batch number of items from the queue.
    """
    if batch is None:
        batch = self.batch

    with self.redis_client.pipeline() as pipe:
        pipe.multi()
        pipe.lrange(self.key, 0, batch - 1)
        if batch != 0:
            pipe.ltrim(self.key, batch, -1)
        else:
            # special case for deleting everything
            pipe.ltrim(self.key, 1, 0)
        result = pipe.execute()[0]

    if self.compress:
        result = [
            util.decode_gzip(item, encoding=None) for item in result
        ]
    if self.json:
        # simplejson.loads returns Unicode strings
        result = [
            simplejson.loads(item, encoding='utf-8') for item in result
        ]

    return result
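# Self-contained sketch of the LRANGE/LTRIM "pop a batch" pattern that
# dequeue() above relies on, modelled on a plain Python list instead of Redis
# so it runs without a server. It is an illustration of the semantics only
# (including the batch=0 special case that drains the queue), not code from
# the original project.
def pop_batch(items, batch):
    if batch != 0:
        popped = items[0:batch]   # LRANGE key 0 batch-1
        remaining = items[batch:]  # LTRIM key batch -1
    else:
        popped = items[:]          # LRANGE key 0 -1 returns everything
        remaining = []             # LTRIM key 1 0 deletes everything
    return popped, remaining

popped, rest = pop_batch(['a', 'b', 'c', 'd'], 3)
assert popped == ['a', 'b', 'c'] and rest == ['d']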
def test_upload(self):
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()

    reports = self.add_reports(3, email='secretemail@localhost',
                               ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)

    mock_keys = []
    with mock_s3(mock_keys):
        schedule_export_reports.delay().get()

    self.assertEqual(len(mock_keys), 4)

    keys = []
    test_export = None
    for mock_key in mock_keys:
        self.assertTrue(mock_key.set_contents_from_string.called)
        self.assertEqual(mock_key.content_encoding, 'gzip')
        self.assertEqual(mock_key.content_type, 'application/json')
        self.assertTrue(mock_key.key.startswith('backups/'))
        self.assertTrue(mock_key.key.endswith('.json.gz'))
        self.assertTrue(mock_key.close.called)
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key

    # extract second path segment from key names
    queue_keys = [key.split('/')[1] for key in keys]
    self.assertEqual(set(queue_keys), set(['test', 'no_key', 'e5444-794']))

    # check uploaded content
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_data = args[0]
    uploaded_text = util.decode_gzip(uploaded_data)

    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in uploaded_text)
    self.assertFalse(self.geoip_data['London']['ip'] in uploaded_text)

    send_reports = json.loads(uploaded_text)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.check_stats(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_upload(self, celery, session, stats):
    ExportConfigFactory(
        name='backup', batch=3, schema='s3',
        url='s3://bucket/backups/{source}/{api_key}/{year}/{month}/{day}')
    ApiKeyFactory(valid_key='e5444-794')
    session.flush()

    reports = self.add_reports(celery, 3)
    self.add_reports(celery, 3, api_key='e5444-794', source='gnss')
    self.add_reports(celery, 3, api_key='e5444-794', source='fused')
    self.add_reports(celery, 3, api_key=None)

    mock_keys = []
    with mock_s3(mock_keys):
        update_incoming.delay().get()

    assert len(mock_keys) == 4

    keys = []
    test_export = None
    for mock_key in mock_keys:
        assert mock_key.set_contents_from_string.called
        assert mock_key.content_encoding == 'gzip'
        assert mock_key.content_type == 'application/json'
        assert mock_key.key.startswith('backups/')
        assert mock_key.key.endswith('.json.gz')
        assert mock_key.close.called
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key

    # extract second and third path segment from key names
    groups = [tuple(key.split('/')[1:3]) for key in keys]
    assert (set(groups) ==
            set([('gnss', 'test'), ('gnss', 'no_key'),
                 ('gnss', 'e5444-794'), ('fused', 'e5444-794')]))

    # check uploaded content
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_data = args[0]
    uploaded_text = util.decode_gzip(uploaded_data)

    send_reports = simplejson.loads(uploaded_text)['items']
    assert len(send_reports) == 3
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    assert set(expect) == set(gotten)

    stats.check(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_upload(self, celery, session, metricsmock): ApiKeyFactory(valid_key="e5444-7946") ExportConfigFactory( name="test", batch=4, schema="geosubmit", url="http://127.0.0.1:9/v2/geosubmit?key=external", ) session.flush() reports = [] reports.extend(self.add_reports(celery, 1, source="gnss")) reports.extend(self.add_reports(celery, 1, api_key="e5444e9f-7946")) reports.extend( self.add_reports(celery, 1, api_key=None, source="fused")) reports.extend(self.add_reports(celery, 1, set_position=False)) with requests_mock.Mocker() as mock: mock.register_uri("POST", requests_mock.ANY, text="{}") update_incoming.delay().get() assert mock.call_count == 1 req = mock.request_history[0] # check headers assert req.headers["Content-Type"] == "application/json" assert req.headers["Content-Encoding"] == "gzip" assert req.headers["User-Agent"] == "ichnaea" body = util.decode_gzip(req.body) send_reports = json.loads(body)["items"] assert len(send_reports) == 4 for field in ("accuracy", "source", "timestamp"): expect = [(report["position"] or {}).get(field) for report in reports] gotten = [(report["position"] or {}).get(field) for report in send_reports] assert set(expect) == set(gotten) assert set([w["ssid"] for w in send_reports[0]["wifiAccessPoints"] ]) == set(["my-wifi"]) assert metricsmock.has_record("incr", "data.export.batch", value=1, tags=["key:test"]) assert metricsmock.has_record("incr", "data.export.upload", value=1, tags=["key:test", "status:200"]) assert metricsmock.has_record("timing", "data.export.upload.timing", tags=["key:test"])
def test_upload(self):
    self.session.add(ApiKey(valid_key='e5444-794', log=True))
    self.session.flush()

    reports = self.add_reports(3, email='secretemail@localhost',
                               ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)

    mock_keys = []
    with mock_s3(mock_keys):
        schedule_export_reports.delay().get()

    self.assertEqual(len(mock_keys), 4)

    keys = []
    test_export = None
    for mock_key in mock_keys:
        self.assertTrue(mock_key.set_contents_from_string.called)
        self.assertEqual(mock_key.content_encoding, 'gzip')
        self.assertEqual(mock_key.content_type, 'application/json')
        self.assertTrue(mock_key.key.startswith('backups/'))
        self.assertTrue(mock_key.key.endswith('.json.gz'))
        self.assertTrue(mock_key.close.called)
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key

    # extract second path segment from key names
    queue_keys = [key.split('/')[1] for key in keys]
    self.assertEqual(set(queue_keys), set(['test', 'no_key', 'e5444-794']))

    # check uploaded content
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_data = args[0]
    uploaded_text = util.decode_gzip(uploaded_data)

    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in uploaded_text)
    self.assertFalse(self.geoip_data['London']['ip'] in uploaded_text)

    send_reports = json.loads(uploaded_text)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.check_stats(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_upload(self):
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()

    reports = []
    reports.extend(
        self.add_reports(1, email='secretemail@localhost',
                         ip=self.geoip_data['London']['ip']))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

    self.assertEqual(mock.call_count, 1)
    req = mock.request_history[0]

    # check headers
    self.assertEqual(req.headers['Content-Type'], 'application/json')
    self.assertEqual(req.headers['Content-Encoding'], 'gzip')
    self.assertEqual(req.headers['User-Agent'], 'ichnaea')

    # check body
    body = util.decode_gzip(req.body)

    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in body)
    self.assertFalse(self.geoip_data['London']['ip'] in body)

    # make sure a standards based json can decode this data
    # and none of our internal_json structures end up in it
    send_reports = json.loads(body)['items']
    self.assertEqual(len(send_reports), 3)

    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.assertEqual(
        set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
        set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_upload(self):
    ApiKeyFactory(valid_key='e5444-794')
    self.session.flush()

    reports = self.add_reports(3)
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)

    mock_keys = []
    with mock_s3(mock_keys):
        update_incoming.delay().get()

    self.assertEqual(len(mock_keys), 4)

    keys = []
    test_export = None
    for mock_key in mock_keys:
        self.assertTrue(mock_key.set_contents_from_string.called)
        self.assertEqual(mock_key.content_encoding, 'gzip')
        self.assertEqual(mock_key.content_type, 'application/json')
        self.assertTrue(mock_key.key.startswith('backups/'))
        self.assertTrue(mock_key.key.endswith('.json.gz'))
        self.assertTrue(mock_key.close.called)
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key

    # extract second path segment from key names
    queue_keys = [key.split('/')[1] for key in keys]
    self.assertEqual(set(queue_keys), set(['test', 'no_key', 'e5444-794']))

    # check uploaded content
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_data = args[0]
    uploaded_text = util.decode_gzip(uploaded_data)

    send_reports = simplejson.loads(uploaded_text)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.check_stats(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_upload(self, celery, session, stats):
    ApiKeyFactory(valid_key='e5444-794')
    ExportConfigFactory(
        name='test', batch=3, schema='geosubmit',
        url='http://127.0.0.1:9/v2/geosubmit?key=external')
    session.flush()

    reports = []
    reports.extend(self.add_reports(
        celery, 1, source='gnss'))
    reports.extend(self.add_reports(
        celery, 1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(
        celery, 1, api_key=None, source='fused'))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        update_incoming.delay().get()

    assert mock.call_count == 1
    req = mock.request_history[0]

    # check headers
    assert req.headers['Content-Type'] == 'application/json'
    assert req.headers['Content-Encoding'] == 'gzip'
    assert req.headers['User-Agent'] == 'ichnaea'

    body = util.decode_gzip(req.body)
    send_reports = simplejson.loads(body)['items']
    assert len(send_reports) == 3

    for field in ('accuracy', 'source', 'timestamp'):
        expect = [report['position'].get(field) for report in reports]
        gotten = [report['position'].get(field)
                  for report in send_reports]
        assert set(expect) == set(gotten)

    assert (
        set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]) ==
        set(['my-wifi']))

    stats.check(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_upload(self):
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()

    reports = []
    reports.extend(self.add_reports(1, email='secretemail@localhost',
                                    ip=self.geoip_data['London']['ip']))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

    self.assertEqual(mock.call_count, 1)
    req = mock.request_history[0]

    # check headers
    self.assertEqual(req.headers['Content-Type'], 'application/json')
    self.assertEqual(req.headers['Content-Encoding'], 'gzip')
    self.assertEqual(req.headers['User-Agent'], 'ichnaea')

    # check body
    body = util.decode_gzip(req.body)

    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in body)
    self.assertFalse(self.geoip_data['London']['ip'] in body)

    # make sure a standards based json can decode this data
    # and none of our internal_json structures end up in it
    send_reports = json.loads(body)['items']
    self.assertEqual(len(send_reports), 3)

    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.assertEqual(
        set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
        set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_upload(self, celery, session, stats):
    ApiKeyFactory(valid_key='e5444-794')
    ExportConfigFactory(name='test', batch=3, schema='geosubmit',
                        url='http://127.0.0.1:9/v2/geosubmit?key=external')
    session.flush()

    reports = []
    reports.extend(self.add_reports(celery, 1, source='gnss'))
    reports.extend(self.add_reports(celery, 1, api_key='e5444e9f-7946'))
    reports.extend(
        self.add_reports(celery, 1, api_key=None, source='fused'))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        update_incoming.delay().get()

    assert mock.call_count == 1
    req = mock.request_history[0]

    # check headers
    assert req.headers['Content-Type'] == 'application/json'
    assert req.headers['Content-Encoding'] == 'gzip'
    assert req.headers['User-Agent'] == 'ichnaea'

    body = util.decode_gzip(req.body)
    send_reports = simplejson.loads(body)['items']
    assert len(send_reports) == 3

    for field in ('accuracy', 'source', 'timestamp'):
        expect = [report['position'].get(field) for report in reports]
        gotten = [report['position'].get(field)
                  for report in send_reports]
        assert set(expect) == set(gotten)

    assert (set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']
                 ]) == set(['my-wifi']))

    stats.check(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def preprocess_request(request, schema, response=JSONError, accept_empty=False):
    body = {}
    errors = []
    validated = {}

    body = request.body
    if body:
        if request.headers.get('Content-Encoding') == 'gzip':
            # handle gzip request bodies
            try:
                body = util.decode_gzip(body)
            except zlib.error:  # pragma: no cover
                errors.append(dict(name=None, description=MSG_GZIP))

        if not errors:
            try:
                body = loads(body, encoding=request.charset)
            except ValueError as e:
                errors.append(dict(name=None, description=e.message))
    else:  # pragma: no cover
        errors.append(dict(name=None, description=MSG_EMPTY))

    if accept_empty and not body:
        return ({}, errors)

    if not body or (errors and response is not None):
        if response is not None:
            raise response(errors)

    # schema validation, but report at most one error at a time
    verify_schema(schema, body, errors, validated)

    if errors and response is not None:
        # the response / None check is used in schema tests
        raise response(errors)

    return (validated, errors)
def test_upload(self):
    ApiKeyFactory(valid_key='e5444-794')
    self.session.flush()

    reports = []
    reports.extend(self.add_reports(1))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        update_incoming.delay().get()

    self.assertEqual(mock.call_count, 1)
    req = mock.request_history[0]

    # check headers
    self.assertEqual(req.headers['Content-Type'], 'application/json')
    self.assertEqual(req.headers['Content-Encoding'], 'gzip')
    self.assertEqual(req.headers['User-Agent'], 'ichnaea')

    body = util.decode_gzip(req.body)
    send_reports = simplejson.loads(body)['items']
    self.assertEqual(len(send_reports), 3)

    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.assertEqual(
        set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
        set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_upload(self):
    ApiKeyFactory(valid_key='e5444-794')
    self.session.flush()

    reports = []
    reports.extend(self.add_reports(1))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

    self.assertEqual(mock.call_count, 1)
    req = mock.request_history[0]

    # check headers
    self.assertEqual(req.headers['Content-Type'], 'application/json')
    self.assertEqual(req.headers['Content-Encoding'], 'gzip')
    self.assertEqual(req.headers['User-Agent'], 'ichnaea')

    body = util.decode_gzip(req.body)
    send_reports = simplejson.loads(body)['items']
    self.assertEqual(len(send_reports), 3)

    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.assertEqual(
        set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
        set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_roundtrip_gzip(self):
    data = util.decode_gzip(util.encode_gzip(b'foo'))
    assert data == u'foo'
def test_decode_gzip_error(self):
    with pytest.raises(GZIPDecodeError):
        util.decode_gzip(self.gzip_foo[:1])
    with pytest.raises(GZIPDecodeError):
        util.decode_gzip(self.gzip_foo[:5])
def test_roundtrip_gzip(self): data = util.decode_gzip(util.encode_gzip(b"foo")) assert data == b"foo"
def test_decode_gzip(self): data = util.decode_gzip(self.gzip_foo) assert data == b"foo"
def test_no_encoding(self):
    data = util.encode_gzip(b'\x00ab', encoding=None)
    assert isinstance(data, bytes)
    result = util.decode_gzip(data, encoding=None)
    assert isinstance(result, bytes)
    assert result == b'\x00ab'
def test_roundtrip_gzip(self):
    data = util.decode_gzip(util.encode_gzip(b'foo'))
    self.assertEqual(data, u'foo')
def test_no_encoding(self):
    data = util.encode_gzip(b'\x00ab', encoding=None)
    self.assertTrue(isinstance(data, bytes))
    result = util.decode_gzip(data, encoding=None)
    self.assertTrue(isinstance(result, bytes))
    self.assertEqual(result, b'\x00ab')
def test_decode_gzip(self):
    data = util.decode_gzip(self.gzip_foo)
    assert data == u'foo'
def test_upload(self, celery, session, metricsmock): ExportConfigFactory( name="backup", batch=3, schema="s3", url="s3://bucket/backups/{source}/{api_key}/{year}/{month}/{day}", ) ApiKeyFactory(valid_key="e5444-794") session.flush() reports = self.add_reports(celery, 3) self.add_reports(celery, 3, api_key="e5444-794", source="gnss") self.add_reports(celery, 3, api_key="e5444-794", source="fused") self.add_reports(celery, 3, api_key=None) mock_conn = mock.MagicMock() mock_bucket = mock.MagicMock() mock_obj = mock.MagicMock() mock_conn.return_value.Bucket.return_value = mock_bucket mock_bucket.Object.return_value = mock_obj with mock.patch.object(boto3, "resource", mock_conn): update_incoming.delay().get() obj_calls = mock_bucket.Object.call_args_list put_calls = mock_obj.put.call_args_list assert len(obj_calls) == 4 assert len(put_calls) == 4 keys = [] test_export = None for obj_call, put_call in zip(obj_calls, put_calls): s3_key = obj_call[0][0] assert s3_key.startswith("backups/") assert s3_key.endswith(".json.gz") assert put_call[1]["Body"] assert put_call[1]["ContentType"] == "application/json" assert put_call[1]["ContentEncoding"] == "gzip" keys.append(s3_key) if "test" in s3_key: test_export = put_call[1]["Body"] # extract second and third path segment from key names groups = [tuple(key.split("/")[1:3]) for key in keys] assert set(groups) == set([ ("gnss", "test"), ("gnss", "no_key"), ("gnss", "e5444-794"), ("fused", "e5444-794"), ]) # check uploaded content uploaded_text = util.decode_gzip(test_export) send_reports = json.loads(uploaded_text)["items"] assert len(send_reports) == 3 expect = [report["position"]["accuracy"] for report in reports] gotten = [report["position"]["accuracy"] for report in send_reports] assert set(expect) == set(gotten) assert (len( metricsmock.filter_records("incr", "data.export.batch", value=1, tags=["key:backup"])) == 4) assert (len( metricsmock.filter_records( "incr", "data.export.upload", value=1, tags=["key:backup", "status:success"], )) == 4) assert (len( metricsmock.filter_records("timing", "data.export.upload.timing", tags=["key:backup"])) == 4)
def test_decode_gzip(self):
    data = util.decode_gzip(self.gzip_foo)
    self.assertEqual(data, u'foo')
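# A minimal sketch of gzip helpers with the behaviour the tests above
# exercise: the older variants expect decode_gzip to return text by default
# and raw bytes when encoding=None, and test_decode_gzip_error expects a
# GZIPDecodeError on truncated input (the newer variants return bytes
# directly). The names match the util module used above, but this
# implementation is an assumption, not the original code.
import gzip
import zlib


class GZIPDecodeError(Exception):
    """Raised when gzip decompression fails."""


def encode_gzip(data, encoding="utf-8"):
    # Accept text or bytes; text is encoded first, then gzip-compressed.
    if encoding is not None and isinstance(data, str):
        data = data.encode(encoding)
    return gzip.compress(data)


def decode_gzip(data, encoding="utf-8"):
    # Decompress; return text when an encoding is given, bytes for encoding=None.
    try:
        decompressed = gzip.decompress(data)
    except (OSError, EOFError, zlib.error) as exc:
        raise GZIPDecodeError(repr(exc))
    if encoding is None:
        return decompressed
    return decompressed.decode(encoding)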