def test_upload_retried(self):
    """The export upload is retried until the mocked endpoint succeeds.

    The first two responses fail (500, 404); the third succeeds (200).
    Each attempt must be counted in the per-status counters and the
    upload timer must fire once per attempt.
    """
    self.add_reports(3)
    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, [
            {'text': '', 'status_code': 500},
            {'text': '{}', 'status_code': 404},
            {'text': '{}', 'status_code': 200},
        ])
        # simulate celery retry handling
        for i in range(5):
            try:
                schedule_export_reports.delay().get()
            except Retry:
                continue
            else:
                break
        else:
            # BUG FIX: fail only when every retry raised Retry.
            # Previously self.fail() ran unconditionally after the
            # loop, even when the task had succeeded via break.
            self.fail('Task should have succeeded')

        self.assertEqual(mock.call_count, 3)
        self.check_stats(
            counter=[('items.export.test.batches', 1, 1),
                     ('items.export.test.upload_status.200', 1),
                     ('items.export.test.upload_status.404', 1),
                     ('items.export.test.upload_status.500', 1)],
            timer=[('items.export.test.upload', 3)],
        )
def test_nickname_too_short(self):
    """A one-character nickname is rejected: no scores, no user row."""
    self.add_reports(nickname=u'a')
    schedule_export_reports.delay().get()
    score_queue = self.celery_app.data_queues['update_score']
    self.assertEqual(score_queue.size(), 0)
    self.assertEqual(self.session.query(User).count(), 0)
def test_upload(self):
    """Exported reports go out as one gzipped JSON POST, without emails."""
    reports = self.add_reports(3, email='secretemail@localhost')
    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

        self.assertEqual(mock.call_count, 1)
        request = mock.request_history[0]

        # check headers
        for header, value in (('Content-Type', 'application/json'),
                              ('Content-Encoding', 'gzip'),
                              ('User-Agent', 'ichnaea')):
            self.assertEqual(request.headers[header], value)

        # check body
        payload = decode_gzip(request.body)
        # make sure we don't accidentally leak emails
        self.assertFalse('secretemail' in payload)

        # make sure a standards based json can decode this data
        # and none of our internal_json structures end up in it
        sent_reports = json.loads(payload)['items']
        self.assertEqual(len(sent_reports), 3)
        expected = set(report['position']['accuracy']
                       for report in reports)
        actual = set(report['position']['accuracy']
                     for report in sent_reports)
        self.assertEqual(expected, actual)

        self.check_stats(
            counter=[('items.export.test.batches', 1, 1),
                     ('items.export.test.upload_status.200', 1)],
            timer=['items.export.test.upload'],
        )
def test_stats(self):
    # Exercise the export plus cell/wifi insert pipeline for three api
    # key variants (the implicit test key, an explicit logged key and
    # no key at all) and verify the emitted statsd counters.
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    self.add_reports(3, email='secretemail@localhost',
                     ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    # wifi updates are sharded across 16 hex-named queues
    for i in range(16):
        update_wifi.delay(shard_id='%x' % i).get()
    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:internal']),
        ('data.report.upload', 2, 3),
        ('data.report.upload', 1, 3, ['key:test']),
        ('data.report.upload', 1, 6, ['key:e5444-794']),
        ('data.observation.insert', 1, 12, ['type:cell']),
        ('data.observation.upload', 1, 3, ['type:cell', 'key:test']),
        ('data.observation.upload', 1, 6, ['type:wifi', 'key:test']),
        ('data.observation.upload', 0, ['type:cell', 'key:no_key']),
        ('data.observation.upload', 1, 6, ['type:cell', 'key:e5444-794']),
        ('data.observation.upload', 1, 12, ['type:wifi', 'key:e5444-794']),
    ])
    # we get a variable number of statsd messages and are only
    # interested in the sum-total
    insert_msgs = [msg for msg in self.stats_client.msgs
                   if (msg.startswith('data.observation.insert') and
                       'type:wifi' in msg)]
    self.assertEqual(
        sum([int(msg.split(':')[1].split('|')[0])
             for msg in insert_msgs]), 24)
def test_stats(self):
    # Exercise the export plus cell/wifi insert pipeline for three api
    # key variants and verify the emitted statsd counters; this variant
    # uses unsharded update tasks and a single wifi insert counter.
    self.session.add(ApiKey(valid_key='e5444-794', log=True))
    self.session.flush()
    self.add_reports(3, email='secretemail@localhost',
                     ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    update_wifi.delay().get()
    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:internal']),
        ('data.report.upload', 2, 3),
        ('data.report.upload', 1, 3, ['key:test']),
        ('data.report.upload', 1, 6, ['key:e5444-794']),
        ('data.observation.insert', 1, 12, ['type:cell']),
        ('data.observation.insert', 1, 24, ['type:wifi']),
        ('data.observation.upload', 1, 3, ['type:cell', 'key:test']),
        ('data.observation.upload', 1, 6, ['type:wifi', 'key:test']),
        ('data.observation.upload', 0, ['type:cell', 'key:no_key']),
        ('data.observation.upload', 1, 6, ['type:cell', 'key:e5444-794']),
        ('data.observation.upload', 1, 12, ['type:wifi', 'key:e5444-794']),
    ])
def _update_all(self):
    """Run the export task, then drain every cell and wifi shard queue."""
    schedule_export_reports.delay().get()
    for cell_shard in CellShard.shards():
        update_cell.delay(shard_id=cell_shard).get()
    for wifi_shard in WifiShard.shards():
        update_wifi.delay(shard_id=wifi_shard).get()
def test_datamap(self):
    """Reports in different quadrants land in their own datamap queues."""
    self.add_reports(1, cell_factor=0, wifi_factor=2, lat=50.0, lon=10.0)
    self.add_reports(2, cell_factor=0, wifi_factor=2, lat=20.0, lon=-10.0)
    schedule_export_reports.delay().get()
    queues = self.celery_app.data_queues
    self.assertEqual(queues['update_datamap_ne'].size(), 1)
    self.assertEqual(queues['update_datamap_sw'].size(), 1)
def test_stats(self):
    """Uploads under the test api key emit per-key report counters."""
    self.add_reports(3, api_key='test')
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    update_wifi.delay().get()
    expected_counters = [
        ('items.api_log.test.uploaded.reports', 1, 3),
        ('items.uploaded.reports', 1, 3),
    ]
    self.check_stats(counter=expected_counters)
def test_email_too_long(self):
    """An over-long email is stored as empty; the nickname is kept."""
    overlong_email = u'a' * 255 + u'@email.com'
    self.add_reports(nickname=self.nickname, email=overlong_email)
    schedule_export_reports.delay().get()
    users = self.session.query(User).all()
    self.assertEqual(len(users), 1)
    user = users[0]
    self.assertEqual(user.nickname, self.nickname)
    self.assertEqual(user.email, u'')
def test_nickname(self):
    """A valid nickname creates one user and queues two score updates."""
    self.add_reports(wifi_factor=0, nickname=self.nickname)
    schedule_export_reports.delay().get()
    score_queue = self.celery_app.data_queues['update_score']
    self.assertEqual(score_queue.size(), 2)
    users = self.session.query(User).all()
    self.assertEqual(len(users), 1)
    user = users[0]
    self.assertEqual(user.nickname, self.nickname)
    self.assertEqual(user.email, '')
def test_one_queue(self):
    """Only the matching export queue keeps data after processing."""
    self.add_reports(3)
    schedule_export_reports.delay().get()
    # data from one queue was processed
    expected_sizes = (
        ('queue_export_test', 0),
        ('queue_export_everything', 3),
        ('queue_export_no_test', 0),
    )
    for queue_key, size in expected_sizes:
        self.assertEqual(self.export_queues[queue_key].size(), size)
def test_wifi_invalid(self):
    """A malformed wifi key is dropped and counted as malformed."""
    self.add_reports(cell_factor=0, wifi_factor=1, wifi_key='abcd')
    schedule_export_reports.delay().get()
    update_wifi.delay().get()
    expected_counters = [
        ('data.report.upload', 1, 1, ['key:test']),
        ('data.report.drop', 1, 1,
         ['reason:malformed', 'key:test']),
        ('data.observation.drop', 1, 1,
         ['type:wifi', 'reason:malformed', 'key:test']),
    ]
    self.check_stats(counter=expected_counters)
def test_upload(self):
    # End-to-end check of the S3 backup export: reports from three api
    # key variants are uploaded as four gzipped JSON objects, one per
    # export key, without leaking emails or IP addresses.
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    reports = self.add_reports(3, email='secretemail@localhost',
                               ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)

    mock_keys = []
    with mock_s3(mock_keys):
        schedule_export_reports.delay().get()

    self.assertEqual(len(mock_keys), 4)
    keys = []
    test_export = None
    for mock_key in mock_keys:
        # every uploaded key is gzipped JSON under backups/
        self.assertTrue(mock_key.set_contents_from_string.called)
        self.assertEqual(mock_key.content_encoding, 'gzip')
        self.assertEqual(mock_key.content_type, 'application/json')
        self.assertTrue(mock_key.key.startswith('backups/'))
        self.assertTrue(mock_key.key.endswith('.json.gz'))
        self.assertTrue(mock_key.close.called)
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key

    # extract second path segment from key names
    queue_keys = [key.split('/')[1] for key in keys]
    self.assertEqual(set(queue_keys), set(['test', 'no_key', 'e5444-794']))

    # check uploaded content
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_data = args[0]
    uploaded_text = util.decode_gzip(uploaded_data)

    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in uploaded_text)
    self.assertFalse(self.geoip_data['London']['ip'] in uploaded_text)

    send_reports = json.loads(uploaded_text)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.check_stats(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_cell_invalid(self):
    """A cell with an invalid mcc is dropped and counted as malformed."""
    self.add_reports(cell_factor=1, wifi_factor=0, cell_mcc=-2)
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    self.assertEqual(self.session.query(Cell).count(), 0)
    expected_counters = [
        ('data.report.upload', 1, 1, ['key:test']),
        ('data.report.drop', 1, 1,
         ['reason:malformed', 'key:test']),
        ('data.observation.drop', 1, 1,
         ['type:cell', 'reason:malformed', 'key:test']),
    ]
    self.check_stats(counter=expected_counters)
def test_email_header_update(self):
    """A new email in an incoming report overwrites the stored one."""
    user = User(nickname=self.nickname, email=self.email)
    self.session.add(user)
    self.session.commit()
    updated_email = u'new' + self.email
    self.add_reports(nickname=self.nickname, email=updated_email)
    schedule_export_reports.delay().get()
    users = self.session.query(User).all()
    self.assertEqual(len(users), 1)
    self.assertEqual(users[0].nickname, self.nickname)
    self.assertEqual(users[0].email, updated_email)
def test_upload(self):
    # End-to-end check of the S3 backup export: reports from three api
    # key variants are uploaded as four gzipped JSON objects, one per
    # export key, without leaking emails or IP addresses.
    self.session.add(ApiKey(valid_key='e5444-794', log=True))
    self.session.flush()
    reports = self.add_reports(3, email='secretemail@localhost',
                               ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)

    mock_keys = []
    with mock_s3(mock_keys):
        schedule_export_reports.delay().get()

    self.assertEqual(len(mock_keys), 4)
    keys = []
    test_export = None
    for mock_key in mock_keys:
        # every uploaded key is gzipped JSON under backups/
        self.assertTrue(mock_key.set_contents_from_string.called)
        self.assertEqual(mock_key.content_encoding, 'gzip')
        self.assertEqual(mock_key.content_type, 'application/json')
        self.assertTrue(mock_key.key.startswith('backups/'))
        self.assertTrue(mock_key.key.endswith('.json.gz'))
        self.assertTrue(mock_key.close.called)
        keys.append(mock_key.key)
        if 'test' in mock_key.key:
            test_export = mock_key

    # extract second path segment from key names
    queue_keys = [key.split('/')[1] for key in keys]
    self.assertEqual(set(queue_keys), set(['test', 'no_key', 'e5444-794']))

    # check uploaded content
    args, kw = test_export.set_contents_from_string.call_args
    uploaded_data = args[0]
    uploaded_text = util.decode_gzip(uploaded_data)

    # make sure we don't accidentally leak emails or IPs
    self.assertFalse('secretemail' in uploaded_text)
    self.assertFalse(self.geoip_data['London']['ip'] in uploaded_text)

    send_reports = json.loads(uploaded_text)['items']
    self.assertEqual(len(send_reports), 3)
    expect = [report['position']['accuracy'] for report in reports]
    gotten = [report['position']['accuracy'] for report in send_reports]
    self.assertEqual(set(expect), set(gotten))

    self.check_stats(counter=[
        ('data.export.batch', 4, 1, ['key:backup']),
        ('data.export.upload', 4, ['key:backup', 'status:success']),
    ], timer=[
        ('data.export.upload', 4, ['key:backup']),
    ])
def test_upload_wifi(self):
    """A wifi observation becomes one Wifi row carrying its position."""
    reports = self.add_reports(cell_factor=0, wifi_factor=1)
    schedule_export_reports.delay().get()
    update_wifi.delay().get()
    pos = reports[0]['position']
    access_point = reports[0]['wifiAccessPoints'][0]
    rows = self.session.query(Wifi).all()
    self.assertEqual(len(rows), 1)
    row = rows[0]
    self.assertEqual(row.lat, pos['latitude'])
    self.assertEqual(row.lon, pos['longitude'])
    self.assertEqual(row.key, access_point['macAddress'])
    self.assertEqual(row.total_measures, 1)
def test_nickname(self):
    """A nickname queues location and new-cell scores and adds a user."""
    self.add_reports(wifi_factor=0, nickname=self.nickname)
    schedule_export_reports.delay().get()
    score_queue = self.celery_app.data_queues['update_score']
    self.assertEqual(score_queue.size(), 2)
    scores = score_queue.dequeue()
    found_keys = {ScoreKey(score['key']) for score in scores}
    self.assertEqual(found_keys, {ScoreKey.location, ScoreKey.new_cell})
    users = self.session.query(User).all()
    self.assertEqual(len(users), 1)
    self.assertEqual(users[0].nickname, self.nickname)
def test_nickname(self):
    """A nickname queues location and new-cell scores and adds a user."""
    self.add_reports(wifi_factor=0, nickname=self.nickname)
    schedule_export_reports.delay().get()
    score_queue = self.celery_app.data_queues['update_score']
    self.assertEqual(score_queue.size(), 2)
    scores = score_queue.dequeue()
    found_keys = {score['hashkey'].key for score in scores}
    self.assertEqual(found_keys, {ScoreKey.location, ScoreKey.new_cell})
    users = self.session.query(User).all()
    self.assertEqual(len(users), 1)
    self.assertEqual(users[0].nickname, self.nickname)
def test_upload(self):
    # Check the HTTP export: one report per api key variant (test,
    # explicit, none) is sent in a single gzipped JSON POST request
    # without leaking emails or IP addresses.
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    reports = []
    reports.extend(
        self.add_reports(1, email='secretemail@localhost',
                         ip=self.geoip_data['London']['ip']))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

        self.assertEqual(mock.call_count, 1)
        req = mock.request_history[0]

        # check headers
        self.assertEqual(req.headers['Content-Type'], 'application/json')
        self.assertEqual(req.headers['Content-Encoding'], 'gzip')
        self.assertEqual(req.headers['User-Agent'], 'ichnaea')

        # check body
        body = util.decode_gzip(req.body)
        # make sure we don't accidentally leak emails or IPs
        self.assertFalse('secretemail' in body)
        self.assertFalse(self.geoip_data['London']['ip'] in body)

        # make sure a standards based json can decode this data
        # and none of our internal_json structures end up in it
        send_reports = json.loads(body)['items']
        self.assertEqual(len(send_reports), 3)
        expect = [report['position']['accuracy'] for report in reports]
        gotten = [report['position']['accuracy']
                  for report in send_reports]
        self.assertEqual(set(expect), set(gotten))
        self.assertEqual(
            set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
            set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_wifi(self):
    """A wifi observation lands in the correct shard as a single row."""
    reports = self.add_reports(cell_factor=0, wifi_factor=1)
    schedule_export_reports.delay().get()
    update_wifi.delay().get()
    pos = reports[0]['position']
    access_point = reports[0]['wifiAccessPoints'][0]
    shard_model = WifiShard.shard_model(access_point['macAddress'])
    rows = self.session.query(shard_model).all()
    self.assertEqual(len(rows), 1)
    row = rows[0]
    self.assertEqual(row.lat, pos['latitude'])
    self.assertEqual(row.lon, pos['longitude'])
    self.assertEqual(row.mac, access_point['macAddress'])
    self.assertEqual(row.samples, 1)
def test_upload(self):
    # Check the HTTP export: one report per api key variant (test,
    # explicit, none) is sent in a single gzipped JSON POST request
    # without leaking emails or IP addresses.
    self.session.add(ApiKey(valid_key='e5444-794', log_submit=True))
    self.session.flush()
    reports = []
    reports.extend(self.add_reports(1, email='secretemail@localhost',
                                    ip=self.geoip_data['London']['ip']))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

        self.assertEqual(mock.call_count, 1)
        req = mock.request_history[0]

        # check headers
        self.assertEqual(req.headers['Content-Type'], 'application/json')
        self.assertEqual(req.headers['Content-Encoding'], 'gzip')
        self.assertEqual(req.headers['User-Agent'], 'ichnaea')

        # check body
        body = util.decode_gzip(req.body)
        # make sure we don't accidentally leak emails or IPs
        self.assertFalse('secretemail' in body)
        self.assertFalse(self.geoip_data['London']['ip'] in body)

        # make sure a standards based json can decode this data
        # and none of our internal_json structures end up in it
        send_reports = json.loads(body)['items']
        self.assertEqual(len(send_reports), 3)
        expect = [report['position']['accuracy'] for report in reports]
        gotten = [report['position']['accuracy']
                  for report in send_reports]
        self.assertEqual(set(expect), set(gotten))
        self.assertEqual(
            set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
            set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_upload_duplicated_wifi(self):
    """Duplicate wifi entries in one report collapse to a single measure."""
    self.add_reports(cell_factor=0, wifi_factor=1)
    # duplicate the wifi entry inside the report
    export_queue = self.celery_app.export_queues['internal']
    queued_items = export_queue.dequeue(export_queue.queue_key())
    report = queued_items[0]['report']
    duplicate = report['wifiAccessPoints'][0].copy()
    duplicate['signalStrength'] += 2
    report['wifiAccessPoints'].append(duplicate)
    export_queue.enqueue(queued_items, export_queue.queue_key())
    schedule_export_reports.delay().get()
    update_wifi.delay().get()
    wifis = self.session.query(Wifi).all()
    self.assertEqual(len(wifis), 1)
    self.assertEqual(wifis[0].total_measures, 1)
def test_upload_duplicated_cell(self):
    """Duplicate cell entries in one report collapse to a single measure."""
    self.add_reports(cell_factor=1, wifi_factor=0)
    # duplicate the cell entry inside the report
    export_queue = self.celery_app.export_queues['internal']
    queued_items = export_queue.dequeue(export_queue.queue_key())
    report = queued_items[0]['report']
    duplicate = report['cellTowers'][0].copy()
    duplicate['signalStrength'] += 2
    report['cellTowers'].append(duplicate)
    export_queue.enqueue(queued_items, export_queue.queue_key())
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    cells = self.session.query(Cell).all()
    self.assertEqual(len(cells), 1)
    self.assertEqual(cells[0].total_measures, 1)
def test_one_queue(self):
    """One scheduling run processes exactly one queue's data."""
    self.add_reports(3)
    triggered = schedule_export_reports.delay().get()
    self.assertEqual(triggered, 1)
    # data from one queue was processed
    for suffix, size in (('test', 0), ('everything', 3), ('no_test', 0)):
        self.assertEqual(
            self.queue_length(EXPORT_QUEUE_PREFIX + suffix), size)
def test_upload_cell(self):
    """A cell observation becomes one Cell row mirroring the report."""
    reports = self.add_reports(cell_factor=1, wifi_factor=0)
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    pos = reports[0]['position']
    tower = reports[0]['cellTowers'][0]
    cells = self.session.query(Cell).all()
    self.assertEqual(len(cells), 1)
    cell = cells[0]
    # compare every database column against the submitted report
    for db_value, report_value in (
            (cell.lat, pos['latitude']),
            (cell.lon, pos['longitude']),
            (cell.radio.name, tower['radioType']),
            (cell.mcc, tower['mobileCountryCode']),
            (cell.mnc, tower['mobileNetworkCode']),
            (cell.lac, tower['locationAreaCode']),
            (cell.cid, tower['cellId']),
            (cell.psc, tower['primaryScramblingCode']),
            (cell.total_measures, 1)):
        self.assertEqual(db_value, report_value)
def test_wifi_duplicated(self):
    """Triplicated wifi entries in one report count as a single sample."""
    self.add_reports(cell_factor=0, wifi_factor=1)
    # duplicate the wifi entry inside the report
    export_queue = self.celery_app.export_queues['internal']
    queued_items = export_queue.dequeue(export_queue.queue_key())
    report = queued_items[0]['report']
    original = report['wifiAccessPoints'][0]
    mac = original['macAddress']
    stronger = original.copy()
    stronger['signalStrength'] += 2
    weaker = original.copy()
    weaker['signalStrength'] -= 2
    report['wifiAccessPoints'].extend([stronger, weaker])
    export_queue.enqueue(queued_items, export_queue.queue_key())
    schedule_export_reports.delay().get()
    update_wifi.delay().get()
    shard = WifiShard.shard_model(mac)
    rows = self.session.query(shard).all()
    self.assertEqual(len(rows), 1)
    self.assertEqual(rows[0].samples, 1)
def test_upload(self):
    # Check the HTTP export: one report per api key variant is sent in
    # a single gzipped JSON POST request with the expected headers.
    ApiKeyFactory(valid_key='e5444-794')
    self.session.flush()
    reports = []
    reports.extend(self.add_reports(1))
    reports.extend(self.add_reports(1, api_key='e5444e9f-7946'))
    reports.extend(self.add_reports(1, api_key=None))

    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, text='{}')
        schedule_export_reports.delay().get()

        self.assertEqual(mock.call_count, 1)
        req = mock.request_history[0]

        # check headers
        self.assertEqual(req.headers['Content-Type'], 'application/json')
        self.assertEqual(req.headers['Content-Encoding'], 'gzip')
        self.assertEqual(req.headers['User-Agent'], 'ichnaea')

        body = util.decode_gzip(req.body)
        send_reports = simplejson.loads(body)['items']
        self.assertEqual(len(send_reports), 3)
        expect = [report['position']['accuracy'] for report in reports]
        gotten = [report['position']['accuracy']
                  for report in send_reports]
        self.assertEqual(set(expect), set(gotten))
        self.assertEqual(
            set([w['ssid'] for w in send_reports[0]['wifiAccessPoints']]),
            set(['my-wifi']))

    self.check_stats(counter=[
        ('data.export.batch', 1, 1, ['key:test']),
        ('data.export.upload', 1, ['key:test', 'status:200']),
    ], timer=[
        ('data.export.upload', ['key:test']),
    ])
def test_one_queue(self):
    """One scheduling run processes exactly one queue's data."""
    self.add_reports(3)
    triggered = schedule_export_reports.delay().get()
    self.assertEqual(triggered, 1)
    # data from one queue was processed
    export_queues = self.celery_app.export_queues
    for name, size in (('test', 0), ('everything', 3), ('no_test', 0)):
        self.assertEqual(
            self.queue_length(export_queues[name].queue_key()), size)
def test_upload(self):
    # Reports from three api key variants are exported and inserted;
    # verify row counts and the per-key api_log counters.
    self.session.add(ApiKey(valid_key='e5444-794', log=True))
    self.session.flush()
    self.add_reports(3, email='secretemail@localhost',
                     ip=self.geoip_data['London']['ip'])
    self.add_reports(6, api_key='e5444-794')
    self.add_reports(3, api_key=None)
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    update_wifi.delay().get()

    self.assertEqual(self.session.query(Cell).count(), 12)
    self.assertEqual(self.session.query(Wifi).count(), 24)
    self.check_stats(counter=[
        ('items.export.internal.batches', 1, 1),
        ('items.api_log.test.uploaded.cell_observations', 1, 3),
        ('items.api_log.test.uploaded.wifi_observations', 1, 6),
        ('items.api_log.no_key.uploaded.cell_observations', 0),
        ('items.api_log.e5444-794.uploaded.cell_observations', 1, 6),
        ('items.api_log.e5444-794.uploaded.wifi_observations', 1, 12),
    ])
def test_upload_retried(self):
    """The export upload is retried until the mocked endpoint succeeds.

    The first two responses fail (500, 404); the third succeeds (200).
    Each attempt must be counted in the per-status counters and the
    upload timer must fire once per attempt.
    """
    self.add_reports(3)
    with requests_mock.Mocker() as mock:
        mock.register_uri('POST', requests_mock.ANY, [
            {'text': '', 'status_code': 500},
            {'text': '{}', 'status_code': 404},
            {'text': '{}', 'status_code': 200},
        ])
        # simulate celery retry handling
        for i in range(5):
            try:
                schedule_export_reports.delay().get()
            except Retry:
                continue
            else:
                break
        else:
            # BUG FIX: fail only when every retry raised Retry.
            # Previously self.fail() ran unconditionally after the
            # loop, even when the task had succeeded via break.
            self.fail('Task should have succeeded')

        self.assertEqual(mock.call_count, 3)
        self.check_stats(
            counter=[('items.export.test.batches', 1, 1),
                     ('items.export.test.upload_status.200', 1),
                     ('items.export.test.upload_status.404', 1),
                     ('items.export.test.upload_status.500', 1)],
            timer=[('items.export.test.upload', 3)],
        )
def test_upload_invalid_wifi(self):
    """A malformed wifi key results in no Wifi rows being created."""
    self.add_reports(cell_factor=0, wifi_factor=1, wifi_key='abcd')
    schedule_export_reports.delay().get()
    update_wifi.delay().get()
    self.assertEqual(self.session.query(Wifi).count(), 0)
def test_bluetooth_invalid(self):
    """Exporting a report with a malformed bluetooth key must not fail."""
    self.add_reports(blue_factor=1, cell_factor=0, wifi_factor=0,
                     blue_key='abcd')
    schedule_export_reports.delay().get()
def test_multiple_batches(self):
    """After exporting ten reports the test queue holds one entry."""
    self.add_reports(10)
    schedule_export_reports.delay().get()
    remaining = self.queue_length(EXPORT_QUEUE_PREFIX + 'test')
    self.assertEqual(remaining, 1)
def test_upload_invalid_bluetooth(self):
    """Exporting a report with a malformed bluetooth key must not fail."""
    self.add_reports(blue_factor=1, cell_factor=0, wifi_factor=0,
                     blue_key='abcd')
    schedule_export_reports.delay().get()
def test_one_batch(self):
    """After exporting five reports the test queue holds two entries."""
    self.add_reports(5)
    schedule_export_reports.delay().get()
    remaining = self.queue_length(self.test_queue_key)
    self.assertEqual(remaining, 2)
def test_multiple_batches(self):
    """After exporting ten reports the test queue holds one entry."""
    self.add_reports(10)
    schedule_export_reports.delay().get()
    remaining = self.queue_length(self.test_queue_key)
    self.assertEqual(remaining, 1)
def test_bluetooth(self):
    """Exporting a bluetooth-only report must not fail."""
    self.add_reports(blue_factor=1, cell_factor=0, wifi_factor=0)
    schedule_export_reports.delay().get()
def test_upload_invalid_position(self):
    """Only the report with a valid latitude produces a Cell row."""
    self.add_reports(1, cell_factor=1, wifi_factor=0, lat=-90.1)
    self.add_reports(1, cell_factor=1, wifi_factor=0)
    schedule_export_reports.delay().get()
    update_cell.delay().get()
    self.assertEqual(self.session.query(Cell).count(), 1)