def test_schedule_wifi_measures(self):
    session = self.db_master_session
    blocks = schedule_wifimeasure_archival.delay(batch=1).get()
    self.assertEquals(len(blocks), 0)
    batch_size = 10
    measures = []
    for i in range(batch_size * 2):
        measures.append(WifiMeasure(created=self.really_old))
    session.add_all(measures)
    session.flush()
    start_id = measures[0].id

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 2)
    block = blocks[0]
    self.assertEquals(block, (start_id, start_id + batch_size))

    block = blocks[1]
    self.assertEquals(block,
                      (start_id + batch_size, start_id + 2 * batch_size))

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 0)
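# A minimal sketch of the scaffolding these test methods rely on. Only
# hashlib, ZipFile, patch and mock_s3 are imports actually exercised by the
# code in this file; every project-level name and fixture listed below is an
# assumption for illustration, not a verified module path.
import hashlib
from zipfile import ZipFile

from mock import patch      # assumption: the mock/unittest.mock patcher
from moto import mock_s3    # assumption: moto supplies the fake S3 context

# Assumed to come from the application under test:
#   schedule_wifimeasure_archival, write_wifimeasure_s3_backups  (celery tasks)
#   S3Backend, WifiMeasure, WifiObservationFactory,
#   MeasureBlock, ObservationBlock
#
# Assumed fixtures on the enclosing TestCase:
#   self.db_master_session / self.session   SQLAlchemy sessions
#   self.really_old / self.old              datetimes old enough to archive
#   self.heka_client / self.raven_client    in-memory message capture
#   celery configured to run eagerly, so .delay(...).get() is synchronous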
def test_schedule_wifi_observations(self):
    blocks = schedule_wifimeasure_archival.delay(batch=1).get()
    self.assertEquals(len(blocks), 0)
    batch_size = 10
    obs = WifiObservationFactory.create_batch(
        batch_size * 2, created=self.old)
    self.session.flush()
    start_id = obs[0].id

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 2)
    block = blocks[0]
    self.assertEquals(block, (start_id, start_id + batch_size))

    block = blocks[1]
    self.assertEquals(block,
                      (start_id + batch_size, start_id + 2 * batch_size))

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 0)
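# The observation-based tests build fixtures with a factory_boy factory. A
# minimal, self-contained sketch of such a factory follows; the class and
# field names here are assumptions, not the project's real WifiObservation
# factory. The point is that create_batch(size, **kwargs) builds `size`
# objects and that keyword arguments such as created=self.old override the
# declared defaults.
import datetime

import factory


class FakeWifiObservation(object):
    # Hypothetical stand-in for the project's observation model.
    def __init__(self, key, created):
        self.key = key
        self.created = created


class WifiObservationFactorySketch(factory.Factory):
    class Meta:
        model = FakeWifiObservation

    key = factory.Sequence(lambda n: 'ab:cd:ef:12:34:%02x' % n)
    created = factory.LazyFunction(datetime.datetime.utcnow)


# Usage: twenty observations, all stamped with an old creation date.
old = datetime.datetime(2013, 1, 1)
observations = WifiObservationFactorySketch.create_batch(20, created=old)
assert all(o.created == old for o in observations)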
def test_backup_wifi_to_s3(self):
    # The S3 upload itself is stubbed out; the task still writes the zip
    # archive locally (cleanup_zip=False) so its contents and SHA-1 can be
    # checked against the recorded MeasureBlock.
    session = self.db_master_session
    batch_size = 10
    measures = []
    for i in range(batch_size):
        measures.append(WifiMeasure(created=self.really_old))
    session.add_all(measures)
    session.flush()
    start_id = measures[0].id

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 1)
    block = blocks[0]
    self.assertEquals(block, (start_id, start_id + batch_size))

    with mock_s3():
        with patch.object(S3Backend, 'backup_archive',
                          lambda x, y, z: True):
            write_wifimeasure_s3_backups.delay(cleanup_zip=False).get()

            # The task reports the local zip filename via the heka client.
            msgs = self.heka_client.stream.msgs
            info_msgs = [m for m in msgs if m.type == 'oldstyle']
            self.assertEquals(1, len(info_msgs))
            info = info_msgs[0]
            fname = info.payload.split(":")[-1]

            myzip = ZipFile(fname)
            try:
                contents = set(myzip.namelist())
                expected_contents = set(['alembic_revision.txt',
                                         'wifi_measure.csv'])
                self.assertEquals(expected_contents, contents)
            finally:
                myzip.close()

    blocks = session.query(MeasureBlock).all()
    self.assertEquals(len(blocks), 1)
    block = blocks[0]

    actual_sha = hashlib.sha1()
    actual_sha.update(open(fname, 'rb').read())
    self.assertEquals(block.archive_sha, actual_sha.digest())
    self.assertTrue(block.s3_key is not None)
    self.assertTrue('/wifi_' in block.s3_key)
    self.assertTrue(block.archive_date is None)
def test_backup_wifi_to_s3(self):
    # No S3 mocking in this variant: the observation block is expected to
    # end up with sentinel values rather than a real archive SHA and S3 key.
    batch_size = 10
    obs = WifiObservationFactory.create_batch(batch_size, created=self.old)
    self.session.flush()
    start_id = obs[0].id

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 1)
    block = blocks[0]
    self.assertEquals(block, (start_id, start_id + batch_size))

    write_wifimeasure_s3_backups.delay(cleanup_zip=False).get()

    blocks = self.session.query(ObservationBlock).all()
    self.assertEquals(len(blocks), 1)
    block = blocks[0]

    self.assertEqual(block.archive_sha, '20bytes_mean_success')
    self.assertEqual(block.s3_key, 'skipped')
    self.assertTrue(block.archive_date is None)
def test_backup_wifi_to_s3(self):
    # The S3 upload itself is stubbed out; the task still writes the zip
    # archive locally (cleanup_zip=False) so its contents and SHA-1 can be
    # checked against the recorded ObservationBlock.
    batch_size = 10
    obs = WifiObservationFactory.create_batch(batch_size, created=self.old)
    self.session.flush()
    start_id = obs[0].id

    blocks = schedule_wifimeasure_archival.delay(batch=batch_size).get()
    self.assertEquals(len(blocks), 1)
    block = blocks[0]
    self.assertEquals(block, (start_id, start_id + batch_size))

    with mock_s3():
        with patch.object(S3Backend, 'backup_archive',
                          lambda x, y, z: True):
            write_wifimeasure_s3_backups.delay(cleanup_zip=False).get()

            # Recover the local zip filename from the 's3.backup:<path>'
            # message sent to the raven client.
            raven_msgs = self.raven_client.msgs
            fname = [m['message'].split(':')[1] for m in raven_msgs
                     if m['message'].startswith('s3.backup:')][0]

            myzip = ZipFile(fname)
            try:
                contents = set(myzip.namelist())
                expected_contents = set(['alembic_revision.txt',
                                         'wifi_measure.csv'])
                self.assertEquals(expected_contents, contents)
            finally:
                myzip.close()

    blocks = self.session.query(ObservationBlock).all()
    self.assertEquals(len(blocks), 1)
    block = blocks[0]

    actual_sha = hashlib.sha1()
    actual_sha.update(open(fname, 'rb').read())
    self.assertEquals(block.archive_sha, actual_sha.digest())
    self.assertTrue(block.s3_key is not None)
    self.assertTrue('/wifi_' in block.s3_key)
    self.assertTrue(block.archive_date is None)
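# The backup tests above combine moto's mock_s3 with a patched
# S3Backend.backup_archive, so no bytes ever reach S3. As a point of
# comparison, the sketch below (an assumption, not part of the project)
# shows how the upload step could instead be exercised for real against
# moto's in-memory S3 using boto3; the bucket and key names are made up.
import boto3
from moto import mock_s3


@mock_s3
def upload_archive_sketch(zip_path,
                          bucket='test-bucket',
                          key='backups/wifi_0.zip'):
    # moto intercepts the boto3 calls, so this runs entirely in memory.
    s3 = boto3.client('s3', region_name='us-east-1')
    s3.create_bucket(Bucket=bucket)
    with open(zip_path, 'rb') as fd:
        s3.put_object(Bucket=bucket, Key=key, Body=fd.read())
    # Return the stored object's size as a cheap sanity check.
    return s3.head_object(Bucket=bucket, Key=key)['ContentLength']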