Example 1
    def test_schedule_cell_measures(self):
        session = self.db_master_session

        # Nothing in the table yet, so nothing can be scheduled.
        blocks = schedule_cellmeasure_archival(batch=1)
        self.assertEquals(len(blocks), 0)

        # Insert 20 measures that are old enough to be archived.
        measures = []
        for i in range(20):
            measures.append(CellMeasure(created=self.really_old))
        session.add_all(measures)
        session.flush()
        start_id = measures[0].id

        # A batch of 15 yields exactly one block covering the first 15 ids.
        blocks = schedule_cellmeasure_archival(batch=15)
        self.assertEquals(len(blocks), 1)
        block = blocks[0]
        self.assertEquals(block, (start_id, start_id + 15))

        # Only 5 unscheduled measures remain, too few to fill a batch of 6.
        blocks = schedule_cellmeasure_archival(batch=6)
        self.assertEquals(len(blocks), 0)

        # A batch of 5 picks up the remaining measures.
        blocks = schedule_cellmeasure_archival(batch=5)
        self.assertEquals(len(blocks), 1)
        block = blocks[0]
        self.assertEquals(block, (start_id + 15, start_id + 20))

        # Everything has been scheduled, so no further blocks are produced.
        blocks = schedule_cellmeasure_archival(batch=1)
        self.assertEquals(len(blocks), 0)
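
What the assertions pin down is the batching rule: schedule_cellmeasure_archival(batch=N) only turns full batches of unscheduled measures into (start_id, end_id) ranges, and any leftover tail waits until a batch size it can fill comes along. Below is a minimal sketch of that rule, using a hypothetical make_blocks helper in place of the real task (which also persists MeasureBlock rows and, judging by the created=self.really_old setup, only considers sufficiently old measures):

    def make_blocks(start_id, pending, batch):
        """Return (start, end) id ranges for full batches only (illustrative)."""
        blocks = []
        while pending >= batch:
            blocks.append((start_id, start_id + batch))
            start_id += batch
            pending -= batch
        return blocks

    # 20 pending measures: a batch of 15 gives one block, the remaining 5
    # are skipped by batch=6 but picked up by batch=5, as asserted above.
    assert make_blocks(100, 20, 15) == [(100, 115)]
    assert make_blocks(115, 5, 6) == []
    assert make_blocks(115, 5, 5) == [(115, 120)]
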
Example 2
    def test_backup_cell_to_s3(self):
        session = self.db_master_session
        batch_size = 10
        measures = []
        for i in range(batch_size):
            measures.append(CellMeasure(created=self.really_old))
        session.add_all(measures)
        session.flush()
        start_id = measures[0].id

        blocks = schedule_cellmeasure_archival(batch=batch_size)
        self.assertEquals(len(blocks), 1)
        block = blocks[0]
        self.assertEquals(block, (start_id, start_id + batch_size))

        # Fake S3 with moto and stub out the real upload; cleanup_zip=False
        # keeps the zip on disk so its contents can be inspected below.
        with mock_s3():
            with patch.object(S3Backend,
                              'backup_archive', lambda x, y, z: True):
                write_cellmeasure_s3_backups(cleanup_zip=False)

                # The backup task logs the path of the zip it wrote via heka;
                # recover it from the single captured message.
                msgs = self.heka_client.stream.msgs
                info_msgs = [m for m in msgs if m.type == 'oldstyle']
                self.assertEquals(1, len(info_msgs))
                info = info_msgs[0]
                fname = info.payload.split(":")[-1]

                myzip = ZipFile(fname)
                try:
                    contents = set(myzip.namelist())
                    expected_contents = set(['alembic_revision.txt',
                                             'cell_measure.csv'])
                    self.assertEquals(expected_contents, contents)
                finally:
                    myzip.close()

        blocks = session.query(MeasureBlock).all()

        self.assertEquals(len(blocks), 1)
        block = blocks[0]

        # The block must record the SHA-1 of the zip it produced and the
        # target S3 key; archive_date is still unset at this point.
        actual_sha = hashlib.sha1()
        with open(fname, 'rb') as zip_file:
            actual_sha.update(zip_file.read())
        self.assertEquals(block.archive_sha, actual_sha.digest())
        self.assertTrue(block.s3_key is not None)
        self.assertTrue('/cell_' in block.s3_key)
        self.assertTrue(block.archive_date is None)
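
The test never touches real S3: moto's mock_s3() fakes the service, and patch.object swaps S3Backend.backup_archive for a lambda that simply reports success, so the assertions inspect only the zip written locally and the heka message carrying its filename. A self-contained sketch of that patching pattern follows; the Uploader class and write_backup function are hypothetical stand-ins, not the project's API:

    from unittest.mock import patch  # older code bases use "from mock import patch"
    from zipfile import ZipFile


    class Uploader(object):
        """Hypothetical stand-in for S3Backend."""

        def backup_archive(self, key, path):
            raise RuntimeError('would talk to S3')


    def write_backup(path):
        """Write a tiny zip, then hand it to the uploader."""
        with ZipFile(path, 'w') as zipped:
            zipped.writestr('cell_measure.csv', 'id,lat,lon\n')
        Uploader().backup_archive('cell_1', path)
        return path


    # Replace the upload with a no-op, just as the test does for backup_archive.
    with patch.object(Uploader, 'backup_archive', lambda self, key, path: True):
        fname = write_backup('backup.zip')

    with ZipFile(fname) as zipped:
        assert zipped.namelist() == ['cell_measure.csv']
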
Example 3
    def test_backup_cell_to_s3(self):
        session = self.db_master_session
        batch_size = 10
        measures = []
        for i in range(batch_size):
            measures.append(CellMeasure())
        session.add_all(measures)
        session.flush()
        start_id = measures[0].id

        blocks = schedule_cellmeasure_archival(batch=batch_size)
        self.assertEquals(len(blocks), 1)
        block = blocks[0]
        self.assertEquals(block, (start_id, start_id + batch_size))

        with mock_s3():
            with patch.object(S3Backend,
                              'backup_archive', lambda x, y, z: True):
                # The task returns the paths of the zips it wrote.
                zips = write_cellmeasure_s3_backups(cleanup_zip=False)
                self.assertEquals(len(zips), 1)
                fname = zips[0]
                myzip = ZipFile(fname)
                try:
                    contents = set(myzip.namelist())
                    expected_contents = set(['alembic_revision.txt',
                                             'cell_measure.csv'])
                    self.assertEquals(expected_contents, contents)
                finally:
                    myzip.close()

        blocks = session.query(MeasureBlock).all()

        self.assertEquals(len(blocks), 1)
        block = blocks[0]

        actual_sha = hashlib.sha1()
        with open(fname, 'rb') as zip_file:
            actual_sha.update(zip_file.read())
        self.assertEquals(block.archive_sha, actual_sha.digest())
        self.assertTrue(block.s3_key is not None)
        self.assertTrue(block.archive_date is None)
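
Both variants of the test end with the same integrity checks: the MeasureBlock row must carry the SHA-1 digest of the zip that was produced (archive_sha), a target s3_key, and no archive_date yet. If the archives grow large, the digest comparison can be done in chunks; sha1_of below is a sketch of such a helper, not part of the project:

    import hashlib


    def sha1_of(path, chunk_size=1024 * 1024):
        """Return the raw SHA-1 digest of a file, read in chunks."""
        digest = hashlib.sha1()
        with open(path, 'rb') as handle:
            for chunk in iter(lambda: handle.read(chunk_size), b''):
                digest.update(chunk)
        return digest.digest()

    # Equivalent to the inline check above:
    #     self.assertEquals(block.archive_sha, sha1_of(fname))
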