Example #1
0
class TestCleanupDbRealDb(db.RealDatabaseMixin, TestCleanupDb):
    """Run the cleanupdb log-compression test against a real database.

    Inherits the basedir/master.cfg helpers from TestCleanupDb and uses
    RealDatabaseMixin to create the schema tables the cleanup touches.
    """

    @defer.inlineCallbacks
    def setUp(self):
        # parent setUp creates 'basedir' with a buildbot.tac and installs
        # the stdout-assertion helpers (it is synchronous, so no yield)
        super().setUp()
        yield self.setUpRealDatabase(table_names=[
            'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters',
            'buildrequests', 'buildsets', 'workers'
        ])
        master = fakemaster.make_master(self)
        master.config.db['db_url'] = self.db_url
        self.db = DBConnector(self.basedir)
        self.db.setServiceParent(master)
        # share the pool created by RealDatabaseMixin
        self.db.pool = self.db_pool

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.tearDownRealDatabase()
        # BUGFIX: also run the parent tearDown, which restores the original
        # cwd and removes the temporary 'basedir'; previously it was never
        # called, leaking the directory and the chdir between tests
        super().tearDown()

    @defer.inlineCallbacks
    def test_cleanup(self):
        """Compress a highly redundant log with every supported mode and
        check both the retrievable data and the stored size."""
        # we reuse the fake db background data from db.logs unit tests
        yield self.insertTestData(test_db_logs.Tests.backgroundData)

        # insert a log with lots of redundancy
        LOGDATA = "xx\n" * 2000
        logid = yield self.db.logs.addLog(102, "x", "x", "s")
        yield self.db.logs.appendLog(logid, LOGDATA)

        # test all compression methods
        lengths = {}
        for mode in self.db.logs.COMPRESSION_MODE:
            if mode == "lz4" and not hasLz4:
                # lz4 is optional; record the expected size instead of
                # failing so the final dict comparison still lines up
                lengths["lz4"] = 40
                continue
            # create a master.cfg with a different compression method
            self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode, ))
            res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
            self.assertEqual(res, 0)

            # make sure the compression doesn't change the data we can
            # retrieve via the api
            res = yield self.db.logs.getLogLines(logid, 0, 2000)
            self.assertEqual(res, LOGDATA)

            # retrieve the actual data size in db using raw sqlalchemy
            def thd(conn):
                tbl = self.db.model.logchunks
                q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))])
                q = q.where(tbl.c.logid == logid)
                return conn.execute(q).fetchone()[0]

            lengths[mode] = yield self.db.pool.do(thd)

        self.assertDictAlmostEqual(lengths, {
            'raw': 5999,
            'bz2': 44,
            'lz4': 40,
            'gz': 31
        })
class TestCleanupDb(misc.StdoutAssertionsMixin, dirs.DirsMixin,
                    db.RealDatabaseMixin, unittest.TestCase):
    """Tests for the ``buildbot cleanupdb`` command entry point."""

    def setUp(self):
        self.origcwd = os.getcwd()
        self.setUpDirs('basedir')
        with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f:
            f.write(textwrap.dedent("""
                from twisted.application import service
                application = service.Application('buildmaster')
            """))
        self.setUpStdoutAssertions()

    def tearDown(self):
        # restore the cwd before removing the directories created in setUp
        os.chdir(self.origcwd)
        self.tearDownDirs()

    def createMasterCfg(self, extraconfig=""):
        """Write basedir/master.cfg pointing at BUILDBOT_TEST_DB_URL,
        optionally appending *extraconfig* lines to the configuration."""
        # always write relative to the original cwd, since tests may chdir
        os.chdir(self.origcwd)
        with open(os.path.join('basedir', 'master.cfg'), 'wt') as f:
            f.write(textwrap.dedent("""
                from buildbot.plugins import *
                c = BuildmasterConfig = dict()
                c['db_url'] = "{dburl}"
                c['multiMaster'] = True  # dont complain for no builders
                {extraconfig}
            """.format(dburl=os.environ.get("BUILDBOT_TEST_DB_URL"),
                       extraconfig=extraconfig)))

    @defer.inlineCallbacks
    def test_cleanup_not_basedir(self):
        # pointing at a directory that does not exist must fail gracefully
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='doesntexist'))
        self.assertEqual(res, 1)
        self.assertInStdout('invalid buildmaster directory')

    @defer.inlineCallbacks
    def test_cleanup_bad_config(self):
        # no master.cfg was written at all
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 1)
        self.assertInStdout("master.cfg' does not exist")

    @defer.inlineCallbacks
    def test_cleanup_bad_config2(self):
        # a master.cfg that does not even parse
        self.createMasterCfg(extraconfig="++++ # syntaxerror")
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 1)
        self.assertInStdout("error while parsing config")
        # config logs an error via log.err, we must eat it or trial will complain
        self.flushLoggedErrors()

    @defer.inlineCallbacks
    def test_cleanup(self):
        """Exercise every compression mode end-to-end against a real db."""

        # test may use mysql or pg if configured in env
        if "BUILDBOT_TEST_DB_URL" not in os.environ:
            os.environ["BUILDBOT_TEST_DB_URL"] = "sqlite:///" + os.path.join(self.origcwd,
                                                                             "basedir", "state.sqlite")
            # BUGFIX: remove the variable again afterwards so this test does
            # not leak environment state into other tests in the same process
            self.addCleanup(os.environ.pop, "BUILDBOT_TEST_DB_URL", None)
        # we reuse RealDatabaseMixin to setup the db
        yield self.setUpRealDatabase(table_names=['logs', 'logchunks', 'steps', 'builds', 'builders',
                                                  'masters', 'buildrequests', 'buildsets',
                                                  'workers'])
        master = fakemaster.make_master()
        master.config.db['db_url'] = self.db_url
        self.db = DBConnector(self.basedir)
        self.db.setServiceParent(master)
        self.db.pool = self.db_pool

        # we reuse the fake db background data from db.logs unit tests
        yield self.insertTestData(test_db_logs.Tests.backgroundData)

        # insert a log with lots of redundancy
        LOGDATA = "xx\n" * 2000
        logid = yield self.db.logs.addLog(102, "x", "x", "s")
        yield self.db.logs.appendLog(logid, LOGDATA)

        # test all compression methods
        lengths = {}
        for mode in self.db.logs.COMPRESSION_MODE:
            if mode == "lz4" and not hasLz4:
                # lz4 is optional; record the expected size instead of failing
                lengths["lz4"] = 40
                continue
            # create a master.cfg with a different compression method
            self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode,))
            res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
            self.assertEqual(res, 0)

            # make sure the compression doesn't change the data we can retrieve via api
            res = yield self.db.logs.getLogLines(logid, 0, 2000)
            self.assertEqual(res, LOGDATA)

            # retrieve the actual data size in db using raw sqlalchemy
            def thd(conn):
                tbl = self.db.model.logchunks
                q = sa.select([tbl.c.content])
                q = q.where(tbl.c.logid == logid)
                return sum([len(row.content) for row in conn.execute(q)])
            lengths[mode] = yield self.db.pool.do(thd)

        self.assertDictAlmostEqual(lengths, {'raw': 5999, 'bz2': 44, 'lz4': 40, 'gz': 31})

    def assertDictAlmostEqual(self, d1, d2):
        # each method yields a different size, but we still make a fuzzy
        # comparison so the test survives efficiency improvements in the
        # underlying compression libraries
        self.assertEqual(len(d1), len(d2))
        for k in d2.keys():
            self.assertApproximates(d1[k], d2[k], 10)
Example #3
0
class TestCleanupDb(misc.StdoutAssertionsMixin, dirs.DirsMixin,
                    db.RealDatabaseMixin, TestReactorMixin, unittest.TestCase):
    """Exercise the ``buildbot cleanupdb`` command."""

    def setUp(self):
        self.setUpTestReactor()
        self.origcwd = os.getcwd()
        self.setUpDirs('basedir')
        tacfile = os.path.join('basedir', 'buildbot.tac')
        with open(tacfile, 'wt') as f:
            f.write(
                textwrap.dedent("""
                from twisted.application import service
                application = service.Application('buildmaster')
            """))
        self.setUpStdoutAssertions()
        self.ensureNoSqliteMemory()

    def tearDown(self):
        # return to the original cwd before removing the temporary dirs
        os.chdir(self.origcwd)
        self.tearDownDirs()

    def ensureNoSqliteMemory(self):
        # the test may use mysql or pg when configured via the environment;
        # an unset or in-memory sqlite URL is replaced with an on-disk one
        key = "BUILDBOT_TEST_DB_URL"
        url = os.environ.get(key)
        if url is None or url == 'sqlite://':
            db_path = os.path.join(self.origcwd, "basedir", "state.sqlite")
            patch_environ(self, key, "sqlite:///" + db_path)

    def createMasterCfg(self, extraconfig=""):
        """Write basedir/master.cfg pointing at the test db URL, with
        *extraconfig* appended verbatim."""
        # tests may have chdir'd away; write relative to the original cwd
        os.chdir(self.origcwd)
        cfg_path = os.path.join('basedir', 'master.cfg')
        with open(cfg_path, 'wt') as f:
            f.write(
                textwrap.dedent("""
                from buildbot.plugins import *
                c = BuildmasterConfig = dict()
                c['db_url'] = {dburl}
                c['buildbotNetUsageData'] = None
                c['multiMaster'] = True  # don't complain for no builders
                {extraconfig}
            """.format(dburl=repr(os.environ["BUILDBOT_TEST_DB_URL"]),
                       extraconfig=extraconfig)))

    @defer.inlineCallbacks
    def test_cleanup_not_basedir(self):
        # a nonexistent basedir must be rejected with a clear message
        rc = yield cleanupdb._cleanupDatabase(mkconfig(basedir='doesntexist'))
        self.assertEqual(rc, 1)
        self.assertInStdout('invalid buildmaster directory')

    @defer.inlineCallbacks
    def test_cleanup_bad_config(self):
        # no master.cfg was ever written in the basedir
        rc = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(rc, 1)
        self.assertInStdout("master.cfg' does not exist")

    @defer.inlineCallbacks
    def test_cleanup_bad_config2(self):
        # a master.cfg that does not even parse
        self.createMasterCfg(extraconfig="++++ # syntaxerror")
        rc = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(rc, 1)
        self.assertInStdout(
            "encountered a SyntaxError while parsing config file:")
        # the config machinery reports the failure via log.err; flush it so
        # trial does not fail the test on the logged error
        self.flushLoggedErrors()

    @flaky(bugNumber=4406, onPlatform='win32')
    @defer.inlineCallbacks
    def test_cleanup(self):
        """Recompress a redundant log with each method; data must round-trip
        and the stored size must roughly match the expected figure."""
        # reuse RealDatabaseMixin to create the schema tables we touch
        yield self.setUpRealDatabase(table_names=[
            'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters',
            'buildrequests', 'buildsets', 'workers'
        ])
        master = fakemaster.make_master(self)
        master.config.db['db_url'] = self.db_url
        self.db = DBConnector(self.basedir)
        self.db.setServiceParent(master)
        self.db.pool = self.db_pool

        # background rows borrowed from the db.logs unit tests
        yield self.insertTestData(test_db_logs.Tests.backgroundData)

        # a log made of one repeated line compresses extremely well
        logdata = "xx\n" * 2000
        logid = yield self.db.logs.addLog(102, "x", "x", "s")
        yield self.db.logs.appendLog(logid, logdata)

        sizes = {}
        for mode in self.db.logs.COMPRESSION_MODE:
            if mode == "lz4" and not hasLz4:
                # lz4 is optional; pretend it produced the expected size
                sizes["lz4"] = 40
                continue
            # regenerate master.cfg selecting this compression method
            self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode, ))
            rc = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
            self.assertEqual(rc, 0)

            # recompression must not alter what the api hands back
            content = yield self.db.logs.getLogLines(logid, 0, 2000)
            self.assertEqual(content, logdata)

            # measure the stored chunk size with raw sqlalchemy
            def count_bytes(conn):
                tbl = self.db.model.logchunks
                q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))])
                q = q.where(tbl.c.logid == logid)
                return conn.execute(q).fetchone()[0]

            sizes[mode] = yield self.db.pool.do(count_bytes)

        self.assertDictAlmostEqual(
            sizes, {'raw': 5999, 'bz2': 44, 'lz4': 40, 'gz': 31})

    def assertDictAlmostEqual(self, d1, d2):
        # each method yields a different size, but compare fuzzily so the
        # test survives efficiency improvements in the underlying libraries
        self.assertEqual(len(d1), len(d2))
        for key in d2:
            self.assertApproximates(d1[key], d2[key], 10)
Example #4
0
class TestUnicodeChanges(change_import.ChangeImportMixin, unittest.TestCase):
    """Upgrade tests: change pickles written with various encodings must
    come out of the database as proper unicode, or fail loudly."""

    def setUp(self):
        d = self.setUpChangeImport()

        @d.addCallback
        def make_dbc(_):
            master = fakemaster.make_master()
            master.config.db['db_url'] = self.db_url
            self.db = DBConnector(self.basedir)
            self.db.setServiceParent(master)
            return self.db.setup(check_version=False)

        # note the connector isn't started, as we're testing upgrades
        return d

    def tearDown(self):
        return self.tearDownChangeImport()

    # tests

    def testUnicodeChange(self):
        """A utf-8 encoded pickle round-trips to unicode after upgrade."""
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                files=["foo"],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        @d.addCallback
        def check(c):
            # BUGFIX: use assertIsNotNone/assertEqual instead of the
            # deprecated failIf/assertEquals aliases (removed in Py 3.12)
            self.assertIsNotNone(c)
            self.assertEqual(c['author'], u"Frosty the \N{SNOWMAN}")
            self.assertEqual(c['comments'], u"Frosty the \N{SNOWMAN}")
        return d

    def testNonUnicodeChange(self):
        """Bytes that decode under no supported encoding make the upgrade
        fail with UnicodeError rather than storing garbage."""
        self.make_pickle(
            self.make_change(
                who="\xff\xff\x00",
                files=["foo"],
                comments="\xff\xff\x00",
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        return self.assertFailure(d, UnicodeError)

    def testAsciiChange(self):
        """Plain ascii input is preserved unchanged by the upgrade."""
        self.make_pickle(
            self.make_change(
                who="Frosty the Snowman",
                files=["foo"],
                comments="Frosty the Snowman",
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        @d.addCallback
        def check(c):
            self.assertIsNotNone(c)
            self.assertEqual(c['author'], "Frosty the Snowman")
            self.assertEqual(c['comments'], "Frosty the Snowman")
        return d

    def testUTF16Change(self):
        """UTF-16 pickles are recoded via recode_changes before the upgrade."""
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                files=[u"foo".encode('utf16')],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                branch="b1",
                revision=12345),
            # instead of running contrib/fix_changes_pickle_encoding.py, we
            # just call the changemanager's recode_changes directly - it's
            # the function at the heart of the script anyway.
            recode_fn=lambda cm: cm.recode_changes('utf16', quiet=True))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        @d.addCallback
        def check(c):
            self.assertIsNotNone(c)
            self.assertEqual(c['author'], u"Frosty the \N{SNOWMAN}")
            self.assertEqual(c['comments'], u"Frosty the \N{SNOWMAN}")
        return d