class TestCleanupDbRealDb(db.RealDatabaseMixin, TestCleanupDb):
    """Run the cleanupdb compression test against a real database backend.

    db.RealDatabaseMixin supplies setUpRealDatabase()/tearDownRealDatabase()
    plus self.db_url and self.db_pool configured from the test environment.
    """

    @defer.inlineCallbacks
    def setUp(self):
        super().setUp()
        # Create only the tables the log-compression path touches.
        yield self.setUpRealDatabase(table_names=[
            'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters',
            'buildrequests', 'buildsets', 'workers'
        ])
        master = fakemaster.make_master(self)
        master.config.db['db_url'] = self.db_url
        # Attach a real DBConnector to the fake master, reusing the
        # mixin-provided pool instead of opening a second connection.
        self.db = DBConnector(self.basedir)
        self.db.setServiceParent(master)
        self.db.pool = self.db_pool

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.tearDownRealDatabase()

    @defer.inlineCallbacks
    def test_cleanup(self):
        """Compress a highly redundant log with every supported method and
        check both that the retrievable log content is unchanged and that
        the stored size roughly matches the expected per-method size."""
        # we reuse the fake db background data from db.logs unit tests
        yield self.insertTestData(test_db_logs.Tests.backgroundData)

        # insert a log with lots of redundancy
        LOGDATA = "xx\n" * 2000
        logid = yield self.db.logs.addLog(102, "x", "x", "s")
        yield self.db.logs.appendLog(logid, LOGDATA)

        # test all methods
        lengths = {}
        for mode in self.db.logs.COMPRESSION_MODE:
            if mode == "lz4" and not hasLz4:
                # ok.. lz4 is not installed, don't fail
                lengths["lz4"] = 40
                continue
            # create a master.cfg with different compression method
            self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode, ))
            res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
            self.assertEqual(res, 0)

            # make sure the compression don't change the data we can retrieve
            # via api
            res = yield self.db.logs.getLogLines(logid, 0, 2000)
            self.assertEqual(res, LOGDATA)

            # retrieve the actual data size in db using raw sqlalchemy
            def thd(conn):
                # SUM(LENGTH(content)) over this log's chunks, computed
                # server-side so only one number crosses the wire.
                tbl = self.db.model.logchunks
                q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))])
                q = q.where(tbl.c.logid == logid)
                return conn.execute(q).fetchone()[0]
            lengths[mode] = yield self.db.pool.do(thd)

        self.assertDictAlmostEqual(lengths, {
            'raw': 5999,
            'bz2': 44,
            'lz4': 40,
            'gz': 31
        })
def test_cleanup(self):
    """Compress a redundant log with every supported method; verify the
    retrievable content is unchanged and the stored size shrinks to the
    expected per-method size.

    NOTE(review): this is a generator (uses yield) — presumably decorated
    with @defer.inlineCallbacks at the definition site; confirm.
    """
    # test may use mysql or pg if configured in env
    if "BUILDBOT_TEST_DB_URL" not in os.environ:
        patch_environ(
            self, "BUILDBOT_TEST_DB_URL",
            "sqlite:///" + os.path.join(self.origcwd, "basedir",
                                        "state.sqlite"))
    # we reuse RealDatabaseMixin to setup the db
    yield self.setUpRealDatabase(table_names=[
        'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters',
        'buildrequests', 'buildsets', 'workers'
    ])
    master = fakemaster.make_master()
    master.config.db['db_url'] = self.db_url
    # Real DBConnector wired to the fake master, sharing the mixin's pool.
    self.db = DBConnector(self.basedir)
    self.db.setServiceParent(master)
    self.db.pool = self.db_pool
    # we reuse the fake db background data from db.logs unit tests
    yield self.insertTestData(test_db_logs.Tests.backgroundData)

    # insert a log with lots of redundancy
    LOGDATA = "xx\n" * 2000
    logid = yield self.db.logs.addLog(102, "x", "x", "s")
    yield self.db.logs.appendLog(logid, LOGDATA)

    # test all methods
    lengths = {}
    for mode in self.db.logs.COMPRESSION_MODE.keys():
        if mode == "lz4" and not hasLz4:
            # ok.. lz4 is not installed, dont fail
            lengths["lz4"] = 40
            continue
        # create a master.cfg with different compression method
        self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode, ))
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 0)

        # make sure the compression don't change the data we can retrieve
        # via api
        res = yield self.db.logs.getLogLines(logid, 0, 2000)
        self.assertEqual(res, LOGDATA)

        # retrieve the actual data size in db using raw sqlalchemy
        def thd(conn):
            # Fetch each chunk and total its length client-side.
            tbl = self.db.model.logchunks
            q = sa.select([tbl.c.content])
            q = q.where(tbl.c.logid == logid)
            return sum([len(row.content) for row in conn.execute(q)])
        lengths[mode] = yield self.db.pool.do(thd)

    self.assertDictAlmostEqual(lengths, {
        'raw': 5999,
        'bz2': 44,
        'lz4': 40,
        'gz': 31
    })
class TestChangeProperties(unittest.TestCase):
    """Check that Change properties survive a round-trip through the
    database, both via the cache and after the cache is flushed."""

    def setUp(self):
        # Fresh working directory with an upgraded sqlite schema.
        self.basedir = "ChangeProperties"
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)
        os.makedirs(self.basedir)
        self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
        self.sm = manager.DBSchemaManager(self.spec, self.basedir)
        self.sm.upgrade(quiet=True)
        self.db = DBConnector(self.spec)
        self.db.start()

    def tearDown(self):
        self.db.stop()
        shutil.rmtree(self.basedir)

    def testDBGetChangeNumberedNow(self):
        db = self.db
        c = Change(who="catlee", files=["foo"], comments="", branch="b1")
        c.properties.setProperty("foo", "bar", "property_source")
        db.addChangeToDatabase(c)

        c1 = db.getChangeNumberedNow(c.number)
        self.assertEquals(c1.properties, c.properties)

        # Flush the cache so the second read really hits the database.
        db._change_cache = util.LRUCache()
        c1 = db.getChangeNumberedNow(c.number)
        self.assertEquals(c1.properties, c.properties)

    def testDBGetChangeByNumber(self):
        db = self.db
        c = Change(who="catlee", files=["foo"], comments="", branch="b1")
        c.properties.setProperty("foo", "bar", "property_source")
        db.addChangeToDatabase(c)

        d = db.getChangeByNumber(c.number)

        def check(c1):
            self.assertEquals(c1.properties, c.properties)
        d.addCallback(check)

        def flush(ign):
            # Flush the cache so the next read really hits the database.
            db._change_cache = util.LRUCache()
            return db.getChangeByNumber(c.number)
        d.addCallback(flush)
        d.addCallback(check)
        # BUG FIX: return the Deferred so trial waits for the callbacks;
        # previously assertion failures in check() were silently dropped.
        return d
def setUp(self):
    d = self.setUpChangeImport()
    # NOTE(review): this connector is built before the import fixture's
    # Deferred fires and is immediately replaced inside make_dbc below —
    # presumably so self.db exists for tearDown even if setup fails; confirm
    # before removing the duplicate construction.
    self.db = DBConnector(mock.Mock(), self.db_url, self.basedir)

    def make_dbc(_):
        # Rebuild the connector once the change-import fixture is ready.
        self.db = DBConnector(mock.Mock(), self.db_url, self.basedir)
    d.addCallback(make_dbc)
    # note the connector isn't started, as we're testing upgrades
    return d
def setUp(self):
    # Fresh throwaway working directory for this test class.
    self.basedir = "WeirdChanges"
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    # Now try the upgrade process, which will import the old changes.
    self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    self.db = DBConnector(self.spec)
    self.db.start()
def setUp(self):
    # NOTE(review): generator method (uses yield) — presumably wrapped by
    # @defer.inlineCallbacks at the definition site; confirm.
    super().setUp()
    # Create only the tables the tests touch.
    yield self.setUpRealDatabase(table_names=[
        'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters',
        'buildrequests', 'buildsets', 'workers'
    ])
    master = fakemaster.make_master(self)
    master.config.db['db_url'] = self.db_url
    # Real DBConnector attached to the fake master; reuse the mixin's pool.
    self.db = DBConnector(self.basedir)
    self.db.setServiceParent(master)
    self.db.pool = self.db_pool
def setUp(self):
    # Fresh working directory, schema upgraded to current, connector started.
    self.basedir = "ChangeProperties"
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    self.sm = manager.DBSchemaManager(self.spec, self.basedir)
    self.sm.upgrade(quiet=True)
    self.db = DBConnector(self.spec)
    self.db.start()
def setUpRealDatabaseWithConnector(self, master, table_names=None,
                                   basedir='basedir', want_pool=True,
                                   sqlite_memory=True):
    # Helper: set up a real database and attach a DBConnector to `master`,
    # sharing the pool created by setUpRealDatabase.
    # NOTE(review): generator (uses yield) — presumably decorated with
    # @defer.inlineCallbacks where defined; confirm.
    yield self.setUpRealDatabase(table_names, basedir, want_pool,
                                 sqlite_memory)
    master.config.db['db_url'] = self.db_url
    master.db = DBConnector(self.basedir)
    yield master.db.setServiceParent(master)
    master.db.pool = self.db_pool
def setUp(self):
    d = self.setUpChangeImport()
    # NOTE(review): built before the fixture Deferred fires and replaced
    # in make_dbc below — presumably so self.db exists for tearDown even
    # if setup fails; confirm before removing.
    self.db = DBConnector(self.db_url, self.basedir)

    def make_dbc(_):
        # Rebuild the connector once the change-import fixture is ready.
        self.db = DBConnector(self.db_url, self.basedir)
    d.addCallback(make_dbc)
    # note the connector isn't started, as we're testing upgrades
    return d
def setUp(self):
    # Fresh working directory for the MySQL-backed variant of the tests.
    self.basedir = "MySQLDBUnicodeChanges"
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    # Now try the upgrade process, which will import the old changes.
    self.spec = DBSpec.from_url(
        "mysql://*****:*****@localhost/buildbot_test", self.basedir)
    self.db = DBConnector(self.spec)
    self.db.start()
    # Start from an empty schema: drop whatever tables a previous run left
    # in the shared test database.
    result = self.db.runQueryNow("SHOW TABLES")
    for row in result:
        self.db.runQueryNow("DROP TABLE %s" % row[0])
    self.db.runQueryNow("COMMIT")
class TestWeirdChanges(unittest.TestCase):
    """Import a changes.pck containing oddly-shaped data (nested filename
    lists) and check the schema upgrade flattens it correctly."""

    def setUp(self):
        # Fresh throwaway working directory with a started connector.
        self.basedir = "WeirdChanges"
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)
        os.makedirs(self.basedir)
        # Now try the upgrade process, which will import the old changes.
        self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
        self.db = DBConnector(self.spec)
        self.db.start()

    def tearDown(self):
        if self.db:
            self.db.stop()
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)

    def mkchanges(self, changes):
        # Wrap raw Change objects in the OldChangeMaster container that the
        # upgrade's pickle import expects to find in changes.pck.
        import buildbot.changes.changes
        cm = buildbot.changes.changes.OldChangeMaster()
        cm.changes = changes
        return cm

    def testListsAsFilenames(self):
        # Create changes.pck
        changes = [
            Change(who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                   files=[["foo", "bar"], ['bing']],
                   comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                   branch="b1",
                   revision=12345)
        ]
        # FIX: close the pickle file deterministically instead of relying
        # on garbage collection to flush and close the unclosed handle.
        with open(os.path.join(self.basedir, "changes.pck"), "wb") as f:
            cPickle.dump(self.mkchanges(changes), f)

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        # Nested file lists must come out flattened.
        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(sorted(c.files), sorted([u"foo", u"bar", u"bing"]))
def setUp(self):
    # Fresh throwaway working directory for this test class.
    self.basedir = "UnicodeChanges"
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    # Now try the upgrade process, which will import the old changes.
    self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    self.db = DBConnector(self.spec)
    self.db.start()
def test_cleanup(self):
    """Compress a redundant log with every supported method; verify the
    retrievable content is unchanged and the stored size shrinks to the
    expected per-method size.

    NOTE(review): generator (uses yield) — presumably decorated with
    @defer.inlineCallbacks at the definition site; confirm.
    """
    # test may use mysql or pg if configured in env
    if "BUILDBOT_TEST_DB_URL" not in os.environ:
        patch_environ(self, "BUILDBOT_TEST_DB_URL",
                      "sqlite:///" + os.path.join(self.origcwd, "basedir",
                                                  "state.sqlite"))
    # we reuse RealDatabaseMixin to setup the db
    yield self.setUpRealDatabase(table_names=['logs', 'logchunks', 'steps',
                                              'builds', 'builders', 'masters',
                                              'buildrequests', 'buildsets',
                                              'workers'])
    master = fakemaster.make_master()
    master.config.db['db_url'] = self.db_url
    # Real DBConnector wired to the fake master, sharing the mixin's pool.
    self.db = DBConnector(self.basedir)
    self.db.setServiceParent(master)
    self.db.pool = self.db_pool
    # we reuse the fake db background data from db.logs unit tests
    yield self.insertTestData(test_db_logs.Tests.backgroundData)

    # insert a log with lots of redundancy
    LOGDATA = "xx\n" * 2000
    logid = yield self.db.logs.addLog(102, "x", "x", "s")
    yield self.db.logs.appendLog(logid, LOGDATA)

    # test all methods
    lengths = {}
    for mode in self.db.logs.COMPRESSION_MODE.keys():
        if mode == "lz4" and not hasLz4:
            # ok.. lz4 is not installed, dont fail
            lengths["lz4"] = 40
            continue
        # create a master.cfg with different compression method
        self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode,))
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 0)

        # make sure the compression don't change the data we can retrieve
        # via api
        res = yield self.db.logs.getLogLines(logid, 0, 2000)
        self.assertEqual(res, LOGDATA)

        # retrieve the actual data size in db using raw sqlalchemy
        def thd(conn):
            # Fetch each chunk and total its length client-side.
            tbl = self.db.model.logchunks
            q = sa.select([tbl.c.content])
            q = q.where(tbl.c.logid == logid)
            return sum([len(row.content) for row in conn.execute(q)])
        lengths[mode] = yield self.db.pool.do(thd)

    self.assertDictAlmostEqual(
        lengths, {'raw': 5999, 'bz2': 44, 'lz4': 40, 'gz': 31})
class TestWeirdChanges(unittest.TestCase):
    """Import a changes.pck containing oddly-shaped data (nested filename
    lists) and check the schema upgrade flattens it correctly."""

    def setUp(self):
        # Fresh throwaway working directory with a started connector.
        self.basedir = "WeirdChanges"
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)
        os.makedirs(self.basedir)
        # Now try the upgrade process, which will import the old changes.
        self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
        self.db = DBConnector(self.spec)
        self.db.start()

    def tearDown(self):
        if self.db:
            self.db.stop()
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)

    def mkchanges(self, changes):
        # Wrap raw Change objects in the OldChangeMaster container that the
        # upgrade's pickle import expects to find in changes.pck.
        import buildbot.changes.changes
        cm = buildbot.changes.changes.OldChangeMaster()
        cm.changes = changes
        return cm

    def testListsAsFilenames(self):
        # Create changes.pck
        changes = [Change(who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                          files=[["foo", "bar"], ['bing']],
                          comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                          branch="b1", revision=12345)]
        # FIX: close the pickle file deterministically instead of relying
        # on garbage collection to flush and close the unclosed handle.
        with open(os.path.join(self.basedir, "changes.pck"), "wb") as f:
            cPickle.dump(self.mkchanges(changes), f)

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        # Nested file lists must come out flattened.
        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(sorted(c.files), sorted([u"foo", u"bar", u"bing"]))
def setUp(self):
    # Fresh working directory for the MySQL-backed variant of the tests.
    self.basedir = "MySQLDBUnicodeChanges"
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    # Now try the upgrade process, which will import the old changes.
    self.spec = DBSpec.from_url(
        "mysql://*****:*****@localhost/buildbot_test", self.basedir)
    self.db = DBConnector(self.spec)
    self.db.start()
    # Start from an empty schema: drop whatever tables a previous run left
    # in the shared test database.
    result = self.db.runQueryNow("SHOW TABLES")
    for row in result:
        self.db.runQueryNow("DROP TABLE %s" % row[0])
    self.db.runQueryNow("COMMIT")
class TestUnicodeChanges(unittest.TestCase):
    """Exercise the changes.pck import path of the schema upgrade with
    utf-8, undecodable, plain-ascii and recoded utf-16 author/comments."""

    def setUp(self):
        # Fresh throwaway working directory with a started connector.
        self.basedir = "UnicodeChanges"
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)
        os.makedirs(self.basedir)
        # Now try the upgrade process, which will import the old changes.
        self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
        self.db = DBConnector(self.spec)
        self.db.start()

    def tearDown(self):
        if self.db:
            self.db.stop()

    def mkchanges(self, changes):
        # Wrap the changes in the OldChangeMaster container that the
        # upgrade's pickle import expects.
        import buildbot.changes.changes
        cm = buildbot.changes.changes.OldChangeMaster()
        cm.changes = changes
        return cm

    def _write_pickle(self, obj):
        # FIX: write changes.pck through a context manager so the file is
        # flushed and closed deterministically; the original passed an
        # unclosed open() handle to cPickle.dump and leaked it.
        with open(os.path.join(self.basedir, "changes.pck"), "wb") as f:
            cPickle.dump(obj, f)

    def testUnicodeChange(self):
        # Create changes.pck
        changes = [Change(who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                          files=["foo"],
                          comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                          branch="b1", revision=12345)]
        self._write_pickle(self.mkchanges(changes))

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
        self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")

    def testNonUnicodeChange(self):
        # Create changes.pck
        changes = [Change(who="\xff\xff\x00", files=["foo"],
                          comments="\xff\xff\x00", branch="b1",
                          revision=12345)]
        self._write_pickle(self.mkchanges(changes))

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        # Undecodable author/comments must abort the upgrade.
        self.assertRaises(UnicodeError, lambda: sm.upgrade(quiet=True))

    def testAsciiChange(self):
        # Create changes.pck
        changes = [Change(who="Frosty the Snowman",
                          files=["foo"],
                          comments="Frosty the Snowman",
                          branch="b1", revision=12345)]
        self._write_pickle(self.mkchanges(changes))

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(c.who, "Frosty the Snowman")
        self.assertEquals(c.comments, "Frosty the Snowman")

    def testUTF16Change(self):
        # Create changes.pck
        cm = OldChangeMaster()
        cm.changes = [Change(who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                             files=["foo"],
                             comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                             branch="b1", revision=12345)]
        # instead of running contrib/fix_changes_pickle_encoding.py, we just
        # call the changemanager's recode_changes directly - it's the
        # function at the heart of the script anyway.
        cm.recode_changes('utf16', quiet=True)
        # and dump the recoded changemanager to changes.pck before trying a
        # schema upgrade
        self._write_pickle(cm)

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
        self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")
class TestUnicodeChanges(change_import.ChangeImportMixin, unittest.TestCase):
    """Upgrade-path tests for importing pickled changes with various
    encodings, built on the ChangeImportMixin helpers."""

    def setUp(self):
        d = self.setUpChangeImport()

        @d.addCallback
        def make_dbc(_):
            # Once the import fixture is ready, attach a connector to a
            # fresh fake master; skip the version check since the schema is
            # deliberately old here.
            master = fakemaster.make_master()
            master.config.db['db_url'] = self.db_url
            self.db = DBConnector(self.basedir)
            self.db.setServiceParent(master)
            return self.db.setup(check_version=False)
        # note the connector isn't started, as we're testing upgrades
        return d

    def tearDown(self):
        return self.tearDownChangeImport()

    # tests

    def testUnicodeChange(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                files=["foo"],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        @d.addCallback
        def check(c):
            self.failIf(c is None)
            self.assertEquals(c['author'], u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c['comments'], u"Frosty the \N{SNOWMAN}")
        return d

    def testNonUnicodeChange(self):
        self.make_pickle(
            self.make_change(
                who="\xff\xff\x00",
                files=["foo"],
                comments="\xff\xff\x00",
                branch="b1",
                revision=12345))

        # Undecodable bytes must make the upgrade fail with UnicodeError.
        d = self.db.model.upgrade()
        return self.assertFailure(d, UnicodeError)

    def testAsciiChange(self):
        self.make_pickle(
            self.make_change(
                who="Frosty the Snowman",
                files=["foo"],
                comments="Frosty the Snowman",
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        @d.addCallback
        def check(c):
            self.failIf(c is None)
            self.assertEquals(c['author'], "Frosty the Snowman")
            self.assertEquals(c['comments'], "Frosty the Snowman")
        return d

    def testUTF16Change(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                files=[u"foo".encode('utf16')],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                branch="b1",
                revision=12345),
            # instead of running contrib/fix_changes_pickle_encoding.py, we
            # just call the changemanager's recode_changes directly - it's
            # the function at the heart of the script anyway.
            recode_fn=lambda cm: cm.recode_changes('utf16', quiet=True))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        @d.addCallback
        def check(c):
            self.failIf(c is None)
            self.assertEquals(c['author'], u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c['comments'], u"Frosty the \N{SNOWMAN}")
        return d
def make_dbc(_):
    # Callback: (re)create the connector once the preceding Deferred fires.
    self.db = DBConnector(self.db_url, self.basedir)
class TestCleanupDb(misc.StdoutAssertionsMixin, dirs.DirsMixin,
                    db.RealDatabaseMixin, unittest.TestCase):
    """Tests for the `buildbot cleanupdb` command-line entry point."""

    def setUp(self):
        # Work inside a scratch 'basedir' containing a minimal buildbot.tac.
        self.origcwd = os.getcwd()
        self.setUpDirs('basedir')
        with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f:
            f.write(textwrap.dedent("""
                from twisted.application import service
                application = service.Application('buildmaster')
            """))
        self.setUpStdoutAssertions()

    def tearDown(self):
        os.chdir(self.origcwd)
        self.tearDownDirs()

    def createMasterCfg(self, extraconfig=""):
        # Write a minimal master.cfg pointing at the test database; callers
        # append extra configuration (e.g. the compression method).
        os.chdir(self.origcwd)
        with open(os.path.join('basedir', 'master.cfg'), 'wt') as f:
            f.write(textwrap.dedent("""
                from buildbot.plugins import *
                c = BuildmasterConfig = dict()
                c['db_url'] = "{dburl}"
                c['multiMaster'] = True  # dont complain for no builders
                {extraconfig}
            """.format(dburl=os.environ.get("BUILDBOT_TEST_DB_URL"),
                       extraconfig=extraconfig)))

    @defer.inlineCallbacks
    def test_cleanup_not_basedir(self):
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='doesntexist'))
        self.assertEqual(res, 1)
        self.assertInStdout('invalid buildmaster directory')

    @defer.inlineCallbacks
    def test_cleanup_bad_config(self):
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 1)
        self.assertInStdout("master.cfg' does not exist")

    @defer.inlineCallbacks
    def test_cleanup_bad_config2(self):
        self.createMasterCfg(extraconfig="++++ # syntaxerror")
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 1)
        self.assertInStdout("error while parsing config")
        # config logs an error via log.err, we must eat it or trial will complain
        self.flushLoggedErrors()

    @defer.inlineCallbacks
    def test_cleanup(self):
        """Compress a redundant log with every supported method; verify the
        retrievable content is unchanged and the stored size shrinks."""
        # test may use mysql or pg if configured in env
        if "BUILDBOT_TEST_DB_URL" not in os.environ:
            # NOTE(review): mutates os.environ without restoring it, so the
            # value leaks into later tests — confirm that is intended.
            os.environ["BUILDBOT_TEST_DB_URL"] = "sqlite:///" + os.path.join(
                self.origcwd, "basedir", "state.sqlite")
        # we reuse RealDatabaseMixin to setup the db
        yield self.setUpRealDatabase(table_names=['logs', 'logchunks',
                                                  'steps', 'builds',
                                                  'builders', 'masters',
                                                  'buildrequests',
                                                  'buildsets', 'workers'])
        master = fakemaster.make_master()
        master.config.db['db_url'] = self.db_url
        # Real DBConnector wired to the fake master, sharing the mixin's pool.
        self.db = DBConnector(self.basedir)
        self.db.setServiceParent(master)
        self.db.pool = self.db_pool
        # we reuse the fake db background data from db.logs unit tests
        yield self.insertTestData(test_db_logs.Tests.backgroundData)

        # insert a log with lots of redundancy
        LOGDATA = "xx\n" * 2000
        logid = yield self.db.logs.addLog(102, "x", "x", "s")
        yield self.db.logs.appendLog(logid, LOGDATA)

        # test all methods
        lengths = {}
        for mode in self.db.logs.COMPRESSION_MODE.keys():
            if mode == "lz4" and not hasLz4:
                # ok.. lz4 is not installed, dont fail
                lengths["lz4"] = 40
                continue
            # create a master.cfg with different compression method
            self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode,))
            res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
            self.assertEqual(res, 0)

            # make sure the compression don't change the data we can retrieve via api
            res = yield self.db.logs.getLogLines(logid, 0, 2000)
            self.assertEqual(res, LOGDATA)

            # retrieve the actual data size in db using raw sqlalchemy
            def thd(conn):
                # Fetch each chunk and total its length client-side.
                tbl = self.db.model.logchunks
                q = sa.select([tbl.c.content])
                q = q.where(tbl.c.logid == logid)
                return sum([len(row.content) for row in conn.execute(q)])
            lengths[mode] = yield self.db.pool.do(thd)

        self.assertDictAlmostEqual(lengths,
                                   {'raw': 5999, 'bz2': 44, 'lz4': 40,
                                    'gz': 31})

    def assertDictAlmostEqual(self, d1, d2):
        # The test shows each methods return different size
        # but we still make a fuzzy comparaison to resist if underlying libraries
        # improve efficiency
        self.assertEqual(len(d1), len(d2))
        for k in d2.keys():
            self.assertApproximates(d1[k], d2[k], 10)
class TestUnicodeChanges(change_import.ChangeImportMixin, unittest.TestCase):
    """Upgrade-path tests for importing pickled changes with various
    encodings; this variant reads results via getChangeInstance."""

    def setUp(self):
        d = self.setUpChangeImport()
        # NOTE(review): built before the fixture Deferred fires and replaced
        # in make_dbc below — presumably so self.db exists for tearDown even
        # if setup fails; confirm before removing.
        self.db = DBConnector(self.db_url, self.basedir)

        def make_dbc(_):
            self.db = DBConnector(self.db_url, self.basedir)
        d.addCallback(make_dbc)
        # note the connector isn't started, as we're testing upgrades
        return d

    def tearDown(self):
        if self.db:
            self.db.stop()
        return self.tearDownChangeImport()

    def testUnicodeChange(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                files=["foo"],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.start())
        d.addCallback(lambda _: self.db.changes.getChangeInstance(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")
        d.addCallback(check)
        return d

    def testNonUnicodeChange(self):
        self.make_pickle(
            self.make_change(
                who="\xff\xff\x00",
                files=["foo"],
                comments="\xff\xff\x00",
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()

        def eb(f):
            # Expected path: undecodable bytes abort the upgrade.
            self.failUnless("UnicodeError" in str(f))

        def cb(r):
            self.fail("upgrade did not fail for non-unicode changes")
        d.addCallbacks(cb, eb)
        return d

    def testAsciiChange(self):
        self.make_pickle(
            self.make_change(
                who="Frosty the Snowman",
                files=["foo"],
                comments="Frosty the Snowman",
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.start())
        d.addCallback(lambda _: self.db.changes.getChangeInstance(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c.who, "Frosty the Snowman")
            self.assertEquals(c.comments, "Frosty the Snowman")
        d.addCallback(check)
        return d

    def testUTF16Change(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                files=[u"foo".encode('utf16')],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                branch="b1",
                revision=12345),
            # instead of running contrib/fix_changes_pickle_encoding.py, we
            # just call the changemanager's recode_changes directly - it's
            # the function at the heart of the script anyway.
            recode_fn=lambda cm: cm.recode_changes('utf16', quiet=True))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.start())
        d.addCallback(lambda _: self.db.changes.getChangeInstance(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")
        d.addCallback(check)
        return d
def make_dbc(_):
    # Callback: build a fake master and attach a connector to it once the
    # preceding Deferred fires.
    master = fakemaster.make_master()
    self.db = DBConnector(master, self.db_url, self.basedir)
class TestUnicodeChanges(unittest.TestCase):
    """Exercise the changes.pck import path of the schema upgrade with
    utf-8, undecodable, plain-ascii and recoded utf-16 author/comments."""

    def setUp(self):
        # Fresh throwaway working directory with a started connector.
        self.basedir = "UnicodeChanges"
        if os.path.exists(self.basedir):
            shutil.rmtree(self.basedir)
        os.makedirs(self.basedir)
        # Now try the upgrade process, which will import the old changes.
        self.spec = DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
        self.db = DBConnector(self.spec)
        self.db.start()

    def tearDown(self):
        if self.db:
            self.db.stop()

    def _write_pickle(self, obj):
        # FIX: write changes.pck through a context manager so the file is
        # flushed and closed deterministically; the original passed an
        # unclosed open() handle to cPickle.dump and leaked it.
        # Mode "w" preserved from the original (Python 2 text mode).
        with open(os.path.join(self.basedir, "changes.pck"), "w") as f:
            cPickle.dump(obj, f)

    def testUnicodeChange(self):
        # Create changes.pck
        changes = [
            Change(who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                   files=["foo"],
                   comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                   branch="b1",
                   revision=12345)
        ]
        self._write_pickle(Thing(changes=changes))

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
        self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")

    def testNonUnicodeChange(self):
        # Create changes.pck
        changes = [
            Change(who="\xff\xff\x00",
                   files=["foo"],
                   comments="\xff\xff\x00",
                   branch="b1",
                   revision=12345)
        ]
        self._write_pickle(Thing(changes=changes))

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        # Undecodable author/comments must abort the upgrade.
        self.assertRaises(UnicodeError, lambda: sm.upgrade(quiet=True))

    def testAsciiChange(self):
        # Create changes.pck
        changes = [
            Change(who="Frosty the Snowman",
                   files=["foo"],
                   comments="Frosty the Snowman",
                   branch="b1",
                   revision=12345)
        ]
        self._write_pickle(Thing(changes=changes))

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(c.who, "Frosty the Snowman")
        self.assertEquals(c.comments, "Frosty the Snowman")

    def testUTF16Change(self):
        # Create changes.pck
        cm = OldChangeMaster()
        cm.changes = [
            Change(who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                   files=["foo"],
                   comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                   branch="b1",
                   revision=12345)
        ]
        # instead of running contrib/fix_changes_pickle_encoding.py, we just call
        # the changemanager's recode_changes directly - it's the function at the
        # heart of the script anyway.
        cm.recode_changes('utf16', quiet=True)
        # and dump the recoded changemanager to changes.pck before trying a schema upgrade
        self._write_pickle(cm)

        sm = manager.DBSchemaManager(self.spec, self.basedir)
        sm.upgrade(quiet=True)

        c = self.db.getChangeNumberedNow(1)
        self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
        self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")
class TestCleanupDb(misc.StdoutAssertionsMixin, dirs.DirsMixin,
                    db.RealDatabaseMixin, TestReactorMixin,
                    unittest.TestCase):
    """Tests for the `buildbot cleanupdb` command-line entry point,
    using a real (non in-memory) database."""

    def setUp(self):
        self.setUpTestReactor()
        # Work inside a scratch 'basedir' containing a minimal buildbot.tac.
        self.origcwd = os.getcwd()
        self.setUpDirs('basedir')
        with open(os.path.join('basedir', 'buildbot.tac'), 'wt') as f:
            f.write(
                textwrap.dedent("""
                    from twisted.application import service
                    application = service.Application('buildmaster')
                """))
        self.setUpStdoutAssertions()
        self.ensureNoSqliteMemory()

    def tearDown(self):
        os.chdir(self.origcwd)
        self.tearDownDirs()

    def ensureNoSqliteMemory(self):
        # test may use mysql or pg if configured in env
        envkey = "BUILDBOT_TEST_DB_URL"
        # An in-memory sqlite db would vanish between processes; force a
        # file-backed one instead (patch_environ restores it afterwards).
        if envkey not in os.environ or os.environ[envkey] == 'sqlite://':
            patch_environ(
                self, envkey,
                "sqlite:///" + os.path.join(self.origcwd, "basedir",
                                            "state.sqlite"))

    def createMasterCfg(self, extraconfig=""):
        # Write a minimal master.cfg pointing at the test database; callers
        # append extra configuration (e.g. the compression method).
        os.chdir(self.origcwd)
        with open(os.path.join('basedir', 'master.cfg'), 'wt') as f:
            f.write(
                textwrap.dedent("""
                    from buildbot.plugins import *
                    c = BuildmasterConfig = dict()
                    c['db_url'] = {dburl}
                    c['buildbotNetUsageData'] = None
                    c['multiMaster'] = True  # don't complain for no builders
                    {extraconfig}
                """.format(dburl=repr(os.environ["BUILDBOT_TEST_DB_URL"]),
                           extraconfig=extraconfig)))

    @defer.inlineCallbacks
    def test_cleanup_not_basedir(self):
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='doesntexist'))
        self.assertEqual(res, 1)
        self.assertInStdout('invalid buildmaster directory')

    @defer.inlineCallbacks
    def test_cleanup_bad_config(self):
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 1)
        self.assertInStdout("master.cfg' does not exist")

    @defer.inlineCallbacks
    def test_cleanup_bad_config2(self):
        self.createMasterCfg(extraconfig="++++ # syntaxerror")
        res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
        self.assertEqual(res, 1)
        self.assertInStdout(
            "encountered a SyntaxError while parsing config file:")
        # config logs an error via log.err, we must eat it or trial will
        # complain
        self.flushLoggedErrors()

    @flaky(bugNumber=4406, onPlatform='win32')
    @defer.inlineCallbacks
    def test_cleanup(self):
        """Compress a redundant log with every supported method; verify the
        retrievable content is unchanged and the stored size shrinks."""
        # we reuse RealDatabaseMixin to setup the db
        yield self.setUpRealDatabase(table_names=[
            'logs', 'logchunks', 'steps', 'builds', 'builders', 'masters',
            'buildrequests', 'buildsets', 'workers'
        ])
        master = fakemaster.make_master(self)
        master.config.db['db_url'] = self.db_url
        # Real DBConnector wired to the fake master, sharing the mixin's pool.
        self.db = DBConnector(self.basedir)
        self.db.setServiceParent(master)
        self.db.pool = self.db_pool
        # we reuse the fake db background data from db.logs unit tests
        yield self.insertTestData(test_db_logs.Tests.backgroundData)

        # insert a log with lots of redundancy
        LOGDATA = "xx\n" * 2000
        logid = yield self.db.logs.addLog(102, "x", "x", "s")
        yield self.db.logs.appendLog(logid, LOGDATA)

        # test all methods
        lengths = {}
        for mode in self.db.logs.COMPRESSION_MODE:
            if mode == "lz4" and not hasLz4:
                # ok.. lz4 is not installed, don't fail
                lengths["lz4"] = 40
                continue
            # create a master.cfg with different compression method
            self.createMasterCfg("c['logCompressionMethod'] = '%s'" % (mode, ))
            res = yield cleanupdb._cleanupDatabase(mkconfig(basedir='basedir'))
            self.assertEqual(res, 0)

            # make sure the compression don't change the data we can retrieve
            # via api
            res = yield self.db.logs.getLogLines(logid, 0, 2000)
            self.assertEqual(res, LOGDATA)

            # retrieve the actual data size in db using raw sqlalchemy
            def thd(conn):
                # SUM(LENGTH(content)) over this log's chunks, computed
                # server-side so only one number crosses the wire.
                tbl = self.db.model.logchunks
                q = sa.select([sa.func.sum(sa.func.length(tbl.c.content))])
                q = q.where(tbl.c.logid == logid)
                return conn.execute(q).fetchone()[0]
            lengths[mode] = yield self.db.pool.do(thd)

        self.assertDictAlmostEqual(lengths, {
            'raw': 5999,
            'bz2': 44,
            'lz4': 40,
            'gz': 31
        })

    def assertDictAlmostEqual(self, d1, d2):
        # The test shows each methods return different size
        # but we still make a fuzzy comparison to resist if underlying libraries
        # improve efficiency
        self.assertEqual(len(d1), len(d2))
        for k in d2.keys():
            self.assertApproximates(d1[k], d2[k], 10)
def make_dbc(_):
    # Callback: (re)create the connector once the preceding Deferred fires.
    self.db = DBConnector(mock.Mock(), self.db_url, self.basedir)
class TestUnicodeChanges(change_import.ChangeImportMixin, unittest.TestCase):
    """Upgrade-path tests for importing pickled changes with various
    encodings; this variant passes a mock master to DBConnector."""

    def setUp(self):
        d = self.setUpChangeImport()
        # NOTE(review): built before the fixture Deferred fires and replaced
        # in make_dbc below — presumably so self.db exists for tearDown even
        # if setup fails; confirm before removing.
        self.db = DBConnector(mock.Mock(), self.db_url, self.basedir)

        def make_dbc(_):
            self.db = DBConnector(mock.Mock(), self.db_url, self.basedir)
        d.addCallback(make_dbc)
        # note the connector isn't started, as we're testing upgrades
        return d

    def tearDown(self):
        if self.db:
            self.db.stop()
        return self.tearDownChangeImport()

    def testUnicodeChange(self):
        self.make_pickle(
            self.make_change(who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                             files=["foo"],
                             comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                             branch="b1",
                             revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.start())
        d.addCallback(lambda _: self.db.changes.getChangeInstance(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")
        d.addCallback(check)
        return d

    def testNonUnicodeChange(self):
        self.make_pickle(
            self.make_change(who="\xff\xff\x00",
                             files=["foo"],
                             comments="\xff\xff\x00",
                             branch="b1",
                             revision=12345))

        d = self.db.model.upgrade()

        def eb(f):
            # Expected path: undecodable bytes abort the upgrade.
            self.failUnless("UnicodeError" in str(f))

        def cb(r):
            self.fail("upgrade did not fail for non-unicode changes")
        d.addCallbacks(cb, eb)
        return d

    def testAsciiChange(self):
        self.make_pickle(
            self.make_change(who="Frosty the Snowman",
                             files=["foo"],
                             comments="Frosty the Snowman",
                             branch="b1",
                             revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.start())
        d.addCallback(lambda _: self.db.changes.getChangeInstance(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c.who, "Frosty the Snowman")
            self.assertEquals(c.comments, "Frosty the Snowman")
        d.addCallback(check)
        return d

    def testUTF16Change(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                files=[u"foo".encode('utf16')],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                branch="b1",
                revision=12345),
            # instead of running contrib/fix_changes_pickle_encoding.py, we
            # just call the changemanager's recode_changes directly - it's
            # the function at the heart of the script anyway.
            recode_fn=lambda cm: cm.recode_changes('utf16', quiet=True))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.start())
        d.addCallback(lambda _: self.db.changes.getChangeInstance(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c.who, u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c.comments, u"Frosty the \N{SNOWMAN}")
        d.addCallback(check)
        return d
def make_dbc(_):
    # Callback: attach a connector to a fresh fake master once the
    # preceding Deferred fires; skip the schema version check since
    # upgrade tests deliberately start from an old schema.
    master = fakemaster.make_master()
    master.config.db['db_url'] = self.db_url
    self.db = DBConnector(master, self.basedir)
    return self.db.setup(check_version=False)
class TestUnicodeChanges(change_import.ChangeImportMixin, unittest.TestCase):
    """Upgrade-path tests for importing pickled changes with various
    encodings; results are read back as dicts via changes.getChange."""

    def setUp(self):
        d = self.setUpChangeImport()

        def make_dbc(_):
            # Attach a connector to a fresh fake master once the import
            # fixture is ready; skip the version check since the schema is
            # deliberately old here.
            master = fakemaster.make_master()
            master.config.db['db_url'] = self.db_url
            self.db = DBConnector(master, self.basedir)
            return self.db.setup(check_version=False)
        d.addCallback(make_dbc)
        # note the connector isn't started, as we're testing upgrades
        return d

    def tearDown(self):
        return self.tearDownChangeImport()

    # tests

    def testUnicodeChange(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                files=["foo"],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf8"),
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c['author'], u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c['comments'], u"Frosty the \N{SNOWMAN}")
        d.addCallback(check)
        return d

    def testNonUnicodeChange(self):
        self.make_pickle(
            self.make_change(
                who="\xff\xff\x00",
                files=["foo"],
                comments="\xff\xff\x00",
                branch="b1",
                revision=12345))

        # Undecodable bytes must make the upgrade fail with UnicodeError.
        d = self.db.model.upgrade()
        return self.assertFailure(d, UnicodeError)

    def testAsciiChange(self):
        self.make_pickle(
            self.make_change(
                who="Frosty the Snowman",
                files=["foo"],
                comments="Frosty the Snowman",
                branch="b1",
                revision=12345))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c['author'], "Frosty the Snowman")
            self.assertEquals(c['comments'], "Frosty the Snowman")
        d.addCallback(check)
        return d

    def testUTF16Change(self):
        self.make_pickle(
            self.make_change(
                who=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                files=[u"foo".encode('utf16')],
                comments=u"Frosty the \N{SNOWMAN}".encode("utf16"),
                branch="b1",
                revision=12345),
            # instead of running contrib/fix_changes_pickle_encoding.py, we
            # just call the changemanager's recode_changes directly - it's
            # the function at the heart of the script anyway.
            recode_fn=lambda cm: cm.recode_changes('utf16', quiet=True))

        d = self.db.model.upgrade()
        d.addCallback(lambda _: self.db.changes.getChange(1))

        def check(c):
            self.failIf(c is None)
            self.assertEquals(c['author'], u"Frosty the \N{SNOWMAN}")
            self.assertEquals(c['comments'], u"Frosty the \N{SNOWMAN}")
        d.addCallback(check)
        return d
def setUp(self):
    # NOTE(review): setUpChangeImport's return value is discarded here —
    # if it returns a Deferred (as sibling variants suggest), the fixture
    # may not be ready when the connector is built; confirm.
    self.setUpChangeImport()
    self.db = DBConnector(self.db_url, self.basedir)
def make_dbc(_):
    # Callback: attach a real connector to a fresh fake master; skip the
    # schema version check since upgrade tests start from an old schema.
    master = fakemaster.make_master()
    master.config.db['db_url'] = self.db_url
    self.db = DBConnector(self.basedir)
    self.db.setServiceParent(master)
    return self.db.setup(check_version=False)