def upgradeDB(_): from buildbot.db import connector db = connector.DBConnector(None, config['db'], basedir=config['basedir']) if not config['quiet']: print "upgrading database" return db.model.upgrade()
def upgradeDatabase(config, master_cfg):
    """Upgrade the buildbot database to the current schema.

    Installs handlers that ignore common termination signals while the
    upgrade runs, since interrupting a migration can corrupt the database.
    Generator body (presumably decorated with @defer.inlineCallbacks at the
    call site -- decorator not visible here).
    """
    if not config['quiet']:
        print("upgrading database (%s)" %
              (stripUrlPassword(master_cfg.db['db_url'])))
        print("Warning: Stopping this process might cause data loss")

    def sighandler(signum, frame):
        # Normalize the multi-line warning into a single line.
        msg = " ".join("""
            WARNING: ignoring signal %s.
            This process should not be interrupted to avoid database corruption.
            If you really need to terminate it, use SIGKILL.
            """.split())
        print(msg % signum)

    # Only install handlers for signals this platform actually defines
    # (e.g. SIGBREAK is Windows-only).
    for signame in ("SIGTERM", "SIGINT", "SIGQUIT", "SIGHUP",
                    "SIGUSR1", "SIGUSR2", "SIGBREAK"):
        if hasattr(signal, signame):
            signal.signal(getattr(signal, signame), sighandler)

    master = BuildMaster(config['basedir'])
    master.config = master_cfg
    # Detach the master's own db service; we attach a fresh connector below.
    master.db.disownServiceParent()
    db = connector.DBConnector(basedir=config['basedir'])
    db.setServiceParent(master)
    yield db.setup(check_version=False, verbose=not config['quiet'])
    yield db.model.upgrade()
    yield db.masters.setAllMastersActiveLongTimeAgo()
def create_child_services(self):
    """Instantiate the master's child services and attach them as children."""
    # note that these are order-dependent.  If you get the order wrong,
    # you'll know it, as the master will fail to start.
    self.metrics = metrics.MetricLogObserver()
    self.metrics.setServiceParent(self)

    self.caches = cache.CacheManager()
    self.caches.setServiceParent(self)

    self.pbmanager = buildbot.pbmanager.PBManager()
    self.pbmanager.setServiceParent(self)

    self.change_svc = ChangeManager(self)
    self.change_svc.setServiceParent(self)

    self.botmaster = BotMaster(self)
    self.botmaster.setServiceParent(self)

    self.scheduler_manager = SchedulerManager(self)
    self.scheduler_manager.setServiceParent(self)

    self.user_manager = UserManagerManager(self)
    self.user_manager.setServiceParent(self)

    self.db = connector.DBConnector(self, self.basedir)
    self.db.setServiceParent(self)

    self.debug = debug.DebugServices(self)
    self.debug.setServiceParent(self)

    self.status = Status(self)
    self.status.setServiceParent(self)
def create_child_services(self):
    """Instantiate the master's child services (nine-era variant with
    buildslave manager, mq/data/www connectors, and a heartbeat timer)."""
    # note that these are order-dependent.  If you get the order wrong,
    # you'll know it, as the master will fail to start.
    self.metrics = metrics.MetricLogObserver()
    self.metrics.setServiceParent(self)

    self.caches = cache.CacheManager()
    self.caches.setServiceParent(self)

    self.pbmanager = buildbot.pbmanager.PBManager()
    self.pbmanager.setServiceParent(self)

    self.buildslaves = bslavemanager.BuildslaveManager(self)
    self.buildslaves.setServiceParent(self)

    self.change_svc = ChangeManager(self)
    self.change_svc.setServiceParent(self)

    self.botmaster = BotMaster(self)
    self.botmaster.setServiceParent(self)

    self.scheduler_manager = SchedulerManager(self)
    self.scheduler_manager.setServiceParent(self)

    self.user_manager = UserManagerManager(self)
    self.user_manager.setServiceParent(self)

    self.db = dbconnector.DBConnector(self, self.basedir)
    self.db.setServiceParent(self)

    self.mq = mqconnector.MQConnector(self)
    self.mq.setServiceParent(self)

    self.data = dataconnector.DataConnector(self)
    self.data.setServiceParent(self)

    self.www = wwwservice.WWWService(self)
    self.www.setServiceParent(self)

    self.debug = debug.DebugServices(self)
    self.debug.setServiceParent(self)

    self.status = Status(self)
    self.status.setServiceParent(self)

    # NOTE: attribute name spelled "Houskeeping" in the original; kept
    # as-is since other code may reference it.
    self.masterHouskeepingTimer = 0

    @defer.inlineCallbacks
    def heartbeat():
        if self.masterid is not None:
            yield self.data.updates.masterActive(name=self.name,
                                                 masterid=self.masterid)
        # force housekeeping once a day
        # (timer fires every 60s, so 24 * 60 ticks == one day)
        yield self.data.updates.expireMasters(
            (self.masterHouskeepingTimer % (24 * 60)) == 0)
        self.masterHouskeepingTimer += 1

    self.masterHeartbeatService = internet.TimerService(60, heartbeat)
    self.masterHeartbeatService.setServiceParent(self)
def setUpUpgradeTest(self):
    """Prepare a basedir for an upgrade test.

    Either extracts ``self.source_tarball`` (using its single top-level
    directory as the basedir) or creates an empty ``basedir``; then sets up
    a DBConnector against it with query logging enabled.  Returns the
    setup Deferred.
    """
    self.basedir = None

    if self.source_tarball:
        tarball = util.sibpath(__file__, self.source_tarball)
        if not os.path.exists(tarball):
            raise unittest.SkipTest(
                "'%s' not found (normal when not building from Git)"
                % tarball)

        tf = tarfile.open(tarball)
        prefixes = set()
        for inf in tf:
            tf.extract(inf)
            prefixes.add(inf.name.split('/', 1)[0])
        # (note that tf.extractall isn't available in py2.4)

        # get the top-level dir from the tarball
        assert len(prefixes) == 1, "tarball has multiple top-level dirs!"
        self.basedir = prefixes.pop()
    else:
        if not os.path.exists("basedir"):
            os.makedirs("basedir")
        self.basedir = os.path.abspath("basedir")

    master = fakemaster.make_master()
    master.config.db['db_url'] = self.db_url
    self.db = connector.DBConnector(master, self.basedir)
    d = self.db.setup(check_version=False)

    @d.addCallback
    def setup_logging(_):
        # log every SQL query the pool's engine issues
        querylog.log_from_engine(self.db.pool.engine)

    return d
def create_db(self): from buildbot.db import connector from buildbot.master import BuildMaster db = connector.DBConnector(BuildMaster(self.basedir), self.config['db'], basedir=self.basedir) if not self.config['quiet']: print "creating database" d = db.model.upgrade() return d
def setUpMigrateTest(self):
    """Set up a real database and a DBConnector for migration tests.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    self.basedir = os.path.abspath("basedir")
    self.setUpDirs('basedir')

    yield self.setUpRealDatabase()

    master = fakemaster.make_master()
    self.db = connector.DBConnector(self.basedir)
    self.db.setServiceParent(master)
    # reuse the already-set-up pool from setUpRealDatabase
    self.db.pool = self.db_pool
def setUp(self):
    """Create a fresh basedir with an upgraded sqlite schema and start
    a DBConnector against it."""
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    spec = dbspec.DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    manager = DBSchemaManager(spec, self.basedir)
    manager.upgrade()

    self.dbc = connector.DBConnector(spec)
    self.dbc.start()
def upgradeDatabase(config, master_cfg): if not config['quiet']: print "upgrading database (%s)" % (master_cfg.db['db_url']) master = BuildMaster(config['basedir']) master.config = master_cfg db = connector.DBConnector(master, basedir=config['basedir']) yield db.setup(check_version=False, verbose=not config['quiet']) yield db.model.upgrade()
def setUp(self):
    """Set up a real database with change/buildset tables and a DBConnector.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    yield self.setUpRealDatabase(table_names=[
        'changes', 'change_properties', 'change_files', 'patches',
        'sourcestamps', 'buildset_properties', 'buildsets',
        'sourcestampsets'])

    self.master = fakemaster.make_master()
    self.master.config = config.MasterConfig()
    self.db = connector.DBConnector(self.master, os.path.abspath('basedir'))
def setUp(self):
    """Set up a test reactor, a real database, and a DBConnector attached
    to a fake master.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    self.setUpTestReactor()
    yield self.setUpRealDatabase(table_names=[
        'changes', 'change_properties', 'change_files', 'patches',
        'sourcestamps', 'buildset_properties', 'buildsets',
        'sourcestampsets', 'builds', 'builders', 'masters',
        'buildrequests', 'workers'])

    self.master = fakemaster.make_master(self)
    self.master.config = config.MasterConfig()
    self.db = connector.DBConnector(os.path.abspath('basedir'))
    self.db.setServiceParent(self.master)
def setUp(self):
    """Create a fresh basedir, upgrade an sqlite schema, and start a
    DBConnector (dbspec-era variant)."""
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    spec = dbspec.DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    # For testing against mysql, uncomment this
    # spec = dbspec.DBSpec.from_url("mysql://buildbot@localhost/buildbot_schedulers", self.basedir)
    manager = DBSchemaManager(spec, self.basedir)
    manager.upgrade()

    self.dbc = connector.DBConnector(spec)
    self.dbc.start()
def upgradeDatabase(config, master_cfg):
    """Upgrade the database and mark all masters as long-inactive.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    if not config['quiet']:
        print("upgrading database (%s)" % (master_cfg.db['db_url']))

    master = BuildMaster(config['basedir'])
    master.config = master_cfg
    # detach the master's own db service; we attach a fresh connector below
    master.db.disownServiceParent()
    db = connector.DBConnector(basedir=config['basedir'])
    db.setServiceParent(master)

    yield db.setup(check_version=False, verbose=not config['quiet'])
    yield db.model.upgrade()
    yield db.masters.setAllMastersActiveLongTimeAgo()
def setUp(self):
    """Set up a full fake master serving HTTP for www integration tests.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    # set up a full master serving HTTP
    yield self.setUpRealDatabase(table_names=['masters', 'objects',
                                              'object_state'],
                                 sqlite_memory=False)

    master = fakemaster.FakeMaster(reactor)

    master.config.db = dict(db_url=self.db_url)
    master.db = dbconnector.DBConnector('basedir')
    yield master.db.setServiceParent(master)
    yield master.db.setup(check_version=False)

    master.config.mq = dict(type='simple')
    master.mq = mqconnector.MQConnector()
    yield master.mq.setServiceParent(master)
    yield master.mq.setup()

    master.data = dataconnector.DataConnector()
    yield master.data.setServiceParent(master)

    # port 0: let the OS pick a free port; the real URL is built below
    master.config.www = dict(
        port='tcp:0:interface=127.0.0.1',
        debug=True,
        auth=auth.NoAuth(),
        authz=authz.Authz(),
        avatar_methods=[],
        logfileName='http.log')
    master.www = wwwservice.WWWService()
    yield master.www.setServiceParent(master)
    yield master.www.startService()
    yield master.www.reconfigServiceWithBuildbotConfig(master.config)

    # force a fixed session uid so requests are authenticated consistently
    session = mock.Mock()
    session.uid = "0"
    master.www.site.sessionFactory = mock.Mock(return_value=session)

    # now that we have a port, construct the real URL and insert it into
    # the config.  The second reconfig isn't really required, but doesn't
    # hurt.
    self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum()
    self.url = unicode2bytes(self.url)
    master.config.buildbotURL = self.url
    yield master.www.reconfigServiceWithBuildbotConfig(master.config)

    self.master = master

    # build an HTTP agent, using an explicit connection pool if Twisted
    # supports it (Twisted 13.0.0 and up)
    if hasattr(client, 'HTTPConnectionPool'):
        self.pool = client.HTTPConnectionPool(reactor)
        self.agent = client.Agent(reactor, pool=self.pool)
    else:
        self.pool = None
        self.agent = client.Agent(reactor)
def setUp(self):
    """Set up a real database and DBConnector (deferredGenerator-era
    variant using waitForDeferred)."""
    wfd = defer.waitForDeferred(
        self.setUpRealDatabase(table_names=[
            'changes', 'change_properties', 'change_links', 'change_files',
            'patches', 'sourcestamps', 'buildset_properties', 'buildsets'
        ]))
    yield wfd
    wfd.getResult()

    self.master = fakemaster.make_master()
    self.master.config = config.MasterConfig()
    self.db = connector.DBConnector(self.master, os.path.abspath('basedir'))
def setUp(self):
    """Create a fresh basedir with an upgraded sqlite schema, start a
    DBConnector, and freeze the scheduler clock at a fixed time."""
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    spec = dbspec.DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    manager = DBSchemaManager(spec, self.basedir)
    manager.upgrade()

    self.dbc = connector.DBConnector(spec)
    self.dbc.start()

    # patch "now" so scheduler timing is deterministic in tests
    self._patcher = mock.patch("buildbotcustom.scheduler.now")
    self._time = self._patcher.start()
    self._time.return_value = 123
def upgradeDatabase(config, master_cfg):
    """Upgrade the database (variant that hides the db_url password when
    printing and warns against interrupting the process).

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    if not config['quiet']:
        print("upgrading database (%s)" %
              (stripUrlPassword(master_cfg.db['db_url'])))
        print("Warning: Stopping this process might cause data loss")

    master = BuildMaster(config['basedir'])
    master.config = master_cfg
    # detach the master's own db service; we attach a fresh connector below
    master.db.disownServiceParent()
    db = connector.DBConnector(basedir=config['basedir'])
    db.setServiceParent(master)

    yield db.setup(check_version=False, verbose=not config['quiet'])
    yield db.model.upgrade()
    yield db.masters.setAllMastersActiveLongTimeAgo()
def upgradeMaster(config): m = Maker(config) if not config['quiet']: print "upgrading basedir" basedir = os.path.expanduser(config['basedir']) # TODO: check Makefile # TODO: check TAC file # check web files: index.html, default.css, robots.txt m.upgrade_public_html({ 'bg_gradient.jpg': util.sibpath(__file__, "../status/web/files/bg_gradient.jpg"), 'default.css': util.sibpath(__file__, "../status/web/files/default.css"), 'robots.txt': util.sibpath(__file__, "../status/web/files/robots.txt"), 'favicon.ico': util.sibpath(__file__, "../status/web/files/favicon.ico"), }) m.populate_if_missing(os.path.join(basedir, "master.cfg.sample"), util.sibpath(__file__, "sample.cfg"), overwrite=True) # if index.html exists, use it to override the root page tempalte m.move_if_present(os.path.join(basedir, "public_html/index.html"), os.path.join(basedir, "templates/root.html")) if not config['quiet']: print "checking master.cfg" wfd = defer.waitForDeferred( m.check_master_cfg(expected_db_url=config['db'])) yield wfd rc = wfd.getResult() if rc == 0: from buildbot.db import connector from buildbot.master import BuildMaster if not config['quiet']: print "upgrading database" db = connector.DBConnector(BuildMaster(config['basedir']), config['db'], basedir=config['basedir']) wfd = defer.waitForDeferred(db.model.upgrade()) yield wfd wfd.getResult() if not config['quiet']: print "upgrade complete" yield 0 else: yield rc
def setUp(self):
    """Create a fresh basedir with an upgraded sqlite schema, start a
    DBConnector, and register a single scheduler; returns the
    addSchedulers Deferred."""
    if os.path.exists(self.basedir):
        shutil.rmtree(self.basedir)
    os.makedirs(self.basedir)
    spec = dbspec.DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
    manager = DBSchemaManager(spec, self.basedir)
    manager.upgrade()

    self.dbc = connector.DBConnector(spec)
    self.dbc.start()

    self.s = Scheduler(name="s", builderNames=["b1"])
    self.s.parent = mock.Mock()
    self.s.parent.db = self.dbc
    return self.dbc.addSchedulers([self.s])
def createDB(config, _noMonkey=False): # apply the db monkeypatches (and others - no harm) if not _noMonkey: # pragma: no cover monkeypatches.patch_all() # create a master with the default configuration, but with db_url # overridden master_cfg = config_module.MasterConfig() master_cfg.db['db_url'] = config['db'] master = BuildMaster(config['basedir']) master.config = master_cfg db = connector.DBConnector(master, config['basedir']) yield db.setup(check_version=False, verbose=not config['quiet']) if not config['quiet']: print "creating database (%s)" % (master_cfg.db['db_url'], ) yield db.model.upgrade()
def create_db(self): from buildbot.db import connector from buildbot.master import BuildMaster from buildbot import config as config_module # create a master with the default configuration, but with db_url # overridden master_cfg = config_module.MasterConfig() master_cfg.db['db_url'] = self.config['db'] master = BuildMaster(self.basedir) master.config = master_cfg db = connector.DBConnector(master, self.basedir) d = db.setup(check_version=False) if not self.config['quiet']: print "creating database (%s)" % (master_cfg.db['db_url'], ) d = db.model.upgrade() return d
def setUp(self):
    """Set up a full fake master serving HTTP (older www variant with a
    'url' config key and reconfigService).

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    # set up a full master serving HTTP
    yield self.setUpRealDatabase(table_names=['masters'],
                                 sqlite_memory=False)

    master = fakemaster.FakeMaster()

    master.config.db = dict(db_url=self.db_url)
    master.db = dbconnector.DBConnector(master, 'basedir')
    yield master.db.setup(check_version=False)

    master.config.mq = dict(type='simple')
    master.mq = mqconnector.MQConnector(master)
    master.mq.setup()

    master.data = dataconnector.DataConnector(master)

    # port 0: let the OS pick a free port; the real URL is inserted below
    master.config.www = dict(port='tcp:0:interface=127.0.0.1',
                             debug=True,
                             auth=auth.NoAuth(),
                             url="not yet known",
                             avatar_methods=[])
    master.www = wwwservice.WWWService(master)
    yield master.www.startService()
    yield master.www.reconfigService(master.config)

    # now that we have a port, construct the real URL and insert it into
    # the config.  The second reconfig isn't really required, but doesn't
    # hurt.
    self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum()
    master.config.www['url'] = self.url
    yield master.www.reconfigService(master.config)

    self.master = master

    # build an HTTP agent, using an explicit connection pool if Twisted
    # supports it (Twisted 13.0.0 and up)
    if hasattr(client, 'HTTPConnectionPool'):
        self.pool = client.HTTPConnectionPool(reactor)
        self.agent = client.Agent(reactor, pool=self.pool)
    else:
        self.pool = None
        self.agent = client.Agent(reactor)
def setUpUpgradeTest(self):
    """Extract the source tarball into the working dir and create a
    DBConnector pointed at its (single) top-level directory."""
    self.basedir = None

    tarball = util.sibpath(__file__, self.source_tarball)
    if not os.path.exists(tarball):
        raise unittest.SkipTest(
            "'%s' not found (normal when not building from Git)" % tarball)

    tf = tarfile.open(tarball)
    prefixes = set()
    for inf in tf:
        tf.extract(inf)
        prefixes.add(inf.name.split('/', 1)[0])
    # (note that tf.extractall isn't available in py2.4)

    # get the top-level dir from the tarball
    assert len(prefixes) == 1, "tarball has multiple top-level dirs!"
    self.basedir = prefixes.pop()

    self.db = connector.DBConnector(mock.Mock(), self.db_url, self.basedir)
def populate_database(config):
    """Fill the database with generated users and builds.

    Seeds the RNG from ``config['seed']`` (or the current time) so runs
    can be reproduced.  Generator body (presumably @defer.inlineCallbacks
    at the call site).
    """
    master = BuildMaster(config['baseDir'])
    master.config = load_config(config, config['configFile'])
    db = connector.DBConnector(master, basedir=config['baseDir'])

    # deterministic runs: reuse an explicit seed when one is given
    seed = int(time())
    if config['seed']:
        seed = int(config['seed'])
    random.seed(seed)
    if not config['quiet']:
        print("Seed =", seed)

    yield db.setup(check_version=False, verbose=not config['quiet'])
    users = yield populate_user(db, int(config['users']),
                                verbose=not config['quiet'])
    yield populate_build(db, int(config['builds']),
                         master.config.builders,
                         master.config.projects, users,
                         verbose=not config['quiet'])
def setUpUpgradeTest(self):
    """Prepare a basedir (from tarball or empty) and a DBConnector for an
    upgrade test, with SQL query logging.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    # set up the "real" db if desired
    if self.use_real_db:
        # note this changes self.db_url
        yield self.setUpRealDatabase(sqlite_memory=False)

    self.basedir = None

    if self.source_tarball:
        tarball = util.sibpath(__file__, self.source_tarball)
        if not os.path.exists(tarball):
            raise unittest.SkipTest(
                f"'{tarball}' not found (normal when not building from Git)"
            )

        with tarfile.open(tarball) as tf:
            prefixes = set()
            for inf in tf:
                tf.extract(inf)
                prefixes.add(inf.name.split('/', 1)[0])
        # (note that tf.extractall isn't available in py2.4)

        # get the top-level dir from the tarball
        assert len(prefixes) == 1, "tarball has multiple top-level dirs!"
        self.basedir = prefixes.pop()
    else:
        if not os.path.exists("basedir"):
            os.makedirs("basedir")
        self.basedir = os.path.abspath("basedir")

    self.master = master = fakemaster.make_master(self)
    master.config.db['db_url'] = self.db_url
    self.db = connector.DBConnector(self.basedir)
    yield self.db.setServiceParent(master)
    yield self.db.setup(check_version=False)

    self._sql_log_handler = querylog.start_log_queries()
}) m.populate_if_missing(os.path.join(basedir, "master.cfg.sample"), util.sibpath(__file__, "sample.cfg"), overwrite=True) # if index.html exists, use it to override the root page tempalte m.move_if_present(os.path.join(basedir, "public_html/index.html"), os.path.join(basedir, "templates/root.html")) from buildbot.db import connector from buildbot.master import BuildMaster if not config['quiet']: print "upgrading database (%s)" % (master_cfg.db['db_url']) master = BuildMaster(config['basedir']) master.config = master_cfg db = connector.DBConnector(master, basedir=config['basedir']) wfd = defer.waitForDeferred( db.setup(check_version=False, verbose=not config['quiet'])) yield wfd wfd.getResult() wfd = defer.waitForDeferred(db.model.upgrade()) yield wfd wfd.getResult() if not config['quiet']: print "upgrade complete" yield 0 class MasterOptions(MakerBase):
def Run(args):
    """Migrate a buildbot master's database from postgres to sqlite.

    In dry-run mode (default) the conversion is written to a scratch
    sqlite file and the master is left untouched; with ``--no-dry-run``
    the master is stopped, converted in place, and restarted.
    Generator body (presumably an inlineCallbacks-style coroutine).
    """
    if args.no_dry_run:
        sqlite_filename = 'state.sqlite'
    else:
        sqlite_filename = 'dry-run-psql-conversion.sqlite'

    # Read the dbconfig.  This will fail if the config doesn't exist and
    # the master doesn't use postgresql.
    dbconfig = {}
    execfile('.dbconfig', dbconfig)

    if args.no_dry_run:
        # Stop master manager from touching this master while we play with it.
        if os.path.exists('.stop_master_lifecycle'):
            raise Exception('A .stop_master_lifecycle file already exists')
        logging.info('Creating .stop_master_lifecycle file')
        with open('.stop_master_lifecycle', 'w') as fh:
            fh.write('migrate_psql_to_sqlite.py')

        # Stop the master.
        logging.info('Stopping master')
        subprocess.check_call(['make', 'stop'])
        subprocess.check_call(['make', 'wait'])

    # Dump the postgres database.
    logging.info('Dumping postgres database %s', dbconfig['dbname'])
    env = os.environ.copy()
    env['PGPASSWORD'] = dbconfig['password']
    sql = subprocess.check_output([
        'pg_dump', '-d', dbconfig['dbname'], '-U', dbconfig['username'],
        '-h', 'localhost', '--data-only', '--inserts'
    ], env=env)

    # Strip out postgres-specific things.
    sql = '\n'.join(
        line for line in sql.splitlines()
        if not line.startswith('SET')
        and not line.startswith('INSERT INTO migrate_version')
        and 'pg_catalog.setval' not in line)

    # Delete any existing sqlite database.
    if os.path.exists(sqlite_filename):
        os.unlink(sqlite_filename)

    # Create the new sqlite database.
    logging.info('Creating empty sqlite database in %s', sqlite_filename)
    db = connector.DBConnector(FakeBuildMaster(),
                               'sqlite:///%s' % sqlite_filename, '.')
    yield db.model.upgrade()

    # Import the data into the sqlite database.
    logging.info('Filling sqlite database %s', sqlite_filename)
    conn = sqlite3.connect(sqlite_filename)
    cursor = conn.cursor()
    # speed over durability: this is a one-shot bulk import
    cursor.execute('pragma synchronous = off')
    cursor.execute('pragma journal_mode = memory')
    cursor.executescript(sql)
    conn.commit()
    conn.close()

    if args.no_dry_run:
        # Remove the .dbconfig to make it use the sqlite database.
        logging.info('Moving .dbconfig file to dbconfig.bak')
        os.rename('.dbconfig', 'dbconfig.bak')

        # Start the master.
        logging.info('Starting master')
        subprocess.check_call(['make', 'start'])

        # Let master manager take over again.
        logging.info('Removing .stop_master_lifecycle file')
        os.unlink('.stop_master_lifecycle')

        logging.info('Done!')
    else:
        logging.info('Dry-run done!')
def setUp(self):
    """Set up a full fake master serving HTTP plus a webhook-driven
    Telegram bot whose outgoing messages are captured in
    ``self.sent_messages``.

    Generator body (presumably @defer.inlineCallbacks at the call site).
    """
    yield self.setUpRealDatabase(table_names=[
        'objects', 'object_state', 'masters', 'workers',
        'configured_workers', 'connected_workers', 'builder_masters',
        'builders'
    ], sqlite_memory=False)

    master = fakemaster.FakeMaster(reactor)

    master.data = dataconnector.DataConnector()
    master.data.setServiceParent(master)

    master.config.db = dict(db_url=self.db_url)
    master.db = dbconnector.DBConnector('basedir')
    master.db.setServiceParent(master)
    yield master.db.setup(check_version=False)

    master.config.mq = dict(type='simple')
    master.mq = mqconnector.MQConnector()
    master.mq.setServiceParent(master)
    master.mq.setup()

    # port 0: let the OS pick a free port; the real URL is built below
    master.config.www = dict(port='tcp:0:interface=127.0.0.1',
                             debug=True,
                             auth=auth.NoAuth(),
                             authz=authz.Authz(),
                             avatar_methods=[],
                             logfileName='http.log')
    master.www = wwwservice.WWWService()
    master.www.setServiceParent(master)
    yield master.www.startService()
    yield master.www.reconfigServiceWithBuildbotConfig(master.config)

    # force a fixed session uid so requests are authenticated consistently
    session = mock.Mock()
    session.uid = "0"
    master.www.site.sessionFactory = mock.Mock(return_value=session)

    # now that we have a port, construct the real URL and insert it into
    # the config.  The second reconfig isn't really required, but doesn't
    # hurt.
    self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum()
    self.url = unicode2bytes(self.url)
    master.config.buildbotURL = self.url
    yield master.www.reconfigServiceWithBuildbotConfig(master.config)

    self.master = master

    self.agent = client.Agent(reactor)

    # create a telegram bot service
    tb = master.config.services['TelegramBot'] = telegram.TelegramBot(
        bot_token='12345:secret', useWebhook=True,
        chat_ids=[-123456], notify_events=['worker'])
    tb._get_http = self.get_http
    tb.setServiceParent(self.master)
    self.bot_url = self.url + b"telegram12345:secret"
    yield tb.startService()

    # capture outgoing bot messages instead of hitting the Telegram API
    self.sent_messages = []

    def send_message(chat, message, **kwargs):
        self.sent_messages.append((chat, message))
    tb.bot.send_message = send_message
def make_dbc(_):
    """Callback: build a DBConnector on a fake master and return its
    setup Deferred.  ``_`` is an ignored chained result; ``self`` is
    captured from the enclosing scope (this is a closure)."""
    master = fakemaster.make_master()
    self.db = connector.DBConnector(master, self.basedir)
    return self.db.setup(check_version=False)
def make_dbc(_):
    """Callback: build a DBConnector on a fake master and reuse the
    pre-built pool.  ``_`` is an ignored chained result; ``self`` is
    captured from the enclosing scope (this is a closure)."""
    master = fakemaster.make_master()
    self.db = connector.DBConnector(master, self.basedir)
    self.db.pool = self.db_pool