def main(argv):
  """Verifies that every known master.cfg loads cleanly.

  Spawns one `runbuild.py --test-config` subprocess per non-blacklisted
  master (public masters always; internal masters too when a build_internal
  checkout is present), collects each subprocess's combined stdout/stderr,
  and prints a report of any master.cfg that failed to load.

  Args:
    argv: full argv; only a '-v/--verbose' flag is accepted.

  Returns:
    True (truthy exit status) if any master.cfg failed to load, else False.
  """
  start_time = time.time()
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='store_true')
  options, args = parser.parse_args(argv[1:])
  if args:
    parser.error('Unknown arguments: %s' % args)
  # Blacklisted masters are counted as skipped rather than tested.
  num_skipped = len(BLACKLIST)
  masters_list = GetMasterCmds(
      masters=master_cfg_utils.GetMasters(include_internal=False),
      blacklist=BLACKLIST,
      pythonpaths=None)
  # Internal masters are only tested when the sibling build_internal
  # checkout exists; its test config supplies the extra blacklist/paths.
  build_internal = os.path.join(BASE_DIR, '..', 'build_internal')
  if os.path.exists(build_internal):
    internal_test_data = chromium_utils.ParsePythonCfg(
        os.path.join(build_internal, 'tests', 'internal_masters_cfg.py'),
        fail_hard=True)
    internal_cfg = internal_test_data['masters_cfg_test']
    num_skipped += len(internal_cfg['blacklist'])
    masters_list.extend(GetMasterCmds(
        masters=master_cfg_utils.GetMasters(include_public=False),
        blacklist=internal_cfg['blacklist'],
        pythonpaths=[os.path.join(build_internal, p)
                     for p in internal_cfg['paths']]))
  # Start all subprocesses first so the configs are parsed in parallel,
  # then drain them sequentially; temporary master passwords must stay in
  # place until every child has finished.
  with master_cfg_utils.TemporaryMasterPasswords():
    processes = [subprocess.Popen([
        sys.executable,
        os.path.join(BASE_DIR, 'scripts', 'slave', 'runbuild.py'),
        cmd.name, '--test-config'],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=cmd.env)
        for cmd in masters_list]
    results = [(proc.communicate()[0], proc.returncode)
               for proc in processes]

  def GetCommandStr(cmd, cmd_output):
    # Format one master's output: its path followed by '> '-prefixed lines.
    out = [cmd.path]
    out.extend('> ' + line for line in cmd_output.splitlines())
    return '\n'.join(out + [''])

  if options.verbose:
    for cmd, (out, code) in zip(masters_list, results):
      # Failures will be printed below
      if code == 0 and out:
        print GetCommandStr(cmd, out)
  # A nonzero return code marks a failed master.cfg load.
  failures = [(cmd, out) for cmd, (out, r) in zip(masters_list, results) if r]
  if failures:
    print '\nFAILURE The following master.cfg files did not load:\n'
    for cmd, out in failures:
      print GetCommandStr(cmd, out)
  test_time = round(time.time() - start_time, 1)
  # NOTE(review): len(masters_list) includes the failed masters, so the
  # "successfully" count printed here is really the total tested — confirm
  # whether it should be len(masters_list) - len(failures).
  print 'Parsed %d masters successfully, %d failed, %d skipped in %gs.' % (
      len(masters_list), len(failures), num_skipped, test_time)
  return bool(failures)
def SpawnSubdirBuildbotsIfNeeded(): """Creates /c directory structure and spawns other bots on host as needed.""" # 'make start' spawns subdirs bots only when run in /b. # TODO(ilevy): Remove this restriction after run_slave.py refactor. if chromium_utils.GetActiveSubdir(): return print 'Spawning other slaves on this host as needed.' print 'Run make stopall to terminate.' for slave in chromium_utils.GetSlavesForHost(): subdir = slave.get('subdir') if not subdir: continue botdir = os.path.join(GetRoot(), 'c', subdir) def GClientCall(command): # We just synced depot_tools, so disable gclient auto-sync. env = dict(os.environ, DEPOT_TOOLS_UPDATE='0') subprocess.check_call([GetGClientPath()] + command, env=env, cwd=botdir) gclient_solutions = chromium_utils.ParsePythonCfg( os.path.join(ROOT_DIR, '.gclient')).get('solutions', []) assert len(gclient_solutions) == 1 if subdir and not os.path.exists(botdir): print 'Creating %s' % botdir os.mkdir(botdir) GClientCall(['config', gclient_solutions[0]['url'], '--git-deps']) GClientCall(['sync']) shutil.copyfile( os.path.join(BUILD_DIR, 'site_config', '.bot_password'), os.path.join(botdir, 'build', 'site_config', '.bot_password')) bot_slavedir = os.path.join(botdir, 'build', 'slave') if not os.path.exists(os.path.join(bot_slavedir, 'twistd.pid')): print 'Spawning slave in %s' % bot_slavedir subprocess.check_call(['make', 'start'], cwd=bot_slavedir)
def main(): # Get public slaves. slaves_list = chromium_utils.GetAllSlaves(fail_hard=True, include_internal=False) # Get internal slaves, if appropriate. build_internal = os.path.join(BASE_DIR, '..', 'build_internal') if os.path.exists(build_internal): internal_test_data = chromium_utils.ParsePythonCfg(os.path.join( build_internal, 'tests', 'internal_masters_cfg.py'), fail_hard=True) internal_cfg = internal_test_data['masters_cfg_test'] internal_sys_path = [ os.path.join(build_internal, p) for p in internal_cfg['paths'] ] + sys.path with using_sys_path(internal_sys_path): slaves_list.extend( chromium_utils.GetAllSlaves(fail_hard=True, include_public=False)) status = 0 slaves = {} for slave in slaves_list: mastername = slave['mastername'] slavename = chromium_utils.EntryToSlaveName(slave) if slave.get('subdir') == 'b': print 'Illegal subdir for %s: %s' % (mastername, slavename) status = 1 if slavename and slave.get('hostname') not in WHITELIST: slaves.setdefault(slavename, []).append(mastername) for slavename, masters in slaves.iteritems(): if len(masters) > 1: print '%s duplicated in masters: %s' % (slavename, ' '.join(masters)) status = 1 return status
def main(argv):
  """Checks that every listed master's site_config class is importable.

  Builds the public master-directory -> ActiveMaster-class-name map (plus
  the internal map from build_internal when present) and hands the whole
  thing to real_main for verification.

  Args:
    argv: full argv; repeated '-v/--verbose' raises the logging level.

  Returns:
    Whatever real_main returns (used as the process exit status).
  """
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='count', default=0)
  options, args = parser.parse_args(argv[1:])
  if args:
    parser.error('Unknown args: %s' % args)
  # Each -v steps ERROR -> WARNING -> INFO -> DEBUG, clamped at DEBUG.
  levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
  logging.basicConfig(level=levels[min(options.verbose, len(levels)-1)])

  # Remove site_config's we don't add ourselves. Can cause issues when running
  # this test under a buildbot-spawned process.
  sys.path = [x for x in sys.path if not x.endswith('site_config')]
  base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
  build_internal = os.path.join(os.path.dirname(base_dir), 'build_internal')
  sys.path.extend(os.path.normpath(os.path.join(base_dir, d)) for d in (
      'site_config',
      os.path.join(build_internal, 'site_config'),
  ))

  # Maps master directory name -> expected ActiveMaster class name.
  public_masters = {
      'master.chromium': 'Chromium',
      'master.chromium.chrome': 'ChromiumChrome',
      'master.chromium.chromedriver': 'ChromiumChromeDriver',
      'master.chromium.chromiumos': 'ChromiumChromiumOS',
      'master.chromium.endure': 'ChromiumEndure',
      'master.chromium.fyi': 'ChromiumFYI',
      'master.chromium.gatekeeper': 'Gatekeeper',
      'master.chromium.git': 'ChromiumGit',
      'master.chromium.gpu': 'ChromiumGPU',
      'master.chromium.gpu.fyi': 'ChromiumGPUFYI',
      'master.chromium.linux': 'ChromiumLinux',
      'master.chromium.lkgr': 'ChromiumLKGR',
      'master.chromium.mac': 'ChromiumMac',
      'master.chromium.memory': 'ChromiumMemory',
      'master.chromium.memory.fyi': 'ChromiumMemoryFYI',
      'master.chromium.perf': 'ChromiumPerf',
      'master.chromium.swarm': 'ChromiumSwarm',
      'master.chromium.webkit': 'ChromiumWebkit',
      'master.chromium.webrtc': 'ChromiumWebRTC',
      'master.chromium.webrtc.fyi': 'ChromiumWebRTCFYI',
      'master.chromium.win': 'ChromiumWin',
      'master.chromiumos': 'ChromiumOS',
      'master.chromiumos.tryserver': 'ChromiumOSTryServer',
      'master.client.dart': 'Dart',
      'master.client.dart.fyi': 'DartFYI',
      'master.client.drmemory': 'DrMemory',
      'master.client.dynamorio': 'DynamoRIO',
      'master.client.libyuv': 'Libyuv',
      'master.client.libvpx': 'Libvpx',
      'master.client.nacl': 'NativeClient',
      'master.client.nacl.ports': 'NativeClientPorts',
      'master.client.nacl.ports.git': 'NativeClientPortsGit',
      'master.client.nacl.sdk': 'NativeClientSDK',
      'master.client.nacl.sdk.addin': 'NativeClientSDKAddIn',
      'master.client.nacl.sdk.mono': 'NativeClientSDKMono',
      'master.client.nacl.toolchain': 'NativeClientToolchain',
      'master.client.oilpan': 'Oilpan',
      'master.client.pagespeed': 'PageSpeed',
      'master.client.polymer': 'Polymer',
      'master.client.sfntly': 'Sfntly',
      'master.client.skia': 'Skia',
      'master.client.syzygy': 'Syzygy',
      'master.client.v8': 'V8',
      'master.client.v8.branches': 'V8Branches',
      'master.client.webrtc': 'WebRTC',
      'master.client.webrtc.fyi': 'WebRTCFYI',
      'master.experimental': 'Experimental',
      'master.push.canary': 'PushCanary',
      'master.tryserver.chromium': 'TryServer',
      'master.tryserver.chromium.gpu': 'GpuTryServer',
      'master.tryserver.blink': 'BlinkTryServer',
      'master.tryserver.libyuv': 'LibyuvTryServer',
      'master.tryserver.nacl': 'NativeClientTryServer',
      'master.tryserver.v8': 'V8TryServer',
      'master.tryserver.webrtc': 'WebRTCTryServer',
  }
  # Keyed by checkout root so real_main knows where each map's masters live.
  all_masters = { base_dir: public_masters }
  if os.path.exists(build_internal):
    internal_test_data = chromium_utils.ParsePythonCfg(
        os.path.join(build_internal, 'tests', 'internal_masters_cfg.py'),
        fail_hard=True)
    all_masters[build_internal] = internal_test_data['masters_test']
  return real_main(all_masters)
def SetupMaster(ActiveMaster):
  """Builds the BuildmasterConfig dict for a Skia master.

  Args:
    ActiveMaster: the master's site_config class; supplies repo_url,
        project_name, buildbot_url, production-host flag, etc.

  Returns:
    The populated buildbot config dict `c`.
  """
  # Buildmaster config dict.
  c = {}
  config.DatabaseSetup(c, require_dbconfig=ActiveMaster.is_production_host)

  ####### CHANGESOURCES

  # Polls config.Master.trunk_url for changes
  poller = GitilesPoller(
      repo_url=ActiveMaster.repo_url,
      branches=['master'],
      pollInterval=10,
      revlinktmpl='https://skia.googlesource.com/skia/+/%s')
  c['change_source'] = [poller]

  ####### SLAVES

  # Load the slave list. We need some information from it in order to
  # produce the builders.
  slaves = slaves_list.SlavesList('slaves.cfg', ActiveMaster.project_name)

  ####### BUILDERS

  # Load the builders list.
  builders = chromium_utils.ParsePythonCfg('builders.cfg')['builders']

  # Configure the Builders and Schedulers.
  SetupBuildersAndSchedulers(c=c, builders=builders, slaves=slaves,
                             ActiveMaster=ActiveMaster)

  ####### BUILDSLAVES

  # The 'slaves' list defines the set of allowable buildslaves. List all the
  # slaves registered to a builder. Remove dupes.
  c['slaves'] = master_utils.AutoSetupSlaves(c['builders'],
                                             config.Master.GetBotPassword())
  master_utils.VerifySetup(c, slaves)

  ####### STATUS TARGETS

  c['buildbotURL'] = ActiveMaster.buildbot_url

  # Adds common status and tools to this master.
  master_utils.AutoSetupMaster(
      c, ActiveMaster,
      public_html='../../../build/masters/master.client.skia/public_html',
      templates=[
          '../../../build/masters/master.client.skia/templates',
          '../../../build/masters/master.chromium/templates'
      ],
      tagComparator=poller.comparator,
      enable_http_status_push=ActiveMaster.is_production_host,
      order_console_by_time=True,
      console_repo_filter=ActiveMaster.repo_url,
      console_builder_filter=lambda b: not builder_name_schema.IsTrybot(b))

  # Expose the trybot list as JSON under the master's status pages.
  with status_json.JsonStatusHelper() as json_helper:
    json_helper.putChild('trybots', status_json.TryBuildersJsonResource)

  # Email and code-review pushes only make sense on the production host.
  if ActiveMaster.is_production_host:
    # Build result emails.
    c['status'].append(
        skia_notifier.SkiaMailNotifier(
            fromaddr=ActiveMaster.from_address,
            mode='change',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Try job result emails.
    c['status'].append(
        skia_notifier.SkiaTryMailNotifier(
            fromaddr=ActiveMaster.from_address,
            subject="try %(result)s for %(reason)s @ r%(revision)s",
            mode='all',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Rietveld status push.
    c['status'].append(
        TryServerHttpStatusPush(serverUrl=ActiveMaster.code_review_site))

  return c
def SpawnSubdirBuildbotsIfNeeded(): """Creates "nested/*" directory structure and spawns other bots on host as needed. Blocks on liveliness of the subdir buildbot processes if script is called with --nodaemon. Returns: Boolean indicating if subdir buildbots are used. """ # Ensure this is not a subdir buildbot itself. Subdir buildbots within # subdir buildbots are not supported. if chromium_utils.GetActiveSubdir(): return False subdirs = _GetSubdirBuildbotPaths() if not subdirs: # No subdir buildbots required. Continue with the main buildbot process. return False # Checking the subdir twistd pids is implemented for posix only. assert os.name == 'posix', 'Can only us subdir buildbots with posix.' print 'Spawning other slaves on this host as needed.' print 'Run make stopall to terminate.' for botdir in subdirs: if not os.path.exists(SUBDIR_ROOT): print 'Creating %s' % SUBDIR_ROOT os.mkdir(SUBDIR_ROOT) def GClientCall(command, fail_ok=False): # We just synced depot_tools, so disable gclient auto-sync. # pylint: disable=cell-var-from-loop env = EnvWithDepotTools(DEPOT_TOOLS_UPDATE='0') try: subprocess.check_call( [GetGClientPath()] + command, env=env, cwd=botdir) except Exception as e: if fail_ok: print >> sys.stderr, e print >> sys.stderr, 'gclient failed; proceeding anyway...' else: raise gclient_solutions = chromium_utils.ParsePythonCfg( os.path.join(ROOT_DIR, '.gclient')).get('solutions', []) assert len(gclient_solutions) == 1 if not os.path.exists(botdir): print 'Creating %s' % botdir os.mkdir(botdir) GClientCall(['config', gclient_solutions[0]['url'], '--deps-file', gclient_solutions[0]['deps_file']]) # Allow failures, e.g. some hooks occasionally fail. Otherwise we # wouldn't copy the pw file and then never exercise this path again. 
GClientCall(['sync', '--break_repo_locks'], fail_ok=True) shutil.copyfile( os.path.join(BUILD_DIR, 'site_config', '.bot_password'), os.path.join(botdir, 'build', 'site_config', '.bot_password')) if os.path.exists(GetBotoFilePath()): shutil.copyfile( GetBotoFilePath(), GetBotoFilePath(build=os.path.join(botdir, 'build')), ) bot_slavedir = os.path.join(botdir, 'build', 'slave') twistd_pid_file = os.path.join(bot_slavedir, 'twistd.pid') if (not os.path.exists(twistd_pid_file) or not _CheckTwistdRuns(twistd_pid_file)): print 'Spawning slave in %s' % bot_slavedir subprocess.check_call(['make', 'start'], cwd=bot_slavedir) if '--nodaemon' in sys.argv: # Block on liveliness of the subdir buildbots if called with --nodaemon. _CheckSubdirBuildbotLiveliness() return True
def main(argv):
  """Checks that every listed master's site_config class is importable.

  Builds the public master-directory -> ActiveMaster-class-name map (plus
  the internal map from build_internal when present) and hands the whole
  thing to real_main for verification.

  Args:
    argv: full argv; repeated '-v/--verbose' raises the logging level.

  Returns:
    Whatever real_main returns (used as the process exit status).
  """
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='count', default=0)
  options, args = parser.parse_args(argv[1:])
  if args:
    parser.error('Unknown args: %s' % args)
  # Each -v steps ERROR -> WARNING -> INFO -> DEBUG, clamped at DEBUG.
  levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
  logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])

  # Remove site_config's we don't add ourselves. Can cause issues when running
  # this test under a buildbot-spawned process.
  sys.path = [x for x in sys.path if not x.endswith('site_config')]
  base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
  build_internal = os.path.join(os.path.dirname(base_dir), 'build_internal')
  sys.path.extend(
      os.path.normpath(os.path.join(base_dir, d)) for d in (
          'site_config',
          os.path.join(build_internal, 'site_config'),
      ))

  # Maps master directory name -> expected ActiveMaster class name.
  public_masters = {
      'master.chromium': 'Chromium',
      'master.chromium.android': 'ChromiumAndroid',
      'master.chromium.chrome': 'ChromiumChrome',
      'master.chromium.chromedriver': 'ChromiumChromeDriver',
      'master.chromium.chromiumos': 'ChromiumChromiumos',
      'master.chromium.fyi': 'ChromiumFYI',
      'master.chromium.gatekeeper': 'Gatekeeper',
      'master.chromium.goma': 'ChromiumGoma',
      'master.chromium.gpu': 'ChromiumGPU',
      'master.chromium.gpu.fyi': 'ChromiumGPUFYI',
      'master.chromium.infra': 'Infra',
      'master.chromium.infra.cron': 'InfraCron',
      'master.chromium.linux': 'ChromiumLinux',
      'master.chromium.lkgr': 'ChromiumLKGR',
      'master.chromium.mac': 'ChromiumMac',
      'master.chromium.memory': 'ChromiumMemory',
      'master.chromium.memory.fyi': 'ChromiumMemoryFYI',
      'master.chromium.mojo': 'ChromiumMojo',
      'master.chromium.perf': 'ChromiumPerf',
      'master.chromium.perf.fyi': 'ChromiumPerfFyi',
      'master.chromium.swarm': 'ChromiumSwarm',
      'master.chromium.tools.build': 'ChromiumToolsBuild',
      'master.chromium.webkit': 'ChromiumWebkit',
      'master.chromium.webrtc': 'ChromiumWebRTC',
      'master.chromium.webrtc.fyi': 'ChromiumWebRTCFYI',
      'master.chromium.win': 'ChromiumWin',
      'master.chromiumos': 'ChromiumOS',
      'master.chromiumos.chromium': 'ChromiumOSChromium',
      'master.chromiumos.tryserver': 'ChromiumOSTryServer',
      'master.client.art': 'ART',
      'master.client.boringssl': 'Boringssl',
      'master.client.catapult': 'Catapult',
      'master.client.crashpad': 'ClientCrashpad',
      'master.client.dart': 'Dart',
      'master.client.dart.fyi': 'DartFYI',
      'master.client.dart.packages': 'DartPackages',
      'master.client.drmemory': 'DrMemory',
      'master.client.dynamorio': 'DynamoRIO',
      'master.client.fletch': 'Dartino',
      'master.client.flutter': 'ClientFlutter',
      'master.client.gyp': 'GYP',
      'master.client.legion': 'ClientLegion',
      'master.client.libyuv': 'Libyuv',
      'master.client.libvpx': 'Libvpx',
      'master.client.mojo': 'Mojo',
      'master.client.nacl': 'NativeClient',
      'master.client.nacl.ports': 'WebPorts',
      'master.client.nacl.sdk': 'NativeClientSDK',
      'master.client.nacl.toolchain': 'NativeClientToolchain',
      'master.client.ndk': 'NDK',
      'master.client.pdfium': 'Pdfium',
      'master.client.skia': 'Skia',
      'master.client.skia.android': 'SkiaAndroid',
      'master.client.skia.compile': 'SkiaCompile',
      'master.client.skia.fyi': 'SkiaFYI',
      'master.client.syzygy': 'Syzygy',
      'master.client.v8': 'V8',
      'master.client.v8.branches': 'V8Branches',
      'master.client.v8.fyi': 'V8FYI',
      'master.client.v8.ports': 'V8Ports',
      'master.client.wasm.llvm': 'WasmLlvm',
      'master.client.webrtc': 'WebRTC',
      'master.client.webrtc.fyi': 'WebRTCFYI',
      'master.tryserver.chromium.android': 'TryserverChromiumAndroid',
      'master.tryserver.chromium.angle': 'TryServerANGLE',
      'master.tryserver.chromium.linux': 'TryServerChromiumLinux',
      'master.tryserver.chromium.mac': 'TryServerChromiumMac',
      'master.tryserver.chromium.win': 'TryServerChromiumWin',
      'master.tryserver.chromium.perf': 'ChromiumPerfTryServer',
      'master.tryserver.client.catapult': 'CatapultTryserver',
      'master.tryserver.client.custom_tabs_client': 'CustomTabsClientTryserver',
      'master.tryserver.client.mojo': 'MojoTryServer',
      'master.tryserver.client.pdfium': 'PDFiumTryserver',
      'master.tryserver.client.syzygy': 'SyzygyTryserver',
      'master.tryserver.blink': 'BlinkTryServer',
      'master.tryserver.infra': 'InfraTryServer',
      'master.tryserver.libyuv': 'LibyuvTryServer',
      'master.tryserver.nacl': 'NativeClientTryServer',
      'master.tryserver.v8': 'V8TryServer',
      'master.tryserver.webrtc': 'WebRTCTryServer',
  }
  # Keyed by checkout root so real_main knows where each map's masters live.
  all_masters = {base_dir: public_masters}
  if os.path.exists(build_internal):
    internal_test_data = chromium_utils.ParsePythonCfg(
        os.path.join(build_internal, 'tests', 'internal_masters_cfg.py'),
        fail_hard=True)
    all_masters[build_internal] = internal_test_data['masters_test']
  return real_main(all_masters)
def SetupMaster(ActiveMaster):
  """Builds the BuildmasterConfig dict for a Skia master.

  Args:
    ActiveMaster: the master's site_config class; supplies repo_url,
        project_name, buildbot_url, production-host flag, etc.

  Returns:
    The populated buildbot config dict `c`.
  """
  # Buildmaster config dict.
  c = {}
  config.DatabaseSetup(c)

  ####### CHANGESOURCES

  # Polls config.Master.trunk_url for changes
  poller = GitilesPoller(
      repo_url=ActiveMaster.repo_url,
      branches=[POLLING_BRANCH],
      pollInterval=10,
      revlinktmpl='https://skia.googlesource.com/skia/+/%s')
  c['change_source'] = [poller]

  ####### SLAVES

  # Load the slave list. We need some information from it in order to
  # produce the builders.
  slaves = slaves_list.SlavesList('slaves.cfg', ActiveMaster.project_name)

  ####### BUILDERS

  # Load the builders list.
  builders = chromium_utils.ParsePythonCfg('builders.cfg')['builders']

  # Configure the Builders and Schedulers.
  SetupBuildersAndSchedulers(c=c, builders=builders, slaves=slaves,
                             ActiveMaster=ActiveMaster)

  ####### BUILDSLAVES

  # The 'slaves' list defines the set of allowable buildslaves. List all the
  # slaves registered to a builder. Remove dupes.
  c['slaves'] = master_utils.AutoSetupSlaves(c['builders'],
                                             config.Master.GetBotPassword())
  master_utils.VerifySetup(c, slaves)

  ####### STATUS TARGETS

  c['buildbotURL'] = ActiveMaster.buildbot_url

  # Adds common status and tools to this master.
  master_utils.AutoSetupMaster(
      c, ActiveMaster,
      public_html='../../../build/masters/master.client.skia/public_html',
      templates=[
          '../../../build/masters/master.client.skia/templates',
          '../../../build/masters/master.chromium/templates'
      ],
      tagComparator=poller.comparator,
      enable_http_status_push=ActiveMaster.is_production_host,
      order_console_by_time=True,
      console_repo_filter=ActiveMaster.repo_url,
      console_builder_filter=lambda b: not builder_name_schema.IsTrybot(b))

  # Expose the trybot list as JSON under the master's status pages.
  with status_json.JsonStatusHelper() as json_helper:
    json_helper.putChild('trybots', status_json.TryBuildersJsonResource)

  # Emails and status pushes only on the production host, and not for the
  # SkiaInternal project.
  if (ActiveMaster.is_production_host and
      ActiveMaster.project_name != 'SkiaInternal'):
    c['status'].append(status_logger.StatusEventLogger())

    # Build result emails.
    c['status'].append(
        skia_notifier.SkiaMailNotifier(
            fromaddr=ActiveMaster.from_address,
            mode='change',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Try job result emails.
    c['status'].append(
        skia_notifier.SkiaTryMailNotifier(
            fromaddr=ActiveMaster.from_address,
            subject="try %(result)s for %(reason)s @ r%(revision)s",
            mode='all',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Push status updates to GrandCentral.
    c['status'].append(
        HttpStatusPush(serverUrl='https://grandcentral.skia.org/buildbot'))

  # NOTE(review): reconstructed nesting — mergeRequests and the log limits
  # appear to apply unconditionally (not only on production); confirm against
  # the original file's indentation.
  c['mergeRequests'] = CanMergeBuildRequests

  ###### LOGS

  # Skia bots have been known to have run away builds continously dumping to
  # stdout and creating ~100GB logs. See crbug.com/589654 for context.
  c['logMaxSize'] = 1024 * 1024 * 100  # 100MB
  c['logMaxTailSize'] = 1024 * 32  # 32KB

  return c