def CanMergeBuildRequests(req1, req2):
  """Determine whether or not two BuildRequests can be merged.

  Rewrite of buildbot.sourcestamp.SourceStamp.canBeMergedWith(), which
  verifies that:
  1. req1.source.repository == req2.source.repository
  2. req1.source.project == req2.source.project
  3. req1.source.branch == req2.source.branch
  4. req1.patch == None and req2.patch = None
  5. (req1.source.changes and req2.source.changes) or \
     (not req1.source.changes and not req2.source.changes and \
      req1.source.revision == req2.source.revision)

  Of the above, we want 1, 2, 3, and 5. Instead of 4, we want to make sure
  that neither request is a Trybot request.
  """
  # Source stamp attributes which must match exactly (#1, #2, #3 above).
  for attr in ('repository', 'project', 'branch'):
    if getattr(req1.source, attr) != getattr(req2.source, attr):
      return False

  # Never merge try requests (#4 above).
  if (builder_name_schema.IsTrybot(req1.buildername) or
      builder_name_schema.IsTrybot(req2.buildername)):
    return False

  # Either both requests carry changes, or neither does (#5 above).
  if bool(req1.source.changes) != bool(req2.source.changes):
    return False

  if req1.source.changes:
    # Honor an explicit "no merge" keyword in any change comment.
    for change in req1.source.changes + req2.source.changes:
      if KEYWORD_NO_MERGE_BUILDS in change.comments:
        return False
  elif req1.source.revision != req2.source.revision:
    # Change-less requests must be pinned to the same revision.
    return False

  return True
def _UploadJSONResults(builder_name, build_number, dest_gsbase, gs_subdir,
                       full_json_path, gzipped=True, gsutil_path='gsutil',
                       issue_number=None):
  """Upload a JSON results file to Google Storage using gsutil.

  The destination is <dest_gsbase>/<gs_subdir>/YYYY/MM/DD/HH/<builder_name>/;
  for try jobs the path is additionally prefixed with 'trybot' and suffixed
  with the build number and codereview issue number.

  Args:
    builder_name: string; name of the builder which produced the results.
    build_number: string or int; number of the build.
    dest_gsbase: string; gs:// base URL to upload into.
    gs_subdir: string; subdirectory under dest_gsbase.
    full_json_path: string; local path of the JSON file to upload.
    gzipped: bool; whether to gzip the file before uploading.
    gsutil_path: string; path to the gsutil binary.
    issue_number: string or int; codereview issue, required for trybots.

  Raises:
    Exception: if this is a trybot build and issue_number is missing.
    subprocess.CalledProcessError: if the gsutil invocation fails.
  """
  now = datetime.utcnow()
  # Bucket results by UTC date and hour: YYYY/MM/DD/HH (zero-padded).
  gs_json_path = now.strftime('%Y/%m/%d/%H')
  gs_dir = '/'.join((gs_subdir, gs_json_path, builder_name))
  if builder_name_schema.IsTrybot(builder_name):
    if not issue_number:
      raise Exception('issue_number build property is missing!')
    # Coerce to str: buildbot supplies build/issue numbers as ints, and
    # str.join raises TypeError on non-string elements.
    gs_dir = '/'.join(('trybot', gs_dir, str(build_number), str(issue_number)))
  full_path_to_upload = full_json_path
  file_to_upload = os.path.basename(full_path_to_upload)
  http_header = ['Content-Type:application/json']
  if gzipped:
    http_header.append('Content-Encoding:gzip')
    gzipped_file = os.path.join(tempfile.gettempdir(), file_to_upload)
    # Compress into a temp file and upload that in place of the original.
    with open(full_path_to_upload, 'rb') as f_in:
      with gzip.open(gzipped_file, 'wb') as f_out:
        f_out.writelines(f_in)
    full_path_to_upload = gzipped_file
  cmd = ['python', gsutil_path]
  for header in http_header:
    cmd.extend(['-h', header])
  cmd.extend(['cp', '-a', 'public-read', full_path_to_upload,
              '/'.join((dest_gsbase, gs_dir, file_to_upload))])
  # Parenthesized print of a single value works under both Python 2 and 3.
  print(' '.join(cmd))
  subprocess.check_call(cmd)
def __init__(self, status):
  """Expose a JSON resource for every try builder known to the master."""
  status_json.JsonResource.__init__(self, status)
  trybot_names = [b for b in self.status.getBuilderNames()
                  if builder_name_schema.IsTrybot(b)]
  for name in trybot_names:
    resource = status_json.BuilderJsonResource(status, status.getBuilder(name))
    self.putChild(name, resource)
def skia(c):
  """Base config for Skia."""
  cfg = builder_name_schema.DictForBuilderName(c.BUILDER_NAME)
  c.builder_cfg = cfg
  c.build_targets = build_targets_from_builder_dict(cfg)
  c.configuration = cfg.get('configuration', CONFIG_DEBUG)
  c.role = cfg['role']
  # Test bots run tests; perf steps run on perf bots, on Debug test bots,
  # and on any Valgrind bot.
  c.do_test_steps = c.role == builder_name_schema.BUILDER_ROLE_TEST
  is_perf_bot = c.role == builder_name_schema.BUILDER_ROLE_PERF
  is_debug_test = (c.role == builder_name_schema.BUILDER_ROLE_TEST and
                   c.configuration == CONFIG_DEBUG)
  c.do_perf_steps = (is_perf_bot or is_debug_test or
                     'Valgrind' in c.BUILDER_NAME)
  c.gyp_env.GYP_DEFINES.update(gyp_defs_from_builder_dict(cfg))
  c.slave_cfg = slaves_cfg.get(c.MASTER_NAME)[c.SLAVE_NAME]
  c.is_trybot = builder_name_schema.IsTrybot(c.BUILDER_NAME)
def SetupMaster(ActiveMaster):
  """Build and return the buildmaster config dict for this master.

  Wires up the change source, slaves, builders/schedulers, status targets
  and (on the production host only) mail notifiers and the Rietveld status
  push.
  """
  # Buildmaster config dict.
  c = {}
  config.DatabaseSetup(c, require_dbconfig=ActiveMaster.is_production_host)

  ####### CHANGESOURCES

  # Polls config.Master.trunk_url for changes
  poller = GitilesPoller(
      repo_url=ActiveMaster.repo_url,
      branches=['master'],
      pollInterval=10,
      revlinktmpl='https://skia.googlesource.com/skia/+/%s')
  c['change_source'] = [poller]

  ####### SLAVES

  # Load the slave list. We need some information from it in order to
  # produce the builders.
  slaves = slaves_list.SlavesList('slaves.cfg', ActiveMaster.project_name)

  ####### BUILDERS

  # Load the builders list.
  builders = chromium_utils.ParsePythonCfg('builders.cfg')['builders']

  # Configure the Builders and Schedulers.
  SetupBuildersAndSchedulers(c=c, builders=builders, slaves=slaves,
                             ActiveMaster=ActiveMaster)

  ####### BUILDSLAVES

  # The 'slaves' list defines the set of allowable buildslaves. List all the
  # slaves registered to a builder. Remove dupes.
  c['slaves'] = master_utils.AutoSetupSlaves(c['builders'],
                                             config.Master.GetBotPassword())
  master_utils.VerifySetup(c, slaves)

  ####### STATUS TARGETS

  c['buildbotURL'] = ActiveMaster.buildbot_url

  # Adds common status and tools to this master.
  master_utils.AutoSetupMaster(
      c, ActiveMaster,
      public_html='../../../build/masters/master.client.skia/public_html',
      templates=['../../../build/masters/master.client.skia/templates',
                 '../../../build/masters/master.chromium/templates'],
      tagComparator=poller.comparator,
      enable_http_status_push=ActiveMaster.is_production_host,
      order_console_by_time=True,
      console_repo_filter=ActiveMaster.repo_url,
      # Hide trybots from the console view.
      console_builder_filter=lambda b: not builder_name_schema.IsTrybot(b))

  # Register the trybot JSON status resource.
  with status_json.JsonStatusHelper() as json_helper:
    json_helper.putChild('trybots', status_json.TryBuildersJsonResource)

  # Email and codereview integrations are production-only side effects.
  if ActiveMaster.is_production_host:
    # Build result emails.
    c['status'].append(
        skia_notifier.SkiaMailNotifier(
            fromaddr=ActiveMaster.from_address,
            mode='change',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Try job result emails.
    c['status'].append(
        skia_notifier.SkiaTryMailNotifier(
            fromaddr=ActiveMaster.from_address,
            subject="try %(result)s for %(reason)s @ r%(revision)s",
            mode='all',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Rietveld status push.
    c['status'].append(
        TryServerHttpStatusPush(serverUrl=ActiveMaster.code_review_site))

  return c
def SetupMaster(ActiveMaster):
  """Build and return the buildmaster config dict for this master.

  Wires up the change source, slaves, builders/schedulers, status targets,
  request merging and log-size caps; on production (non-SkiaInternal) hosts
  it also adds mail notifiers and a GrandCentral status push.
  """
  # Buildmaster config dict.
  c = {}
  config.DatabaseSetup(c)

  ####### CHANGESOURCES

  # Polls config.Master.trunk_url for changes
  poller = GitilesPoller(
      repo_url=ActiveMaster.repo_url,
      branches=[POLLING_BRANCH],
      pollInterval=10,
      revlinktmpl='https://skia.googlesource.com/skia/+/%s')
  c['change_source'] = [poller]

  ####### SLAVES

  # Load the slave list. We need some information from it in order to
  # produce the builders.
  slaves = slaves_list.SlavesList('slaves.cfg', ActiveMaster.project_name)

  ####### BUILDERS

  # Load the builders list.
  builders = chromium_utils.ParsePythonCfg('builders.cfg')['builders']

  # Configure the Builders and Schedulers.
  SetupBuildersAndSchedulers(c=c, builders=builders, slaves=slaves,
                             ActiveMaster=ActiveMaster)

  ####### BUILDSLAVES

  # The 'slaves' list defines the set of allowable buildslaves. List all the
  # slaves registered to a builder. Remove dupes.
  c['slaves'] = master_utils.AutoSetupSlaves(c['builders'],
                                             config.Master.GetBotPassword())
  master_utils.VerifySetup(c, slaves)

  ####### STATUS TARGETS

  c['buildbotURL'] = ActiveMaster.buildbot_url

  # Adds common status and tools to this master.
  master_utils.AutoSetupMaster(
      c, ActiveMaster,
      public_html='../../../build/masters/master.client.skia/public_html',
      templates=['../../../build/masters/master.client.skia/templates',
                 '../../../build/masters/master.chromium/templates'],
      tagComparator=poller.comparator,
      enable_http_status_push=ActiveMaster.is_production_host,
      order_console_by_time=True,
      console_repo_filter=ActiveMaster.repo_url,
      # Hide trybots from the console view.
      console_builder_filter=lambda b: not builder_name_schema.IsTrybot(b))

  # Register the trybot JSON status resource.
  with status_json.JsonStatusHelper() as json_helper:
    json_helper.putChild('trybots', status_json.TryBuildersJsonResource)

  # Email and external push integrations run only on the public production
  # host; SkiaInternal is explicitly excluded.
  if (ActiveMaster.is_production_host and
      ActiveMaster.project_name != 'SkiaInternal'):
    # Build result emails.
    # NOTE(review): the event logger sits inside this production-only branch
    # in the linearized source — confirm it is not meant to be unconditional.
    c['status'].append(status_logger.StatusEventLogger())
    c['status'].append(
        skia_notifier.SkiaMailNotifier(
            fromaddr=ActiveMaster.from_address,
            mode='change',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Try job result emails.
    c['status'].append(
        skia_notifier.SkiaTryMailNotifier(
            fromaddr=ActiveMaster.from_address,
            subject="try %(result)s for %(reason)s @ r%(revision)s",
            mode='all',
            relayhost=config.Master.smtp,
            lookup=master_utils.UsersAreEmails()))

    # Push status updates to GrandCentral.
    c['status'].append(
        HttpStatusPush(serverUrl='https://grandcentral.skia.org/buildbot'))

  # Allow compatible pending build requests to be merged.
  c['mergeRequests'] = CanMergeBuildRequests

  ###### LOGS

  # Skia bots have been known to have run away builds continuously dumping to
  # stdout and creating ~100GB logs. See crbug.com/589654 for context.
  c['logMaxSize'] = 1024 * 1024 * 100  # 100MB
  c['logMaxTailSize'] = 1024 * 32  # 32KB

  return c
def buildMessage(self, name, build, results):
  """Send a build-result email, but only for try jobs.

  Non-try builds are silently ignored (the method falls through and
  returns None without sending mail).
  """
  first_builder = build[0].getBuilder()
  if not builder_name_schema.IsTrybot(first_builder.name):
    return
  return TryMailNotifier.buildMessage(self, name, build, results)
# use an up-to-date Skia.
# This list specifies which Chromium builders to "copy".
_builders = [
  # SPEC Module      Test Spec File          Builder Names
  (chromium_linux, 'chromium.linux.json', ['Linux Builder', 'Linux Tests']),
  (chromium_win, 'chromium.win.json', ['Win Builder', 'Win7 Tests (1)']),
  (chromium_mac, 'chromium.mac.json', ['Mac Builder', 'Mac10.9 Tests']),
]

SPEC = {
  'settings': {
    'build_gs_bucket': 'chromium-skia-gm',
  },
  'builders': {},
}

# Clone each referenced Chromium builder config twice: once under its own
# name and once under its trybot name, pointing both at the Skia gclient
# config and patch root.
for _module, _spec_file, _names in _builders:
  for _base in _names:
    for _name in (_base, builder_name_schema.TrybotName(_base)):
      _cfg = copy.deepcopy(_module.SPEC['builders'][_base])
      _cfg['gclient_config'] = 'chromium_skia'
      _parent = _cfg.get('parent_buildername')
      if _parent:
        # A trybot's parent must also be the trybot flavor of the parent.
        if builder_name_schema.IsTrybot(_name):
          _parent = builder_name_schema.TrybotName(_parent)
        _cfg['parent_buildername'] = _parent
      _cfg['patch_root'] = 'src/third_party/skia'
      _cfg['testing']['test_spec_file'] = _spec_file
      SPEC['builders'][_name] = _cfg