def __init__(self, source_repo, manifest_repo, build_names, build_type,
             incr_type, force, branch, manifest=constants.DEFAULT_MANIFEST,
             dry_run=True, master=False,
             lkgm_path_rel=constants.LKGM_MANIFEST):
  """Set up an LKGM Manager.

  Args:
    source_repo: Repository object for the source code.
    manifest_repo: Manifest repository for manifest versions/buildspecs.
    build_names: Identifiers for the build. Must match config_lib entries.
      If multiple identifiers are provided, the first item in the list must
      be an identifier for the group.
    build_type: Type of build. Must be a pfq type.
    incr_type: How we should increment this version - build|branch|patch
    force: Create a new manifest even if there are no changes.
    branch: Branch this builder is running on.
    manifest: Manifest to use for checkout. E.g. 'full' or 'buildtools'.
    dry_run: Whether we actually commit changes we make or not.
    master: Whether we are the master builder.
    lkgm_path_rel: Path to the LKGM symlink, relative to manifest dir.
  """
  super(LKGMManager, self).__init__(
      source_repo=source_repo, manifest_repo=manifest_repo,
      manifest=manifest, build_names=build_names, incr_type=incr_type,
      force=force, branch=branch, dry_run=dry_run, master=master)

  self.lkgm_path = os.path.join(self.manifest_dir, lkgm_path_rel)
  self.compare_versions_fn = _LKGMCandidateInfo.VersionCompare
  self.build_type = build_type

  # Chrome PFQ and PFQ's exist at the same time and version separately, so
  # each build type must get its own subdir in the manifest-versions
  # repository. Exact-match types first, then the broader CQ/PFQ families.
  subdir_for_type = {
      constants.CHROME_PFQ_TYPE: self.CHROME_PFQ_SUBDIR,
      constants.ANDROID_PFQ_TYPE: self.ANDROID_PFQ_SUBDIR,
      constants.TOOLCHAIN_TYPE: self.TOOLCHAIN_SUBDIR,
  }
  if self.build_type in subdir_for_type:
    self.rel_working_dir = subdir_for_type[self.build_type]
  elif config_lib.IsCQType(self.build_type):
    self.rel_working_dir = self.COMMIT_QUEUE_SUBDIR
  else:
    assert config_lib.IsPFQType(self.build_type)
    self.rel_working_dir = self.LKGM_SUBDIR
def testValidUnifiedMasterConfig(self):
  """Make sure any unified master configurations are valid."""
  for build_name, config in self.all_configs.iteritems():
    error = 'Unified config for %s has invalid values' % build_name
    # Unified masters must be internal and must rev both overlays.
    if config['master']:
      self.assertTrue(
          config['internal'] and config['manifest_version'], error)
      continue
    # Non-manifest-version slaves have nothing further to satisfy.
    if not config['manifest_version']:
      continue
    # Unified slaves can rev either public or both depending on whether
    # they are internal or not.
    if not config['internal']:
      self.assertEqual(config['overlays'], constants.PUBLIC_OVERLAYS, error)
    elif config_lib.IsCQType(config['build_type']):
      self.assertEqual(config['overlays'], constants.BOTH_OVERLAYS, error)
def GetSyncInstance(self):
  """Syncs the tree using one of the distributed sync logic paths.

  Returns:
    The instance of the sync stage to run.
  """
  config = self._run.config
  build_type = config.build_type

  # Pick the sync stage and its matching completion stage. Pre-CQ wins
  # outright; after that, CQ overrides PFQ bits so it must be tested first.
  if config.pre_cq:
    sync_stage = self._GetStageInstance(
        sync_stages.PreCQSyncStage, self.patch_pool.gerrit_patches)
    self.completion_stage_class = completion_stages.PreCQCompletionStage
    self.patch_pool.gerrit_patches = []
  elif config_lib.IsCQType(build_type):
    # A CQ master can be told to sync without applying any patches.
    if config.do_not_apply_cq_patches:
      sync_stage = self._GetStageInstance(
          sync_stages.MasterSlaveLKGMSyncStage)
    else:
      sync_stage = self._GetStageInstance(sync_stages.CommitQueueSyncStage)
    self.completion_stage_class = completion_stages.CommitQueueCompletionStage
  elif config_lib.IsPFQType(build_type):
    sync_stage = self._GetStageInstance(sync_stages.MasterSlaveLKGMSyncStage)
    self.completion_stage_class = (
        completion_stages.MasterSlaveSyncCompletionStage)
  elif config_lib.IsCanaryType(build_type):
    sync_stage = self._GetStageInstance(
        sync_stages.ManifestVersionedSyncStage)
    self.completion_stage_class = completion_stages.CanaryCompletionStage
  elif build_type == constants.TOOLCHAIN_TYPE:
    sync_stage = self._GetStageInstance(sync_stages.MasterSlaveLKGMSyncStage)
    self.completion_stage_class = (
        completion_stages.MasterSlaveSyncCompletionStage)
  else:
    sync_stage = self._GetStageInstance(
        sync_stages.ManifestVersionedSyncStage)
    self.completion_stage_class = (
        completion_stages.ManifestVersionedSyncCompletionStage)

  self.sync_stage = sync_stage
  return self.sync_stage
def PerformStage(self):
  """Clean/resync the checkout if needed, then push uprev & binhost commits.

  NOTE(review): block-level indentation was reconstructed from a collapsed
  source line; the nesting below follows the original comments (the sync and
  local-uprev steps belong to the cleanup/resync branch) — confirm against
  upstream history.
  """
  overlays, push_overlays = self._ExtractOverlays()
  staging_branch = None
  if self.stage_push:
    # stage_push is only meaningful for a master chrome PFQ build.
    if not config_lib.IsMasterChromePFQ(self._run.config):
      raise ValueError('This build must be a master chrome PFQ build '
                       'when stage_push is True.')
    build_id, db = self._run.GetCIDBHandle()
    # If the master passed BinHostTest and all the important slaves passed
    # UploadPrebuiltsTest, push uprev commits to a staging_branch.
    if (self.CheckMasterBinhostTest(db, build_id) and
        self.CheckSlaveUploadPrebuiltsTest(db, build_id)):
      staging_branch = ('refs/' + constants.PFQ_REF + '/' +
                        constants.STAGING_PFQ_BRANCH_PREFIX + str(build_id))

  assert push_overlays, 'push_overlays must be set to run this stage'

  # If we're a commit queue, we should clean out our local changes, resync,
  # and reapply our uprevs. This is necessary so that 1) we are sure to point
  # at the remote SHA1s, not our local SHA1s; 2) we can avoid doing a
  # rebase; 3) in the case of failure and staging_branch is None, we don't
  # submit the changes that were committed locally.
  #
  # If we're not a commit queue and the build succeeded, we can skip the
  # cleanup here. This is a cheap trick so that the Chrome PFQ pushes its
  # earlier uprev from the SyncChrome stage (it would be a bit tricky to
  # replicate the uprev here, so we'll leave it alone).
  # If we're not a commit queue and staging_branch is not None, we can skip
  # the cleanup here. When staging_branch is not None, we're going to push
  # the local commits generated in AFDOUpdateEbuild stage to the
  # staging_branch, cleaning up repository here will wipe out the local
  # commits.
  if (config_lib.IsCQType(self._run.config.build_type) or
      not (self.success or staging_branch is not None)):
    # Clean up our root and sync down the latest changes that were
    # submitted.
    commands.BuildRootGitCleanup(self._build_root)

    # Sync down the latest changes we have submitted.
    if self._run.options.sync:
      next_manifest = self._run.config.manifest
      repo = self.GetRepoRepository()
      repo.Sync(next_manifest)

    # Commit an uprev locally.
    if self._run.options.uprev and self._run.config.uprev:
      commands.UprevPackages(self._build_root, self._boards, overlays)

  # When prebuilts is True, if it's a successful run or staging_branch is
  # not None for a master-chrome-pfq run, update binhost conf
  if (self._run.config.prebuilts and
      (self.success or staging_branch is not None)):
    confwriter = prebuilts.BinhostConfWriter(self._run)
    confwriter.Perform()

  # Push the uprev and binhost commits.
  commands.UprevPush(self._build_root, push_overlays,
                     self._run.options.debug,
                     staging_branch=staging_branch)
  # Record in build metadata that this master chrome PFQ run pushed its
  # Chrome uprev successfully.
  if config_lib.IsMasterChromePFQ(self._run.config) and self.success:
    self._run.attrs.metadata.UpdateWithDict({'UprevvedChrome': True})