def PerformStage(self):
  self.Initialize()

  with osutils.TempDir() as tempdir:
    # Save off the last manifest.
    fresh_sync = True
    if os.path.exists(self.repo.directory) and not self._run.options.clobber:
      old_filename = os.path.join(tempdir, 'old.xml')
      try:
        old_contents = self.repo.ExportManifest()
      except cros_build_lib.RunCommandError as e:
        logging.warning(str(e))
      else:
        osutils.WriteFile(old_filename, old_contents)
        fresh_sync = False

    # Sync.
    self.ManifestCheckout(self.GetNextManifest())

    # Print the blamelist.
    if fresh_sync:
      logging.PrintBuildbotStepText('(From scratch)')
    elif self._run.options.buildbot:
      lkgm_manager.GenerateBlameList(self.repo, old_filename)

    # Incremental builds request an additional build before patching changes.
    if self._run.config.build_before_patching:
      pre_build_passed = self.RunPrePatchBuild()
      if not pre_build_passed:
        logging.PrintBuildbotStepText('Pre-patch build failed.')
def PerformStage(self):
  chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
  chroot_exists = os.path.isdir(self._build_root)
  replace = self._run.config.chroot_replace or self.force_chroot_replace
  pre_ver = None

  if chroot_exists and not replace:
    # Make sure the chroot has a valid version before we update it.
    pre_ver = cros_sdk_lib.GetChrootVersion(chroot_path)
    if pre_ver is None:
      logging.PrintBuildbotStepText('Replacing broken chroot')
      logging.PrintBuildbotStepWarnings()
      replace = True

  if not chroot_exists or replace:
    use_sdk = (self._run.config.use_sdk and not self._run.options.nosdk)
    pre_ver = None
    commands.MakeChroot(buildroot=self._build_root,
                        replace=replace,
                        use_sdk=use_sdk,
                        chrome_root=self._run.options.chrome_root,
                        extra_env=self._portage_extra_env,
                        use_image=self._run.config.chroot_use_image,
                        cache_dir=self._run.options.cache_dir)

  post_ver = cros_sdk_lib.GetChrootVersion(chroot_path)
  if pre_ver is not None and pre_ver != post_ver:
    logging.PrintBuildbotStepText('%s->%s' % (pre_ver, post_ver))
  else:
    logging.PrintBuildbotStepText(post_ver)
def PerformStage(self):
  chrome_atom_to_build = None
  if self._chrome_rev:
    if (self._chrome_rev == constants.CHROME_REV_SPEC and
        self._run.options.chrome_version):
      self.chrome_version = self._run.options.chrome_version
      logging.info('Using chrome version from options.chrome_version: %s',
                   self.chrome_version)
    else:
      self.chrome_version = self._GetChromeVersionFromMetadata()
      if self.chrome_version:
        logging.info('Using chrome version from the metadata dictionary: %s',
                     self.chrome_version)

    # Perform chrome uprev.
    try:
      chrome_atom_to_build = commands.MarkChromeAsStable(
          self._build_root, self._run.manifest_branch,
          self._chrome_rev, self._boards,
          chrome_version=self.chrome_version)
    except commands.ChromeIsPinnedUprevError as e:
      # If the uprev failed due to a chrome pin, record that failure (so
      # that the build ultimately fails) but try again without the pin, to
      # allow the slave to test the newer chrome anyway.
      chrome_atom_to_build = e.new_chrome_atom
      if chrome_atom_to_build:
        results_lib.Results.Record(self.name, e)
        logging.PrintBuildbotStepFailure()
        logging.error('Chrome is pinned. Attempting to continue build for '
                      'chrome atom %s anyway but build will ultimately fail.',
                      chrome_atom_to_build)
        logging.info('Deleting pin file at %s and proceeding.',
                     CHROMEPIN_MASK_PATH)
        osutils.SafeUnlink(CHROMEPIN_MASK_PATH)
      else:
        raise

  kwargs = {}
  if self._chrome_rev == constants.CHROME_REV_SPEC:
    kwargs['revision'] = self.chrome_version
    logging.PrintBuildbotStepText('revision %s' % kwargs['revision'])
  else:
    if not self.chrome_version:
      self.chrome_version = self._run.DetermineChromeVersion()

    kwargs['tag'] = self.chrome_version
    logging.PrintBuildbotStepText('tag %s' % kwargs['tag'])

  useflags = self._run.config.useflags
  commands.SyncChrome(self._build_root, self._run.options.chrome_root,
                      useflags, **kwargs)
  if (self._chrome_rev and not chrome_atom_to_build and
      self._run.options.buildbot and
      self._run.config.build_type == constants.CHROME_PFQ_TYPE):
    logging.info('Chrome already uprevved. Nothing else to do.')
    raise failures_lib.ExitEarlyException(
        'SyncChromeStage finished and exited early.')
def PerformStage(self):
  chrome_atom_to_build = None
  if self._chrome_rev:
    if (self._chrome_rev == constants.CHROME_REV_SPEC and
        self._run.options.chrome_version):
      self.chrome_version = self._run.options.chrome_version
      logging.info('Using chrome version from options.chrome_version: %s',
                   self.chrome_version)
    else:
      self.chrome_version = self._GetChromeVersionFromMetadata()
      if self.chrome_version:
        logging.info('Using chrome version from the metadata dictionary: %s',
                     self.chrome_version)

    # Perform chrome uprev.
    try:
      chrome_atom_to_build = commands.MarkChromeAsStable(
          self._build_root, self._run.manifest_branch,
          self._chrome_rev, self._boards,
          chrome_version=self.chrome_version)
    except commands.ChromeIsPinnedUprevError as e:
      # If the uprev failed due to a chrome pin, record that failure (so
      # that the build ultimately fails) but try again without the pin, to
      # allow the slave to test the newer chrome anyway.
      chrome_atom_to_build = e.new_chrome_atom
      if chrome_atom_to_build:
        results_lib.Results.Record(self.name, e)
        logging.PrintBuildbotStepFailure()
        logging.error('Chrome is pinned. Unpinning chrome and continuing '
                      'build for chrome atom %s. This stage will be marked '
                      'as failed to prevent an uprev.',
                      chrome_atom_to_build)
        logging.info('Deleting pin file at %s and proceeding.',
                     CHROMEPIN_MASK_PATH)
        osutils.SafeUnlink(CHROMEPIN_MASK_PATH)
      else:
        raise

  kwargs = {}
  if self._chrome_rev == constants.CHROME_REV_SPEC:
    kwargs['revision'] = self.chrome_version
    logging.PrintBuildbotStepText('revision %s' % kwargs['revision'])
  else:
    if not self.chrome_version:
      self.chrome_version = self._run.DetermineChromeVersion()

    kwargs['tag'] = self.chrome_version
    logging.PrintBuildbotStepText('tag %s' % kwargs['tag'])

  useflags = self._run.config.useflags
  git_cache_dir = (
      self._run.options.chrome_preload_dir or
      self._run.options.git_cache_dir)
  commands.SyncChrome(self._build_root, self._run.options.chrome_root,
                      useflags, git_cache_dir=git_cache_dir, **kwargs)
def _AnnotateNoStatBuilders(self, no_stat):
  """Annotate the build statuses fetched from Buildbucket.

  Some builds may fail to upload statuses to GS. If the builds were
  scheduled by Buildbucket, get the build statuses and annotate the results.

  Args:
    no_stat: Config names of the slave builds with None status.
  """
  buildbucket_info_dict = buildbucket_lib.GetBuildInfoDict(
      self._run.attrs.metadata)

  for config_name in no_stat:
    if config_name in buildbucket_info_dict:
      buildbucket_id = buildbucket_info_dict[config_name].buildbucket_id
      assert buildbucket_id is not None, 'buildbucket_id is None'
      try:
        content = self.buildbucket_client.GetBuildRequest(
            buildbucket_id, self._run.options.debug)

        status = buildbucket_lib.GetBuildStatus(content)
        result = buildbucket_lib.GetBuildResult(content)
        text = '%s: [status] %s [result] %s' % (config_name, status, result)

        if result == constants.BUILDBUCKET_BUILDER_RESULT_FAILURE:
          failure_reason = buildbucket_lib.GetBuildFailureReason(content)
          if failure_reason:
            text += ' [failure_reason] %s' % failure_reason
        elif result == constants.BUILDBUCKET_BUILDER_RESULT_CANCELED:
          cancel_reason = buildbucket_lib.GetBuildCancelationReason(content)
          if cancel_reason:
            text += ' [cancelation_reason] %s' % cancel_reason

        dashboard_url = buildbucket_lib.GetBuildURL(content)
        if dashboard_url:
          logging.PrintBuildbotLink(text, dashboard_url)
        else:
          logging.PrintBuildbotStepText(text)
      except buildbucket_lib.BuildbucketResponseException as e:
        logging.error('Cannot get status for %s: %s', config_name, e)
        logging.PrintBuildbotStepText(
            'No status found for build %s buildbucket_id %s' %
            (config_name, buildbucket_id))
    else:
      logging.PrintBuildbotStepText(
          "%s wasn't scheduled by master." % config_name)
def PerformStage(self):
  versions, branches, targets = self._UpdateBoardDictsForAndroidBuildInfo()

  # Unfortunately we can't inspect Android build info on slaves from masters,
  # so metadata is usually unavailable on masters (e.g. master-release). An
  # exception is builders uprev'ing Android; their info is available from
  # configs and metadata. Note that the version can still be unspecified.
  if self._android_rev:
    uprev_version = _GetAndroidVersionFromMetadata(self._run.attrs.metadata)
    # |uprev_version| may not be set.
    if uprev_version:
      versions.add(uprev_version)

    uprev_branch = self._run.config.android_import_branch
    assert uprev_branch
    branches.add(uprev_branch)

    # If we uprev Android, branch/version must be consistent.
    assert len(versions) <= 1, 'Multiple Android versions: %r' % versions
    assert len(branches) <= 1, 'Multiple Android branches: %r' % branches

  # If there is a unique value across all the boards, treat it as the
  # version for the build.
  # TODO(nya): Represent "N/A" and "Multiple" differently in metadata.
  def _Aggregate(v):
    if not v:
      return (None, 'N/A')
    elif len(v) == 1:
      return (v[0], str(v[0]))
    return (None, 'Multiple')

  metadata_version, debug_version = _Aggregate(list(versions))
  metadata_branch, debug_branch = _Aggregate(list(branches))
  metadata_target, debug_target = _Aggregate(list(targets))

  # Update the primary metadata and upload it.
  self._run.attrs.metadata.UpdateKeyDictWithDict(
      'version',
      {'android': metadata_version,
       'android-branch': metadata_branch,
       'android-target': metadata_target})
  self.UploadMetadata(filename=constants.PARTIAL_METADATA_JSON)

  # Leave build info on the buildbot steps page for convenience.
  logging.PrintBuildbotStepText('tag %s' % debug_version)
  logging.PrintBuildbotStepText('branch %s' % debug_branch)
  logging.PrintBuildbotStepText('target %s' % debug_target)
def _WaitForSigningResults(self, instruction_urls_per_channel,
                           channel_notifier=None):
  """Do the work of waiting for signer results and logging them.

  Args:
    instruction_urls_per_channel: push_image data (see _WaitForPushImage).
    channel_notifier: Method to call with channel name when ready, or None.

  Raises:
    ValueError: If the signer result isn't valid json.
    RunCommandError: If we are unable to download signer results.
  """
  gs_ctx = gs.GSContext(dry_run=self._run.options.debug)

  try:
    logging.info('Waiting for signer results.')
    timeout_util.WaitForReturnTrue(
        self._CheckForResults,
        func_args=(gs_ctx, instruction_urls_per_channel, channel_notifier),
        timeout=self.SIGNING_TIMEOUT, period=self.SIGNING_PERIOD)
  except timeout_util.TimeoutError:
    msg = 'Image signing timed out.'
    logging.error(msg)
    logging.PrintBuildbotStepText(msg)
    raise SignerResultsTimeout(msg)

  # Log all signer results, then handle any signing failures.
  failures = []
  for url_results in self.signing_results.values():
    for url, signer_result in url_results.items():
      result_description = os.path.basename(url)
      logging.PrintBuildbotStepText(result_description)
      logging.info('Received results for: %s', result_description)
      logging.info(pformat.json(signer_result))

      status = self._SigningStatusFromJson(signer_result)
      if status != constants.SIGNER_STATUS_PASSED:
        failures.append(result_description)
        logging.error('Signing failed for: %s', result_description)
        details = signer_result.get('status', {}).get('details')
        if details:
          logging.info('Details:\n%s', details)

  if failures:
    logging.error('Failure summary:')
    for failure in failures:
      logging.error('  %s', failure)
    raise SignerFailure(', '.join([str(f) for f in failures]))
def EnsureChrootAtVersion(self, version):
  """Ensure the current chroot is at version |version|.

  If our chroot is already at |version|, use it. Otherwise, blow away the
  chroot.

  Args:
    version: Version of the chroot to look for, e.g. 6394.0.0-rc3.
  """
  chroot = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
  if version and self.GetChrootVersion(chroot) == version:
    logging.PrintBuildbotStepText('(Using existing chroot)')
  else:
    logging.PrintBuildbotStepText('(Using fresh chroot)')
    osutils.RmDir(chroot, ignore_missing=True, sudo=True)
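# A hypothetical usage sketch from inside a stage; the version string simply
# reuses the example format from the docstring above.
self.EnsureChrootAtVersion('6394.0.0-rc3')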
def PerformStage(self):
  new_chroot_dir = 'new-sdk-chroot'
  tarball_location = os.path.join(self._build_root, SDK_TARBALL_NAME)
  new_chroot_args = ['--chroot', new_chroot_dir]
  if self._run.options.chrome_root:
    new_chroot_args += ['--chrome_root', self._run.options.chrome_root]

  # Build a new SDK using the provided tarball.
  chroot_args = new_chroot_args + [
      '--download', '--replace', '--nousepkg',
      '--url', 'file://' + tarball_location]
  cros_build_lib.run(['true'], cwd=self._build_root, enter_chroot=True,
                     chroot_args=chroot_args,
                     extra_env=self._portage_extra_env)

  # Inject the toolchain binpkgs from the previous sdk build. On end user
  # systems, they'd be fetched from the binpkg mirror, but we don't have one
  # set up for this local build.
  pkgdir = os.path.join('var', 'lib', 'portage', 'pkgs')
  old_pkgdir = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR,
                            pkgdir)
  new_pkgdir = os.path.join(self._build_root, new_chroot_dir, pkgdir)
  osutils.SafeMakedirs(new_pkgdir, sudo=True)
  cros_build_lib.sudo_run(
      ['cp', '-r'] + glob.glob(os.path.join(old_pkgdir, '*')) + [new_pkgdir])

  # Now install those toolchains in the new chroot. We skip the chroot
  # upgrade below which means we need to install the toolchain manually.
  cmd = ['cros_setup_toolchains', '--targets=boards',
         '--include-boards=%s' % ','.join(self._boards)]
  commands.RunBuildScript(self._build_root, cmd, chromite_cmd=True,
                          enter_chroot=True, sudo=True,
                          chroot_args=new_chroot_args,
                          extra_env=self._portage_extra_env)

  # Build all the boards with the new sdk.
  for board in self._boards:
    logging.PrintBuildbotStepText(board)
    commands.SetupBoard(self._build_root, board, usepkg=True,
                        chroot_upgrade=False,
                        extra_env=self._portage_extra_env,
                        chroot_args=new_chroot_args)
    commands.Build(self._build_root, board, build_autotest=True,
                   usepkg=False, extra_env=self._portage_extra_env,
                   chroot_args=new_chroot_args, disable_revdep_logic=True)
def PerformStage(self):
  # Initially get version from metadata in case the initial sync
  # stage set it.
  self.android_version = self._GetAndroidVersionFromMetadata()

  # Need to always iterate through and generate the board-specific
  # Android version metadata. Each board must be handled separately
  # since there might be differing builds in the same release group.
  versions = set()
  for builder_run in self._run.GetUngroupedBuilderRuns():
    for board in builder_run.config.boards:
      try:
        # Determine the version for each board and record metadata.
        version = self._run.DetermineAndroidVersion(boards=[board])
        builder_run.attrs.metadata.UpdateBoardDictWithDict(
            board, {'android-container-version': version})
        versions.add(version)
        logging.info('Board %s has Android version %s', board, version)
      except cbuildbot_run.NoAndroidVersionError as ex:
        logging.info('Board %s does not contain Android (%s)', board, ex)

  # If there wasn't a version specified in the manifest but there is
  # a unique one across all the boards, treat it as the version for the
  # entire step.
  if self.android_version is None and len(versions) == 1:
    self.android_version = versions.pop()

  if self.android_version:
    logging.PrintBuildbotStepText('tag %s' % self.android_version)
def UploadArtifact(self, path, archive=True, strict=True):
  """Upload generated artifact to Google Storage.

  Args:
    path: Path of local file to upload to Google Storage if |archive| is
      True. Otherwise, this is the name of the file in self.archive_path.
    archive: Whether to automatically copy files to the archive dir.
    strict: Whether to treat upload errors as fatal.
  """
  filename = path
  if archive:
    filename = commands.ArchiveFile(path, self.archive_path)
  upload_urls = self._GetUploadUrls(filename)
  try:
    commands.UploadArchivedFile(
        self.archive_path, upload_urls, filename, self._run.debug,
        update_list=True, acl=self.acl)
  except failures_lib.GSUploadFailure as e:
    logging.PrintBuildbotStepText('Upload failed')
    if e.HasFatalFailure(
        whitelist=[gs.GSContextException, timeout_util.TimeoutError]):
      raise
    elif strict:
      raise
    else:
      # Treat gsutil flake as a warning if it's the only problem.
      self._HandleExceptionAsWarning(sys.exc_info())
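# A hypothetical usage sketch from inside a stage; 'image.zip' is a made-up
# artifact name. With strict=False, a lone gsutil flake is downgraded to a
# warning, while fatal GS errors and timeouts still raise.
self.UploadArtifact('image.zip', archive=True, strict=False)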
def _AnnotateFailingBuilders(self, failing, inflight, no_stat, statuses):
  """Add annotations that link to either failing or inflight builders.

  Adds buildbot links to failing builder dashboards. If no builders are
  failing, adds links to inflight builders. Adds step text for builders
  with status None.

  Args:
    failing: Set of builder names of slave builders that failed.
    inflight: Set of builder names of slave builders that are inflight.
    no_stat: Set of builder names of slave builders that had status None.
    statuses: A builder-name->status dictionary, which will provide
      the dashboard_url values for any links.
  """
  builders_to_link = set.union(failing, inflight)
  for builder in builders_to_link:
    if statuses[builder].dashboard_url:
      if statuses[builder].message:
        text = '%s: %s' % (builder, statuses[builder].message.reason)
      else:
        text = '%s: timed out' % builder

      logging.PrintBuildbotLink(text, statuses[builder].dashboard_url)

  for builder in no_stat:
    logging.PrintBuildbotStepText('%s did not start.' % builder)
def PrintUprevMetadata(build_branch, stable_candidate, new_ebuild):
  """Shows metadata on the buildbot page at the UprevAndroid step.

  Args:
    build_branch: The branch of Android builds.
    stable_candidate: The existing stable ebuild.
    new_ebuild: The newly written ebuild.
  """
  # Examples:
  # "android-container-pi revved 6461825-r1 -> 6468247-r1"
  # "android-container-pi revved 6461825-r1 -> 6461825-r2 (ebuild update only)"
  msg = '%s revved %s -> %s' % (stable_candidate.pkgname,
                                stable_candidate.version,
                                new_ebuild.version)

  old_android = stable_candidate.version_no_rev
  new_android = new_ebuild.version_no_rev
  if old_android == new_android:
    msg += ' (ebuild update only)'
  else:
    ab_link = ('https://android-build.googleplex.com'
               '/builds/%s/branches/%s/cls?end=%s' %
               (new_android, build_branch, old_android))
    logging.PrintBuildbotLink('Android changelog', ab_link)

  logging.PrintBuildbotStepText(msg)
  logging.PrintKitchenSetBuildProperty('android_uprev', json.dumps({
      'branch': build_branch,
      'new': new_ebuild.version,
      'old': stable_candidate.version,
      'pkgname': stable_candidate.pkgname,
  }))
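# A hypothetical, self-contained sketch of a call producing the first example
# message above. The SimpleNamespace objects stand in for real ebuild objects,
# and the branch name is made up.
from types import SimpleNamespace

stable = SimpleNamespace(pkgname='android-container-pi',
                         version='6461825-r1', version_no_rev='6461825')
new = SimpleNamespace(pkgname='android-container-pi',
                      version='6468247-r1', version_no_rev='6468247')
# Emits "android-container-pi revved 6461825-r1 -> 6468247-r1" as step text,
# plus an 'Android changelog' link, since the build numbers differ.
PrintUprevMetadata('git_pi-arc', stable, new)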
def PerformStage(self):
  logging.PrintBuildbotStepText(self._version)

  if self._run.config.prebuilts == constants.PUBLIC:
    binhost_conf_dir = constants.PUBLIC_BINHOST_CONF_DIR
  else:
    binhost_conf_dir = constants.PRIVATE_BINHOST_CONF_DIR
  sdk_conf = os.path.join(self._build_root, binhost_conf_dir,
                          'host', 'sdk_version.conf')
  tc_path_format = prebuilts.GetToolchainSdkUploadFormat(
      self._version,
      prebuilts.GetToolchainSdkPaths(self._build_root)[0][1])
  sdk_settings = {
      'SDK_LATEST_VERSION': self._version,
      'TC_PATH': tc_path_format % {'version': self._version},
  }
  if self._run.options.publish:
    upload_prebuilts.RevGitFile(
        sdk_conf, sdk_settings, dryrun=self._run.options.debug)
  else:
    logging.info(
        'Not updating sdk_version.conf because publishing was disabled.')
def PerformStage(self):
  chrome_version = self.DetermineChromeVersion()

  logging.PrintBuildbotStepText('tag %s' % chrome_version)

  sync_chrome = os.path.join(
      self._orig_root, 'chromite', 'bin', 'sync_chrome')

  # Branched gclient can use git-cache incompatibly, so use a temp one.
  with osutils.TempDir(prefix='dummy') as git_cache:
    # --reset tells sync_chrome to blow away local changes and to feel
    # free to delete any directories that get in the way of syncing. This
    # is needed for unattended operation.
    # --ignore-locks tells sync_chrome to ignore git-cache locks.
    # --gclient is not specified here; sync_chrome will locate the one
    # on the $PATH.
    cmd = [sync_chrome,
           '--reset', '--ignore_locks',
           '--tag', chrome_version,
           '--git_cache_dir', git_cache]

    if constants.USE_CHROME_INTERNAL in self._run.config.useflags:
      cmd += ['--internal']

    cmd += [self._run.options.chrome_root]
    with timeout_util.Timeout(self.SYNC_CHROME_TIMEOUT):
      retry_util.RunCommandWithRetries(
          constants.SYNC_RETRIES, cmd, cwd=self._build_root)
def testPrintBuildbotFunctionsWithMarker(self):
  """PrintBuildbot* with markers should be recognized by buildbot."""
  logging.EnableBuildbotMarkers()
  self.AssertLogContainsMsg(
      '@@@STEP_LINK@name@url@@@',
      lambda: logging.PrintBuildbotLink('name', 'url'),
      check_stderr=True)
  self.AssertLogContainsMsg(
      '@@@STEP_TEXT@text@@@',
      lambda: logging.PrintBuildbotStepText('text'),
      check_stderr=True)
  self.AssertLogContainsMsg(
      '@@@STEP_WARNINGS@@@',
      logging.PrintBuildbotStepWarnings,
      check_stderr=True)
  self.AssertLogContainsMsg(
      '@@@STEP_FAILURE@@@',
      logging.PrintBuildbotStepFailure,
      check_stderr=True)
  self.AssertLogContainsMsg(
      '@@@BUILD_STEP@name@@@',
      lambda: logging.PrintBuildbotStepName('name'),
      check_stderr=True)
  self.AssertLogContainsMsg(
      '@@@SET_BUILD_PROPERTY@name@"value"@@@',
      lambda: logging.PrintKitchenSetBuildProperty('name', 'value'),
      check_stderr=True)
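# A minimal sketch of the stderr annotations these helpers emit once markers
# are enabled; the expected strings come from the assertions above, and the
# import path is an assumption based on chromite conventions.
from chromite.lib import cros_logging as logging

logging.EnableBuildbotMarkers()
logging.PrintBuildbotLink('name', 'url')   # @@@STEP_LINK@name@url@@@
logging.PrintBuildbotStepText('text')      # @@@STEP_TEXT@text@@@
logging.PrintBuildbotStepWarnings()        # @@@STEP_WARNINGS@@@
logging.PrintBuildbotStepName('name')      # @@@BUILD_STEP@name@@@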
def testPrintBuildbotFunctionsNoMarker(self):
  """PrintBuildbot* without markers should not be recognized by buildbot."""
  self.AssertLogContainsMsg(
      '@@@STEP_LINK@',
      lambda: logging.PrintBuildbotLink('name', 'url'),
      check_stderr=True, invert=True)
  self.AssertLogContainsMsg(
      '@@@STEP_TEXT@',
      lambda: logging.PrintBuildbotStepText('text'),
      check_stderr=True, invert=True)
  self.AssertLogContainsMsg(
      '@@@STEP_WARNINGS@@@',
      logging.PrintBuildbotStepWarnings,
      check_stderr=True, invert=True)
  self.AssertLogContainsMsg(
      '@@@STEP_FAILURE@@@',
      logging.PrintBuildbotStepFailure,
      check_stderr=True, invert=True)
  self.AssertLogContainsMsg(
      '@@@BUILD_STEP',
      lambda: logging.PrintBuildbotStepName('name'),
      check_stderr=True, invert=True)
  self.AssertLogContainsMsg(
      '@@@SET_BUILD_PROPERTY',
      lambda: logging.PrintKitchenSetBuildProperty('name', {'a': 'value'}),
      check_stderr=True, invert=True)
def ApplyChange(self, change):
  """Apply the change and annotate the buildbot step with it."""
  if isinstance(change, cros_patch.GerritPatch):
    logging.PrintBuildbotLink(str(change), change.url)
  elif isinstance(change, cros_patch.UploadedLocalPatch):
    logging.PrintBuildbotStepText(str(change))

  return patch_series.PatchSeries.ApplyChange(self, change)
def _UpdateLuciProject(self):
  chromite_source_file = os.path.join(constants.CHROMITE_DIR, 'config',
                                      'luci-scheduler.cfg')
  generated_source_file = os.path.join(self.project_dir, 'generated',
                                       'luci-scheduler.cfg')

  target_file = os.path.join(self.project_dir, 'luci', 'luci-scheduler.cfg')

  concatenated_content = (osutils.ReadFile(chromite_source_file) +
                          '\n\n' +
                          osutils.ReadFile(generated_source_file))

  if concatenated_content == osutils.ReadFile(target_file):
    logging.PrintBuildbotStepText('luci-scheduler.cfg current: No Update.')
    return

  chromite_rev = git.RunGit(
      constants.CHROMITE_DIR,
      ['rev-parse', 'HEAD:config/luci-scheduler.cfg']).output.rstrip()

  message = textwrap.dedent("""\
      luci-scheduler.cfg: Chromite %s

      Auto update to match generated file in chromite and luci config.
      """ % chromite_rev)

  with open(target_file, 'w') as f:
    f.write(concatenated_content)

  git.RunGit(self.project_dir, ['add', '-A'])
  git.RunGit(self.project_dir, ['commit', '-m', message])

  push_to = git.RemoteRef('origin', self.PROJECT_BRANCH)
  logging.info('Pushing to branch (%s) with message: %s %s',
               push_to, message,
               ' (dryrun)' if self._run.options.debug else '')
  git.RunGit(self.project_dir, ['config', 'push.default', 'tracking'],
             print_cmd=True)
  git.PushBranch(self.PROJECT_BRANCH, self.project_dir,
                 dryrun=self._run.options.debug)
  logging.PrintBuildbotStepText('luci-scheduler.cfg: Updated.')
def _PrintBuildMessage(self, text, url=None):
  """Print the build message.

  Args:
    text: Text (string) to print.
    url: URL (string) to link to the text, default to None.
  """
  if url is not None:
    logging.PrintBuildbotLink(text, url)
  else:
    logging.PrintBuildbotStepText(text)
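# Hypothetical calls from inside a stage showing both branches; the builder
# name and dashboard URL are made up.
self._PrintBuildMessage('amd64-generic: timed out',
                        'https://ci.example.com/b/amd64-generic')  # buildbot link
self._PrintBuildMessage('amd64-generic did not start.')  # plain step text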
def PerformStage(self):
  chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)

  # Workspace chroots are always wiped by the cleanup stage; no need to
  # update.
  cmd = ['cros_sdk', '--create'] + ChrootArgs(self._run.options)

  commands.RunBuildScript(self._build_root, cmd, chromite_cmd=True,
                          extra_env=self._portage_extra_env)

  post_ver = cros_sdk_lib.GetChrootVersion(chroot_path)
  logging.PrintBuildbotStepText(post_ver)
def _AnnotateFailingBuilders(self, failing, inflight, no_stat, statuses,
                             experimental_statuses, self_destructed):
  """Annotate the failing, inflight and no_stat builds with text and links.

  Add text and buildbot links to build dashboards for failing builds and
  inflight builds. For master builds using the Buildbucket scheduler, add
  text and buildbot links for the no_stat builds; for other master builds,
  add step text for the no_stat builds.

  Args:
    failing: Set of builder names of slave builders that failed.
    inflight: Set of builder names of slave builders that are inflight.
    no_stat: Set of builder names of slave builders that had status None.
    statuses: A builder-name->status dictionary, which will provide
      the dashboard_url values for any links.
    experimental_statuses: A builder-name->status dictionary for all slaves
      that were set as experimental through the tree status.
    self_destructed: Boolean indicating whether the master build destructed
      itself and stopped waiting for completion of its slaves.
  """
  for build in failing:
    if statuses[build].message:
      self._PrintBuildMessage(
          '%s: %s' % (build, statuses[build].message.reason),
          statuses[build].dashboard_url)
    else:
      self._PrintBuildMessage(
          '%s: failed due to unknown reasons' % build,
          statuses[build].dashboard_url)

  if not self_destructed:
    for build in inflight:
      self._PrintBuildMessage('%s: timed out' % build,
                              statuses[build].dashboard_url)
    self._AnnotateNoStatBuilders(no_stat)
  else:
    logging.PrintBuildbotStepText('The master destructed itself and stopped '
                                  'waiting for the following slaves:')
    for build in inflight:
      self._PrintBuildMessage('%s: still running' % build,
                              statuses[build].dashboard_url)
    self._AnnotateNoStatBuilders(no_stat)

  for build, status in experimental_statuses.items():
    if not status.Passed():
      self._PrintBuildMessage(
          '%s: set as experimental through tree status' % build,
          status.dashboard_url)
def PerformStage(self):
  # This prepares depot_tools in the source tree, in advance.
  self.DepotToolsEnsureBootstrap()

  chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
  replace = self._run.config.chroot_replace or self.force_chroot_replace
  pre_ver = post_ver = None
  if os.path.isdir(self._build_root) and not replace:
    try:
      pre_ver = cros_sdk_lib.GetChrootVersion(chroot=chroot_path)
      if pre_ver is not None:
        commands.RunChrootUpgradeHooks(
            self._build_root, chrome_root=self._run.options.chrome_root,
            extra_env=self._portage_extra_env)
    except failures_lib.BuildScriptFailure:
      logging.PrintBuildbotStepText('Replacing broken chroot')
      logging.PrintBuildbotStepWarnings()

  if not os.path.isdir(chroot_path) or replace:
    use_sdk = (self._run.config.use_sdk and not self._run.options.nosdk)
    pre_ver = None
    commands.MakeChroot(buildroot=self._build_root,
                        replace=replace,
                        use_sdk=use_sdk,
                        chrome_root=self._run.options.chrome_root,
                        extra_env=self._portage_extra_env,
                        use_image=self._run.config.chroot_use_image)

  post_ver = cros_sdk_lib.GetChrootVersion(chroot=chroot_path)
  if pre_ver is not None and pre_ver != post_ver:
    logging.PrintBuildbotStepText('%s->%s' % (pre_ver, post_ver))
  else:
    logging.PrintBuildbotStepText(post_ver)

  commands.SetSharedUserPassword(
      self._build_root,
      password=self._run.config.shared_user_password)
def PerformStage(self):
  chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
  replace = self._run.config.chroot_replace or self.force_chroot_replace
  pre_ver = post_ver = None
  if os.path.isdir(self._build_root) and not replace:
    try:
      pre_ver = cros_build_lib.GetChrootVersion(chroot=chroot_path)
      commands.RunChrootUpgradeHooks(
          self._build_root, chrome_root=self._run.options.chrome_root,
          extra_env=self._portage_extra_env)
    except failures_lib.BuildScriptFailure:
      logging.PrintBuildbotStepText('Replacing broken chroot')
      logging.PrintBuildbotStepWarnings()
    else:
      # Clear the chroot manifest version as we are in the middle of building.
      chroot_manager = chroot_lib.ChrootManager(self._build_root)
      chroot_manager.ClearChrootVersion()

  if not os.path.isdir(chroot_path) or replace:
    use_sdk = (self._run.config.use_sdk and not self._run.options.nosdk)
    pre_ver = None
    commands.MakeChroot(buildroot=self._build_root,
                        replace=replace,
                        use_sdk=use_sdk,
                        chrome_root=self._run.options.chrome_root,
                        extra_env=self._portage_extra_env)

  post_ver = cros_build_lib.GetChrootVersion(chroot=chroot_path)
  if pre_ver is not None and pre_ver != post_ver:
    logging.PrintBuildbotStepText('%s->%s' % (pre_ver, post_ver))
  else:
    logging.PrintBuildbotStepText(post_ver)

  commands.SetSharedUserPassword(
      self._build_root,
      password=self._run.config.shared_user_password)
def InitialCheckout(repo):
  """Preliminary ChromeOS checkout.

  Perform a complete checkout of ChromeOS on the specified branch. This
  does NOT match what the build needs, but ensures the buildroot both has
  a 'hot' checkout, and is close enough that the branched cbuildbot can
  successfully get the right checkout.

  This checks out full ChromeOS, even if a ChromiumOS build is going to be
  performed. This is because we have no knowledge of the build config to
  be used.

  Args:
    repo: repository.RepoRepository instance.
  """
  logging.PrintBuildbotStepText('Branch: %s' % repo.branch)
  logging.info('Bootstrap script starting initial sync on branch: %s',
               repo.branch)
  repo.Sync(detach=True)
def PerformStage(self): """Wait for payloads to be staged and uploads its au control files.""" # Wait for UploadHWTestArtifacts to generate the payloads. if not self.GetParallel('delta_payloads_generated', pretty_name='delta payloads'): logging.PrintBuildbotStepText('Missing delta payloads.') logging.PrintBuildbotStepWarnings() logging.warning( 'Cannot run HWTest because UploadTestArtifacts failed. ' 'See UploadTestArtifacts for details.') return with osutils.TempDir() as tempdir: tarball = commands.BuildAUTestTarball(self._build_root, self._current_board, tempdir, self.version, self.upload_url) self.UploadArtifact(tarball) super(AUTestStage, self).PerformStage()
def PerformStage(self): """Sync stuff!""" logging.info('SubWorkspaceSync') cmd = [ os.path.join(constants.CHROMITE_DIR, 'scripts', 'repo_sync_manifest'), '--repo-root', self._build_root, '--manifest-versions-int', self.int_manifest_versions_path, '--manifest-versions-ext', self.ext_manifest_versions_path, ] if self.external: cmd += ['--external'] if self.branch and not self.version: cmd += ['--branch', self.branch] if self.version: logging.PrintBuildbotStepText('Version: %s' % self.version) cmd += ['--version', self.version] if self.patch_pool: patch_options = [] for patch in self.patch_pool: logging.PrintBuildbotLink(str(patch), patch.url) patch_options += ['--gerrit-patches', patch.gerrit_number_str] cmd += patch_options if self.copy_repo: cmd += ['--copy-repo', self.copy_repo] assert not (self.version and self.patch_pool), ( 'Can\'t cherry-pick "%s" into an official version "%s."' % (patch_options, self.version)) cros_build_lib.run(cmd)
def PerformStage(self): """Increment ChromeOS version, and publish buildpec.""" repo = self.GetWorkspaceRepo() # TODO: Add 'patch' support somehow, if repo.branch == 'master': incr_type = 'build' else: incr_type = 'branch' build_spec_path = manifest_version.GenerateAndPublishOfficialBuildSpec( repo, incr_type, manifest_versions_int=self.int_manifest_versions_path, manifest_versions_ext=self.ext_manifest_versions_path, dryrun=self._run.options.debug) if self._run.options.debug: msg = 'DEBUG: Would have defined: %s' % build_spec_path else: msg = 'Defined: %s' % build_spec_path logging.PrintBuildbotStepText(msg)
def GetNextManifest(self):
  """Uses the initialized manifest manager to get the next manifest."""
  assert self.manifest_manager, \
      'Must run GetStageManager before checking out the build.'
  build_id = self._run.attrs.metadata.GetDict().get('build_id')
  to_return = self.manifest_manager.GetNextBuildSpec(build_id=build_id)
  logging.info('Found next version to build: %s', to_return)
  previous_version = self.manifest_manager.GetLatestPassingSpec()
  target_version = self.manifest_manager.current_version

  # Print the blamelist here.
  url_prefix = 'https://crosland.corp.google.com/log/'
  url = url_prefix + '%s..%s' % (previous_version, target_version)
  logging.PrintBuildbotLink('Blamelist', url)
  # The testManifestVersionedSyncOnePartBranch interacts badly with this
  # function. It doesn't fully initialize self.manifest_manager, which
  # causes target_version to be None. Since there isn't a clean fix in
  # either direction, just pass this through str(). In the normal case,
  # it's already a string anyway.
  logging.PrintBuildbotStepText(str(target_version))

  return to_return
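# For illustration only: with made-up version strings, the blamelist link
# built above would look like this.
url = 'https://crosland.corp.google.com/log/' + '%s..%s' % ('12371.0.0',
                                                            '12372.0.0')
# url == 'https://crosland.corp.google.com/log/12371.0.0..12372.0.0'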
def ForceVersion(self, version):
  """Creates a manifest manager from the given version; returns the manifest."""
  logging.PrintBuildbotStepText(version)
  return self.manifest_manager.BootstrapFromVersion(version)