Code Example #1
 def testPrintBuildbotFunctionsNoMarker(self):
     """PrintBuildbot* without markers should not be recognized by buildbot."""
     self.AssertLogContainsMsg(
         '@@@STEP_LINK@',
         lambda: logging.PrintBuildbotLink('name', 'url'),
         check_stderr=True,
         invert=True)
     self.AssertLogContainsMsg(
         '@@@STEP_TEXT@',
         lambda: logging.PrintBuildbotStepText('text'),
         check_stderr=True,
         invert=True)
     self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
                               logging.PrintBuildbotStepWarnings,
                               check_stderr=True,
                               invert=True)
     self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
                               logging.PrintBuildbotStepFailure,
                               check_stderr=True,
                               invert=True)
     self.AssertLogContainsMsg(
         '@@@BUILD_STEP',
         lambda: logging.PrintBuildbotStepName('name'),
         check_stderr=True,
         invert=True)
     self.AssertLogContainsMsg('@@@SET_BUILD_PROPERTY',
                               lambda: logging.PrintKitchenSetBuildProperty(
                                   'name', {'a': 'value'}),
                               check_stderr=True,
                               invert=True)
Code Example #2
 def testPrintBuildbotFunctionsWithMarker(self):
     """PrintBuildbot* with markers should be recognized by buildbot."""
     logging.EnableBuildbotMarkers()
     self.AssertLogContainsMsg(
         '@@@STEP_LINK@name@url@@@',
         lambda: logging.PrintBuildbotLink('name', 'url'),
         check_stderr=True)
     self.AssertLogContainsMsg(
         '@@@STEP_TEXT@text@@@',
         lambda: logging.PrintBuildbotStepText('text'),
         check_stderr=True)
     self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
                               logging.PrintBuildbotStepWarnings,
                               check_stderr=True)
     self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
                               logging.PrintBuildbotStepFailure,
                               check_stderr=True)
     self.AssertLogContainsMsg(
         '@@@BUILD_STEP@name@@@',
         lambda: logging.PrintBuildbotStepName('name'),
         check_stderr=True)
     self.AssertLogContainsMsg(
         '@@@SET_BUILD_PROPERTY@name@"value"@@@',
         lambda: logging.PrintKitchenSetBuildProperty('name', 'value'),
         check_stderr=True)
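
The assertions above pin down the marker format these helpers write to stderr: each message is wrapped in '@@@...@@@', fields are joined with '@', and PrintKitchenSetBuildProperty JSON-encodes its value (hence the quoted "value"). The following is a minimal standalone sketch that reproduces just that observed format with the standard library; it is not the chromite.lib.cros_logging implementation, and the lowercase function names are made up for illustration.

# Illustrative sketch only, not the chromite.lib.cros_logging implementation.
# It reproduces the marker strings asserted in the test above.
import json
import sys


def print_buildbot_link(text, url):
    # Emits '@@@STEP_LINK@name@url@@@' for text='name', url='url'.
    sys.stderr.write('@@@STEP_LINK@%s@%s@@@\n' % (text, url))


def print_kitchen_set_build_property(name, value):
    # The value is JSON-encoded, hence '@@@SET_BUILD_PROPERTY@name@"value"@@@'.
    sys.stderr.write('@@@SET_BUILD_PROPERTY@%s@%s@@@\n' % (name, json.dumps(value)))


print_buildbot_link('name', 'url')
print_kitchen_set_build_property('name', 'value')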
Code Example #3
def PrintUprevMetadata(build_branch, stable_candidate, new_ebuild):
  """Shows metadata on buildbot page at UprevAndroid step.

  Args:
    build_branch: The branch of Android builds.
    stable_candidate: The existing stable ebuild.
    new_ebuild: The newly written ebuild.
  """
  # Examples:
  # "android-container-pi revved 6461825-r1 -> 6468247-r1"
  # "android-container-pi revved 6461825-r1 -> 6461825-r2 (ebuild update only)"
  msg = '%s revved %s -> %s' % (stable_candidate.pkgname,
                                stable_candidate.version,
                                new_ebuild.version)

  old_android = stable_candidate.version_no_rev
  new_android = new_ebuild.version_no_rev

  if old_android == new_android:
    msg += ' (ebuild update only)'
  else:
    ab_link = ('https://android-build.googleplex.com'
               '/builds/%s/branches/%s/cls?end=%s'
               % (new_android, build_branch, old_android))
    logging.PrintBuildbotLink('Android changelog', ab_link)

  logging.PrintBuildbotStepText(msg)
  logging.PrintKitchenSetBuildProperty('android_uprev', json.dumps({
      'branch': build_branch,
      'new': new_ebuild.version,
      'old': stable_candidate.version,
      'pkgname': stable_candidate.pkgname,
  }))
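
PrintUprevMetadata publishes the uprev twice: as human-readable step text and as a machine-readable 'android_uprev' property. Below is a small sketch of the property payload, reusing the versions from the example comment above; the branch name 'example-branch' is a placeholder, not a real Android build branch.

import json

# Values taken from the example comment above; 'example-branch' is a
# placeholder, not a real Android build branch name.
android_uprev = json.dumps({
    'branch': 'example-branch',
    'new': '6468247-r1',
    'old': '6461825-r1',
    'pkgname': 'android-container-pi',
})
print(android_uprev)
# {"branch": "example-branch", "new": "6468247-r1", "old": "6461825-r1",
#  "pkgname": "android-container-pi"}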
Code Example #4
def UpdateSelfBuildPropertiesNonBlocking(key, value):
    """Updates the build.output.properties with key:value through a service.

  Butler is a ChOps service that reads in logs and updates Buildbucket of the
  properties. This method has no guarantees on the timeliness of updating
  the property.

  Args:
    key: name of the property.
    value: value of the property.
  """
    logging.PrintKitchenSetBuildProperty(key, value)
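
A hedged usage sketch: since UpdateSelfBuildPropertiesNonBlocking is a thin wrapper around logging.PrintKitchenSetBuildProperty, callers just pass a property name and a JSON-serializable value. The property names below are hypothetical examples, not keys the surrounding pipeline necessarily defines.

# Hypothetical property names; any JSON-serializable value works.
UpdateSelfBuildPropertiesNonBlocking('exit_status', 0)
UpdateSelfBuildPropertiesNonBlocking('affected_targets', ['base_unittests'])
# Each call is equivalent to:
#   logging.PrintKitchenSetBuildProperty('exit_status', 0)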
Code Example #5
    def PerformStage(self):
        packages = self.GetListOfPackagesToBuild()
        self.VerifyChromeBinpkg(packages)
        if self._record_packages_under_test:
            self.RecordPackagesUnderTest()

        # Set up goma. Use goma iff chrome needs to be built.
        chroot_args = self._SetupGomaIfNecessary()
        run_goma = bool(chroot_args)
        if self._run.options.cache_dir:
            chroot_args = chroot_args or []
            chroot_args += ['--cache-dir', self._run.options.cache_dir]

        # Disable revdep logic on full and release builders. These builders
        # never reuse sysroots, so the revdep logic only causes unnecessary
        # rebuilds in the SDK. The SDK rebuilds sometimes hit build-critical
        # packages, causing races and build failures.
        clean_build = (self._run.config.build_type == constants.CANARY_TYPE
                       or self._run.config.build_type == constants.FULL_TYPE or
                       self._run.config.build_type == constants.TOOLCHAIN_TYPE)

        # Set property to specify bisection builder job to run for Findit.
        logging.PrintKitchenSetBuildProperty(
            'BISECT_BUILDER', self._current_board + '-postsubmit-tryjob')
        try:
            commands.Build(
                self._build_root,
                self._current_board,
                build_autotest=self._run.ShouldBuildAutotest(),
                usepkg=self._run.config.usepkg_build_packages,
                packages=packages,
                skip_chroot_upgrade=True,
                chrome_root=self._run.options.chrome_root,
                noretry=self._run.config.nobuildretry,
                chroot_args=chroot_args,
                extra_env=self._portage_extra_env,
                run_goma=run_goma,
                build_all_with_goma=self._run.config.build_all_with_goma,
                disable_revdep_logic=clean_build,
            )
        except failures_lib.PackageBuildFailure as ex:
            failure_json = ex.BuildCompileFailureOutputJson()
            failures_filename = os.path.join(self.archive_path,
                                             'BuildCompileFailureOutput.json')
            osutils.WriteFile(failures_filename, failure_json)
            self.UploadArtifact(os.path.basename(failures_filename),
                                archive=False)
            self.PrintDownloadLink(os.path.basename(failures_filename),
                                   text_to_display='BuildCompileFailureOutput')
            gs_url = os.path.join(self.upload_url,
                                  'BuildCompileFailureOutput.json')
            logging.PrintKitchenSetBuildProperty('BuildCompileFailureOutput',
                                                 gs_url)
            raise

        if self._update_metadata:
            # Extract firmware version information from the newly created updater.
            fw_versions = commands.GetFirmwareVersions(self._build_root,
                                                       self._current_board)
            main = fw_versions.main_rw or fw_versions.main
            ec = fw_versions.ec_rw or fw_versions.ec
            update_dict = {
                'main-firmware-version': main,
                'ec-firmware-version': ec
            }
            self._run.attrs.metadata.UpdateBoardDictWithDict(
                self._current_board, update_dict)

            # Write board metadata update to cidb
            build_identifier, _ = self._run.GetCIDBHandle()
            build_id = build_identifier.cidb_id
            if self.buildstore.AreClientsReady():
                self.buildstore.InsertBoardPerBuild(build_id,
                                                    self._current_board,
                                                    update_dict)

            # Get a list of models supported by this board.
            models = commands.GetModels(self._build_root,
                                        self._current_board,
                                        log_output=False)
            self._run.attrs.metadata.UpdateWithDict({'unibuild': bool(models)})
            if models:
                all_fw_versions = commands.GetAllFirmwareVersions(
                    self._build_root, self._current_board)
                models_data = {}
                for model in models:
                    if model in all_fw_versions:
                        fw_versions = all_fw_versions[model]

                        ec = fw_versions.ec_rw or fw_versions.ec
                        main_ro = fw_versions.main
                        main_rw = fw_versions.main_rw or main_ro

                        # Get the firmware key-id for the current board and model.
                        model_arg = '--model=' + model
                        key_id_list = commands.RunCrosConfigHost(
                            self._build_root, self._current_board,
                            [model_arg, 'get', '/firmware-signing', 'key-id'])
                        key_id = None
                        if len(key_id_list) == 1:
                            key_id = key_id_list[0]

                        models_data[model] = {
                            'main-readonly-firmware-version': main_ro,
                            'main-readwrite-firmware-version': main_rw,
                            'ec-firmware-version': ec,
                            'firmware-key-id': key_id
                        }
                if models_data:
                    self._run.attrs.metadata.UpdateBoardDictWithDict(
                        self._current_board, {'models': models_data})
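
For reference, the Findit property set at the start of this stage is just the board name plus a fixed suffix. A tiny sketch with a hypothetical board name (the real value comes from self._current_board):

# Hypothetical board name; in the stage above it comes from self._current_board.
current_board = 'amd64-generic'
print('BISECT_BUILDER = ' + current_board + '-postsubmit-tryjob')
# Prints: BISECT_BUILDER = amd64-generic-postsubmit-tryjob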
Code Example #6
    def _LinkArtifacts(self, builder_run):
        """Upload an HTML index and uploaded.json for artifacts.

    If there are no artifacts in the archive then do nothing.

    Args:
      builder_run: BuilderRun object for this run.
    """
        archive = builder_run.GetArchive()
        archive_path = archive.archive_path

        boards = builder_run.config.boards
        if boards:
            board_names = ' '.join(boards)
        else:
            boards = [None]
            board_names = '<no board>'

        # See if there are any artifacts found for this run.
        uploaded = os.path.join(archive_path, commands.UPLOADED_LIST_FILENAME)
        if not os.path.exists(uploaded):
            # The UPLOADED file doesn't exist, which is expected if the Archive
            # stage never ran. Either way, no archive index is needed.
            logging.info('No archived artifacts found for %s run (%s)',
                         builder_run.config.name, board_names)
            return

        logging.PrintKitchenSetBuildProperty('artifact_link',
                                             archive.upload_url)

        uploaded_json = 'uploaded.json'
        commands.GenerateUploadJSON(os.path.join(archive_path, uploaded_json),
                                    archive_path, uploaded)
        commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                    uploaded_json,
                                    debug=self._run.options.debug_forced,
                                    update_list=True,
                                    acl=self.acl)

        if builder_run.config.internal:
            # Internal builds simply link to pantheon directories, which require
            # authenticated access that most Googlers should have.
            artifacts_url = archive.download_url

        else:
            # External builds must allow unauthenticated access to build artifacts.
            # GS doesn't let unauthenticated users browse selected locations without
            # being able to browse everything (which would expose secret stuff).
            # So, we upload an index.html file and link to it instead of the
            # directory.
            title = 'Artifacts Index: %(board)s / %(version)s (%(config)s config)' % {
                'board': board_names,
                'config': builder_run.config.name,
                'version': builder_run.GetVersion(),
            }

            files = osutils.ReadFile(uploaded).splitlines() + [
                '.|Google Storage Index',
                '..|',
            ]

            index = os.path.join(archive_path, 'index.html')

            # TODO (sbasi) crbug.com/362776: Rework the way we do uploading to
            # multiple buckets. Currently this can only be done in the Archive Stage
            # therefore index.html will only end up in the normal Chrome OS bucket.
            commands.GenerateHtmlIndex(index,
                                       files,
                                       title=title,
                                       url_base=gs.GsUrlToHttp(
                                           archive.upload_url))
            commands.UploadArchivedFile(archive_path, [archive.upload_url],
                                        os.path.basename(index),
                                        debug=self._run.options.debug_forced,
                                        acl=self.acl)

            artifacts_url = os.path.join(archive.download_url_file,
                                         'index.html')

        links_build_description = '%s/%s' % (builder_run.config.name,
                                             archive.version)
        logging.PrintBuildbotLink('Artifacts[%s]' % links_build_description,
                                  artifacts_url)
Code Example #7
    def PerformStage(self):
        if self._run.config['doc']:
            logging.PrintBuildbotLink('Builder documentation',
                                      self._run.config['doc'])

        WriteBasicMetadata(self._run)

        # This is a heuristic value for |important|, since patches that get
        # applied later in the build might change the config. We write it now
        # anyway, because if the build fails before Sync, it is better to have
        # this heuristic value than None. In BuildReexecutionFinishedStage, we
        # re-write the definitive value.
        self._run.attrs.metadata.UpdateWithDict(
            {'important': self._run.config['important']})

        d = self._run.attrs.metadata.GetDict()

        # BuildStartStage should only run once per build. But just in case it
        # is somehow running a second time, we do not want to insert an additional
        # database entry. Detect if a database entry has been inserted already
        # and if so quit the stage.
        if 'build_id' in d:
            logging.info('Already have build_id %s, not inserting an entry.',
                         d['build_id'])
            return

        # Note: In other build stages we use self._run.GetCIDBHandle to fetch
        # a cidb handle. However, since we don't yet have a build_id, we can't
        # do that here.
        if self.buildstore.AreClientsReady():
            db_type = cidb.CIDBConnectionFactory.GetCIDBConnectionType()
            try:
                build_id = self.buildstore.InsertBuild(
                    builder_name=d['builder-name'],
                    build_number=d['build-number'],
                    build_config=d['bot-config'],
                    bot_hostname=d['bot-hostname'],
                    master_build_id=d['master_build_id'],
                    timeout_seconds=self._GetBuildTimeoutSeconds(),
                    important=d['important'],
                    buildbucket_id=self._run.options.buildbucket_id,
                    branch=self._run.manifest_branch)
            except Exception as e:
                logging.error(
                    'Error: %s\n If the buildbucket_id being inserted '
                    'duplicates the buildbucket_id of an old build that was '
                    'canceled because of a waterfall master restart, please '
                    'ignore this error. Otherwise, the error needs more '
                    'investigation. More context: '
                    'crbug.com/679974 and crbug.com/685889', e)
                raise e

            self._run.attrs.metadata.UpdateWithDict({
                'build_id': build_id,
                'db_type': db_type
            })
            logging.info('Inserted build_id %s into cidb database type %s.',
                         build_id, db_type)
            logging.PrintBuildbotStepText('database: %s, build_id: %s' %
                                          (db_type, build_id))

            master_build_id = d['master_build_id']
            if master_build_id is not None:
                master_build_status = self.buildstore.GetBuildStatuses(
                    build_ids=[master_build_id])[0]

                if master_build_status['buildbucket_id']:
                    master_url = uri_lib.ConstructMiloBuildUri(
                        master_build_status['buildbucket_id'])
                else:
                    master_url = uri_lib.ConstructDashboardUri(
                        master_build_status['waterfall'],
                        master_build_status['builder_name'],
                        master_build_status['build_number'])
                logging.PrintBuildbotLink('Link to master build', master_url)

        # Set annealing snapshot revision build property for Findit integration.
        if self._run.options.cbb_snapshot_revision:
            logging.PrintKitchenSetBuildProperty(
                'GOT_REVISION', self._run.options.cbb_snapshot_revision)

        # Write the tag metadata last so that a build_id is available.
        WriteTagMetadata(self._run)